Merge branch 'master' into perl5-pod-folding

Martin Aeschlimann 2019-10-24 11:15:03 +02:00 committed by GitHub
commit 22bf38cade
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
3495 changed files with 249143 additions and 118627 deletions

163
.github/classifier.yml vendored

@ -18,41 +18,128 @@
assignees: [ weinand ],
assignLabel: false
},
diff-editor: [],
dropdown: [],
editor: {
diff-editor: {
assignees: [],
assignLabel: false
},
editor-1000-limit: [],
editor-autoclosing: [],
editor-autoindent: [],
editor-brackets: [],
editor-clipboard: [],
editor-code-actions: [],
editor-code-lens: [],
editor-color-picker: [],
editor-colors: [],
editor-columnselect: [],
editor-commands: [],
editor-contrib: [],
editor-drag-and-drop: [],
editor-find: [],
editor-folding: [],
editor-hover: [],
editor-ime: [],
editor-input: [],
editor-ligatures: [],
editor-links: [],
editor-minimap: [],
editor-multicursor: [],
editor-parameter-hints: [],
editor-rendering: [],
editor-smooth: [],
editor-symbols: [],
editor-textbuffer: [],
editor-wrapping: [],
emmet: [ octref, ramya-rao-a ],
dropdown: [],
editor: {
assignees: [],
assignLabel: false
},
editor-1000-limit: {
assignees: [],
assignLabel: false
},
editor-autoclosing: {
assignees: [],
assignLabel: false
},
editor-autoindent: {
assignees: [],
assignLabel: false
},
editor-brackets: {
assignees: [],
assignLabel: false
},
editor-clipboard: {
assignees: [],
assignLabel: false
},
editor-code-actions: {
assignees: [],
assignLabel: false
},
editor-code-lens: {
assignees: [],
assignLabel: false
},
editor-color-picker: {
assignees: [],
assignLabel: false
},
editor-colors: {
assignees: [],
assignLabel: false
},
editor-columnselect: {
assignees: [],
assignLabel: false
},
editor-commands: {
assignees: [],
assignLabel: false
},
editor-contrib: {
assignees: [],
assignLabel: false
},
editor-drag-and-drop: {
assignees: [],
assignLabel: false
},
editor-find: {
assignees: [],
assignLabel: false
},
editor-folding: {
assignees: [],
assignLabel: false
},
editor-hover: {
assignees: [],
assignLabel: false
},
editor-ime: {
assignees: [],
assignLabel: false
},
editor-input: {
assignees: [],
assignLabel: false
},
editor-ligatures: {
assignees: [],
assignLabel: false
},
editor-links: {
assignees: [],
assignLabel: false
},
editor-minimap: {
assignees: [],
assignLabel: false
},
editor-multicursor: {
assignees: [],
assignLabel: false
},
editor-parameter-hints: {
assignees: [],
assignLabel: false
},
editor-rendering: {
assignees: [],
assignLabel: false
},
editor-smooth: {
assignees: [],
assignLabel: false
},
editor-symbols: {
assignees: [],
assignLabel: false
},
editor-textbuffer: {
assignees: [],
assignLabel: false
},
editor-wrapping: {
assignees: [],
assignLabel: false
},
emmet: [ octref ],
error-list: [],
explorer-custom: [],
extension-host: [],
@ -81,14 +168,20 @@
hot-exit: [],
html: [],
install-update: [],
integrated-terminal: [ Tyriar ],
integrated-terminal: [],
integration-test: [],
intellisense-config: [],
issue-reporter: [ RMacfarlane ],
javascript: [ mjbvz ],
json: [],
keyboard-layout: [],
keybindings: [],
keyboard-layout: {
assignees: [],
assignLabel: false
},
keybindings: {
assignees: [],
assignLabel: false
},
keybindings-editor: [],
lang-diagnostics: [],
languages basic: [],
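
For orientation, the label values in this file take one of two shapes: the old shorthand (a bare list of assignees) and the expanded object introduced here. A rough TypeScript description of those shapes, as an illustrative sketch rather than a type taken from the triage bot's source:

// Illustrative only: the two shapes a label entry takes in .github/classifier.yml.
type LabelConfig =
	| string[]                                        // shorthand: a list of assignee usernames
	| { assignees: string[]; assignLabel: boolean };  // explicit form used by the new entries

// Entries mirroring the diff above:
const emmet: LabelConfig = ['octref'];
const editorFolding: LabelConfig = { assignees: [], assignLabel: false };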

34
.github/commands.yml vendored

@ -4,7 +4,7 @@
{
type: 'comment',
name: 'question',
allowUsers: ['cleidigh', 'usernamehw'],
allowUsers: ['cleidigh', 'usernamehw', 'gjsjohnmurray', 'IllusionMH'],
action: 'updateLabels',
addLabel: '*question'
},
@ -18,43 +18,63 @@
{
type: 'label',
name: '*dev-question',
allowTriggerByBot: true,
action: 'close',
comment: "We have a great developer community [over on slack](https://aka.ms/vscode-dev-community) where extension authors help each other. This is a great place for you to ask questions and find support.\n\nHappy Coding!"
},
{
type: 'label',
name: '*extension-candidate',
allowTriggerByBot: true,
action: 'close',
comment: "We try to keep VS Code lean and we think the functionality you're asking for is great for a VS Code extension. Maybe you can already find one that suits you in the [VS Code Marketplace](https://aka.ms/vscodemarketplace). Just in case, in a few simple steps you can get started [writing your own extension](https://aka.ms/vscodewritingextensions). See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines.\n\nHappy Coding!"
},
{
type: 'label',
name: '*not-reproducible',
allowTriggerByBot: true,
action: 'close',
comment: "We closed this issue because we are unable to reproduce the problem with the steps you describe. Chances are we've already fixed your problem in a recent version of VS Code. If not, please ask us to reopen the issue and provide us with more detail. Our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines might help you with that.\n\nHappy Coding!"
},
{
type: 'label',
name: '*out-of-scope',
allowTriggerByBot: true,
action: 'close',
comment: "This issue is being closed to keep the number of issues in our inbox on a manageable level, we are closing issues that are not going to be addressed in the foreseeable future: We look at the number of votes the issue has received and the number of duplicate issues filed. More details [here](https://aka.ms/vscode-out-of-scope). If you disagree and feel that this issue is crucial: We are happy to listen and to reconsider.\n\nIf you wonder what we are up to, please see our [roadmap](https://aka.ms/vscoderoadmap) and [issue reporting](https://aka.ms/vscodeissuereporting) guidelines.\n\nThanks for your understanding and happy coding!"
comment: "We closed this issue because we don't plan to address it in the foreseeable future. You can find more detailed information about our decision-making process [here](https://aka.ms/vscode-out-of-scope). If you disagree and feel that this issue is crucial: We are happy to listen and to reconsider.\n\nIf you wonder what we are up to, please see our [roadmap](https://aka.ms/vscoderoadmap) and [issue reporting](https://aka.ms/vscodeissuereporting) guidelines.\n\nThanks for your understanding and happy coding!"
},
{
type: 'comment',
name: 'causedByExtension',
allowUsers: ['cleidigh', 'usernamehw', 'gjsjohnmurray', 'IllusionMH'],
action: 'updateLabels',
addLabel: '*caused-by-extension'
},
{
type: 'label',
name: '*caused-by-extension',
allowTriggerByBot: true,
action: 'close',
comment: "This issue is caused by an extension, please file it with the repository (or contact) the extension has linked in its overview in VS Code or the [marketplace](https://aka.ms/vscodemarketplace) for VS Code. See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines.\n\nHappy Coding!"
},
{
type: 'label',
name: '*as-designed',
allowTriggerByBot: true,
action: 'close',
comment: "The described behavior is how it is expected to work. If you disagree, please explain what is expected and what is not in more detail. See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines.\n\nHappy Coding!"
},
{
type: 'label',
name: '*english-please',
allowTriggerByBot: true,
action: 'close',
comment: "This issue is being closed because its description is not in English, that makes it hard for us to work on it. Please open a new issue with an English description. You might find [Bing Translator](https://www.bing.com/translator) useful."
},
{
type: 'comment',
name: 'duplicate',
allowUsers: ['cleidigh', 'usernamehw'],
allowUsers: ['cleidigh', 'usernamehw', 'gjsjohnmurray', 'IllusionMH'],
action: 'updateLabels',
addLabel: '*duplicate'
},
@ -68,7 +88,7 @@
{
type: 'comment',
name: 'confirm',
allowUsers: ['cleidigh', 'usernamehw'],
allowUsers: ['cleidigh', 'usernamehw', 'gjsjohnmurray', 'IllusionMH'],
action: 'updateLabels',
addLabel: 'confirmed',
removeLabel: 'confirmation-pending'
@ -84,14 +104,14 @@
{
type: 'comment',
name: 'findDuplicates',
allowUsers: ['cleidigh', 'usernamehw'],
allowUsers: ['cleidigh', 'usernamehw', 'gjsjohnmurray', 'IllusionMH'],
action: 'comment',
comment: "Potential duplicates:\n${potentialDuplicates}"
},
{
type: 'comment',
name: 'needsMoreInfo',
allowUsers: ['cleidigh', 'usernamehw'],
allowUsers: ['cleidigh', 'usernamehw', 'gjsjohnmurray', 'IllusionMH'],
action: 'updateLabels',
addLabel: 'needs more info',
comment: "Thanks for creating this issue! We figured it's missing some basic information or in some other way doesn't follow our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines. Please take the time to review these and update the issue.\n\nHappy Coding!"
@ -99,7 +119,7 @@
{
type: 'comment',
name: 'a11ymas',
allowUsers: ['AccessibilityTestingTeam-TCS'],
allowUsers: ['AccessibilityTestingTeam-TCS', 'dixitsonali95', 'Mohini78', 'ChitrarupaSharma', 'mspatil110', 'umasarath52', 'v-umnaik'],
action: 'updateLabels',
addLabel: 'a11ymas'
},
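
The command entries above follow a small set of shapes. Inferred only from the fields visible in this diff, each entry could be described roughly as follows (the interface is an illustrative sketch, not part of the repository):

// Inferred from the fields used in .github/commands.yml above; illustrative only.
interface TriageCommand {
	type: 'comment' | 'label';
	name: string;                  // comment keyword (e.g. 'confirm') or label name (e.g. '*duplicate')
	action: 'updateLabels' | 'comment' | 'close';
	allowUsers?: string[];         // who may trigger a comment command
	allowTriggerByBot?: boolean;   // lets the bot itself trigger a label command
	addLabel?: string;
	removeLabel?: string;
	comment?: string;              // text posted when labeling or closing
}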

9
.github/pull_request_template.md vendored Normal file

@ -0,0 +1,9 @@
<!-- Thank you for submitting a Pull Request. Please:
* Read our Pull Request guidelines:
https://github.com/Microsoft/vscode/wiki/How-to-Contribute#pull-requests.
* Associate an issue with the Pull Request.
* Ensure that the code is up-to-date with the `master` branch.
* Include a description of the proposed changes and how to test them.
-->
This PR fixes #

112
.github/workflows/ci.yml vendored Normal file

@ -0,0 +1,112 @@
name: CI
on:
push:
branches:
- master
- release/*
pull_request:
branches:
- master
- release/*
jobs:
linux:
runs-on: ubuntu-latest
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
# TODO: rename azure-pipelines/linux/xvfb.init to github-actions
- run: |
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
name: Setup Build Environment
- uses: actions/setup-node@v1
with:
node-version: 10
# TODO: cache node modules
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron x64
name: Download Electron
- run: yarn gulp hygiene --skip-tslint
name: Run Hygiene Checks
- run: yarn gulp tslint
name: Run TSLint Checks
- run: yarn monaco-compile-check
name: Run Monaco Editor Checks
- run: yarn compile
name: Compile Sources
- run: yarn download-builtin-extensions
name: Download Built-in Extensions
- run: DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
name: Run Unit Tests
- run: DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
name: Run Integration Tests
windows:
runs-on: windows-2016
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
- uses: actions/setup-node@v1
with:
node-version: 10
- uses: actions/setup-python@v1
with:
python-version: '2.x'
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron
name: Download Electron
- run: yarn gulp hygiene --skip-tslint
name: Run Hygiene Checks
- run: yarn gulp tslint
name: Run TSLint Checks
- run: yarn monaco-compile-check
name: Run Monaco Editor Checks
- run: yarn compile
name: Compile Sources
- run: yarn download-builtin-extensions
name: Download Built-in Extensions
- run: .\scripts\test.bat --tfs "Unit Tests"
name: Run Unit Tests
- run: .\scripts\test-integration.bat --tfs "Integration Tests"
name: Run Integration Tests
darwin:
runs-on: macos-latest
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
- uses: actions/setup-node@v1
with:
node-version: 10
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron x64
name: Download Electron
- run: yarn gulp hygiene --skip-tslint
name: Run Hygiene Checks
- run: yarn gulp tslint
name: Run TSLint Checks
- run: yarn monaco-compile-check
name: Run Monaco Editor Checks
- run: yarn compile
name: Compile Sources
- run: yarn download-builtin-extensions
name: Download Built-in Extensions
- run: ./scripts/test.sh --tfs "Unit Tests"
name: Run Unit Tests
- run: ./scripts/test-integration.sh --tfs "Integration Tests"
name: Run Integration Tests

11
.gitignore vendored

@ -1,4 +1,5 @@
.DS_Store
.cache
npm-debug.log
Thumbs.db
node_modules/
@ -14,6 +15,16 @@ out-editor-min/
out-monaco-editor-core/
out-vscode/
out-vscode-min/
out-vscode-reh/
out-vscode-reh-min/
out-vscode-reh-pkg/
out-vscode-reh-web/
out-vscode-reh-web-min/
out-vscode-reh-web-pkg/
out-vscode-web/
out-vscode-web-min/
src/vs/server
resources/server
build/node_modules
coverage/
test_data/

6
.prettierrc.json Normal file

@ -0,0 +1,6 @@
{
"useTabs": true,
"printWidth": 120,
"semi": true,
"singleQuote": true
}


@ -1,23 +1,61 @@
{
"type": "array",
"items": {
"type": "object",
"required": [
"name",
"licenseDetail"
],
"properties": {
"name": {
"type": "string",
"description": "The name of the dependency"
"oneOf": [
{
"type": "object",
"required": [
"name",
"prependLicenseText"
],
"properties": {
"name": {
"type": "string",
"description": "The name of the dependency"
},
"fullLicenseText": {
"type": "array",
"description": "The complete license text of the dependency",
"items": {
"type": "string"
}
},
"prependLicenseText": {
"type": "array",
"description": "A piece of text to prepend to the auto-detected license text of the dependency",
"items": {
"type": "string"
}
}
}
},
"licenseDetail": {
"type": "array",
"description": "The complete license text of the dependency",
"items": {
"type": "string"
{
"type": "object",
"required": [
"name",
"fullLicenseText"
],
"properties": {
"name": {
"type": "string",
"description": "The name of the dependency"
},
"fullLicenseText": {
"type": "array",
"description": "The complete license text of the dependency",
"items": {
"type": "string"
}
},
"prependLicenseText": {
"type": "array",
"description": "A piece of text to prepend to the auto-detected license text of the dependency",
"items": {
"type": "string"
}
}
}
}
}
]
}
}
}
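
To make the new oneOf structure concrete, the following literal (written in TypeScript purely for illustration; the dependency names and license text are made up) shows one entry for each branch, the first supplying prependLicenseText and the second fullLicenseText:

// Hypothetical entries that would validate against the updated schema.
const exampleOverrides = [
	{
		// First branch: requires "name" and "prependLicenseText".
		name: 'some-dependency',
		prependLicenseText: ['This component is redistributed under the license below.']
	},
	{
		// Second branch: requires "name" and "fullLicenseText".
		name: 'another-dependency',
		fullLicenseText: ['MIT License', 'Copyright (c) <year> <author>']
	}
];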


@ -4,6 +4,7 @@
"recommendations": [
"ms-vscode.vscode-typescript-tslint-plugin",
"dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig",
"msjsdiag.debugger-for-chrome"
]
}

28
.vscode/launch.json vendored

@ -123,8 +123,7 @@
"request": "launch",
"name": "Launch VS Code",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/code.bat",
"timeout": 20000
"runtimeExecutable": "${workspaceFolder}/scripts/code.bat"
},
"osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/code.sh"
@ -132,6 +131,7 @@
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/code.sh"
},
"timeout": 20000,
"env": {
"VSCODE_EXTHOST_WILL_SEND_SOCKET": null
},
@ -148,6 +148,9 @@
"request": "launch",
"name": "Launch VS Code (Main Process)",
"runtimeExecutable": "${workspaceFolder}/scripts/code.sh",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/code.bat",
},
"runtimeArgs": [
"--no-cached-data"
],
@ -155,6 +158,22 @@
"${workspaceFolder}/out/**/*.js"
]
},
{
"type": "node",
"request": "launch",
"name": "Launch VS Code (Web)",
"runtimeExecutable": "yarn",
"runtimeArgs": [
"web"
],
},
{
"type": "chrome",
"request": "launch",
"name": "Launch VS Code (Web, Chrome)",
"url": "http://localhost:8080",
"preLaunchTask": "Run web"
},
{
"type": "node",
"request": "launch",
@ -213,7 +232,10 @@
"cwd": "${workspaceFolder}",
"outFiles": [
"${workspaceFolder}/out/**/*.js"
]
],
"env": {
"MOCHA_COLORS": "true"
}
},
{
"type": "node",


@ -56,5 +56,10 @@
"url": "./.vscode/cglicenses.schema.json"
}
],
"git.ignoreLimitWarning": true
}
"git.ignoreLimitWarning": true,
"remote.extensionKind": {
"msjsdiag.debugger-for-chrome": "workspace"
},
"gulp.autoDetect": "off",
"files.insertFinalNewline": true
}

46
.vscode/tasks.json vendored

@ -5,7 +5,10 @@
"type": "npm",
"script": "watch",
"label": "Build VS Code",
"group": "build",
"group": {
"kind": "build",
"isDefault": true
},
"isBackground": true,
"presentation": {
"reveal": "never"
@ -28,6 +31,20 @@
}
}
},
{
"type": "npm",
"script": "strict-initialization-watch",
"label": "TS - Strict Initialization",
"isBackground": true,
"presentation": {
"reveal": "never"
},
"problemMatcher": {
"base": "$tsc-watch",
"owner": "typescript-strict-initialization",
"applyTo": "allDocuments"
}
},
{
"type": "gulp",
"task": "tslint",
@ -59,14 +76,33 @@
"problemMatcher": []
},
{
"type": "gulp",
"task": "electron",
"type": "npm",
"script": "electron",
"label": "Download electron"
},
{
"type": "gulp",
"task": "hygiene",
"problemMatcher": []
}
},
{
"type": "shell",
"command": "yarn web -- --no-launch",
"label": "Run web",
"isBackground": true,
// This section to make error go away when launching the debug config
"problemMatcher": {
"pattern": {
"regexp": ""
},
"background": {
"beginsPattern": ".*node .*",
"endsPattern": "Web UI available at .*"
}
},
"presentation": {
"reveal": "never"
}
},
]
}
}


@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron"
target "3.1.6"
target "6.0.12"
runtime "electron"


@ -1 +0,0 @@
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.


@ -1,4 +1,4 @@
# Visual Studio Code - Open Source
# Visual Studio Code - Open Source ("Code - OSS")
<!-- [![Build Status](https://dev.azure.com/vscode/VSCode/_apis/build/status/VS%20Code?branchName=master)](https://aka.ms/vscode-builds) -->
[![Build Status](https://dev.azure.com/vscode/VSCode/_apis/build/status/VS%20Code?branchName=master)](https://dev.azure.com/vscode/VSCode/_build/latest?definitionId=12)
@ -6,55 +6,64 @@
[![Bugs](https://img.shields.io/github/issues/Microsoft/vscode/bug.svg)](https://github.com/Microsoft/vscode/issues?utf8=✓&q=is%3Aissue+is%3Aopen+label%3Abug)
[![Gitter](https://img.shields.io/badge/chat-on%20gitter-yellow.svg)](https://gitter.im/Microsoft/vscode)
[VS Code](https://code.visualstudio.com) is a type of tool that combines the simplicity of
a code editor with what developers need for their core edit-build-debug cycle. It provides comprehensive editing and debugging support, an extensibility model, and lightweight integration with existing tools.
## The Repository
VS Code is updated monthly with new features and bug fixes. You can download it for Windows, macOS, and Linux on [VS Code's website](https://code.visualstudio.com/Download). To get the latest releases every day, you can install the [Insiders version of VS Code](https://code.visualstudio.com/insiders). This builds from the master branch and is updated daily at the very least.
This repository ("`Code - OSS`") is where we (Microsoft) develop the [Visual Studio Code](https://code.visualstudio.com) product. Not only do we work on code and issues here, we also publish our [roadmap](https://github.com/microsoft/vscode/wiki/Roadmap), [monthly iteration plans](https://github.com/microsoft/vscode/wiki/Iteration-Plans), and our [endgame plans](https://github.com/microsoft/vscode/wiki/Running-the-Endgame). This source code is available to everyone under the standard [MIT license](https://github.com/microsoft/vscode/blob/master/LICENSE.txt).
## Visual Studio Code
<p align="center">
<img alt="VS Code in action" src="https://cloud.githubusercontent.com/assets/11839736/16642200/6624dde0-43bd-11e6-8595-c81885ba0dc2.png">
<img alt="VS Code in action" src="https://user-images.githubusercontent.com/1487073/58344409-70473b80-7e0a-11e9-8570-b2efc6f8fa44.png">
</p>
The [`vscode`](https://github.com/microsoft/vscode) repository is where VS Code is developed and there are many ways in which you can participate in the project, for example:
[Visual Studio Code](https://code.visualstudio.com) is a distribution of the `Code - OSS` repository with Microsoft specific customizations released under a traditional [Microsoft product license](https://code.visualstudio.com/License/).
[Visual Studio Code](https://code.visualstudio.com) combines the simplicity of a code editor with what developers need for their core edit-build-debug cycle. It provides comprehensive code editing, navigation, and understanding support along with lightweight debugging, a rich extensibility model, and lightweight integration with existing tools.
Visual Studio Code is updated monthly with new features and bug fixes. You can download it for Windows, macOS, and Linux on [Visual Studio Code's website](https://code.visualstudio.com/Download). To get the latest releases every day, install the [Insiders build](https://code.visualstudio.com/insiders).
* [Submit bugs and feature requests](https://github.com/microsoft/vscode/issues) and help us verify as they are checked in.
* Review [source code changes](https://github.com/microsoft/vscode/pulls).
* Review the [documentation](https://github.com/microsoft/vscode-docs) and make pull requests for anything from typos to new content.
## Contributing
There are many ways in which you can participate in the project, for example:
* [Submit bugs and feature requests](https://github.com/microsoft/vscode/issues), and help us verify as they are checked in
* Review [source code changes](https://github.com/microsoft/vscode/pulls)
* Review the [documentation](https://github.com/microsoft/vscode-docs) and make pull requests for anything from typos to new content
If you are interested in fixing issues and contributing directly to the code base,
please see the document [How to Contribute](https://github.com/Microsoft/vscode/wiki/How-to-Contribute), which covers the following:
* [How to build and run from source](https://github.com/Microsoft/vscode/wiki/How-to-Contribute#build-and-run)
* [The development workflow, including debugging and running tests](https://github.com/Microsoft/vscode/wiki/How-to-Contribute#debugging)
* [Coding Guidelines](https://github.com/Microsoft/vscode/wiki/Coding-Guidelines)
* [Coding guidelines](https://github.com/Microsoft/vscode/wiki/Coding-Guidelines)
* [Submitting pull requests](https://github.com/Microsoft/vscode/wiki/How-to-Contribute#pull-requests)
* [Finding an issue to work on](https://github.com/microsoft/vscode/wiki/How-to-Contribute#where-to-contribute)
* [Contributing to translations](https://aka.ms/vscodeloc)
Please also see our [Code of Conduct](CODE_OF_CONDUCT.md).
## Feedback
* Ask a question on [Stack Overflow](https://stackoverflow.com/questions/tagged/vscode).
* Request a new feature on [GitHub](CONTRIBUTING.md).
* Vote for [Popular Feature Requests](https://github.com/Microsoft/vscode/issues?q=is%3Aopen+is%3Aissue+label%3Afeature-request+sort%3Areactions-%2B1-desc).
* File a bug in [GitHub Issues](https://github.com/Microsoft/vscode/issues).
* [Tweet](https://twitter.com/code) us with any other feedback.
* Ask a question on [Stack Overflow](https://stackoverflow.com/questions/tagged/vscode)
* [Request a new feature](CONTRIBUTING.md)
* Up vote [popular feature requests](https://github.com/Microsoft/vscode/issues?q=is%3Aopen+is%3Aissue+label%3Afeature-request+sort%3Areactions-%2B1-desc)
* [File an issue](https://github.com/Microsoft/vscode/issues)
* Follow [@code](https://twitter.com/code) and let us know what you think!
## Related Projects
Many of the core components and extensions to Code live in their own repositories on GitHub. For example, the [node debug adapter](https://github.com/microsoft/vscode-node-debug) and the [mono debug adapter](https://github.com/microsoft/vscode-mono-debug) have their own repositories.
For a complete list, please visit the [Related Projects](https://github.com/Microsoft/vscode/wiki/Related-Projects) page on our [wiki](https://github.com/Microsoft/vscode/wiki).
Many of the core components and extensions to Code live in their own repositories on GitHub. For example, the [node debug adapter](https://github.com/microsoft/vscode-node-debug) and the [mono debug adapter](https://github.com/microsoft/vscode-mono-debug) have their own repositories. For a complete list, please visit the [Related Projects](https://github.com/Microsoft/vscode/wiki/Related-Projects) page on our [wiki](https://github.com/Microsoft/vscode/wiki).
## Bundled Extensions
Code ships with a set of extensions. These extensions are located in the [extensions](extensions) folder.
These extensions include grammars and snippets for several languages. Extensions that provide rich language support (code completion, go to definition) for a language have the suffix 'language-features'. For example, the 'json' extension provides coloring for JSON and the 'json-language-features' provides rich language support for JSON.
Code includes a set of built-in extensions located in the [extensions](extensions) folder, including grammars and snippets for many languages. Extensions that provide rich language support (code completion, Go to Definition) for a language have the suffix `language-features`. For example, the `json` extension provides coloring for `JSON` and the `json-language-features` provides rich language support for `JSON`.
## Code of Conduct
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
## License
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the [MIT](LICENSE.txt) License.
Licensed under the [MIT](LICENSE.txt) license.


@ -5,68 +5,69 @@ Do Not Translate or Localize
This project incorporates components from the projects listed below. The original copyright notices and the licenses under which Microsoft received such components are set forth below. Microsoft reserves all rights not expressly granted herein, whether by implication, estoppel or otherwise.
1. atom/language-clojure version 0.22.6 (https://github.com/atom/language-clojure)
1. atom/language-clojure version 0.22.7 (https://github.com/atom/language-clojure)
2. atom/language-coffee-script version 0.49.3 (https://github.com/atom/language-coffee-script)
3. atom/language-java version 0.31.1 (https://github.com/atom/language-java)
4. atom/language-objective-c version 0.15.0 (https://github.com/atom/language-objective-c)
5. atom/language-sass version 0.61.4 (https://github.com/atom/language-sass)
6. atom/language-shellscript version 0.26.0 (https://github.com/atom/language-shellscript)
7. atom/language-xml version 0.35.2 (https://github.com/atom/language-xml)
8. Colorsublime-Themes version 0.1.0 (https://github.com/Colorsublime/Colorsublime-Themes)
9. daaain/Handlebars version 1.7.1 (https://github.com/daaain/Handlebars)
10. davidrios/pug-tmbundle (https://github.com/davidrios/pug-tmbundle)
11. definitelytyped (https://github.com/DefinitelyTyped/DefinitelyTyped)
12. demyte/language-cshtml version 0.3.0 (https://github.com/demyte/language-cshtml)
13. Document Object Model version 4.0.0 (https://www.w3.org/DOM/)
14. dotnet/csharp-tmLanguage version 0.1.0 (https://github.com/dotnet/csharp-tmLanguage)
15. expand-abbreviation version 0.5.8 (https://github.com/emmetio/expand-abbreviation)
16. fadeevab/make.tmbundle (https://github.com/fadeevab/make.tmbundle)
17. freebroccolo/atom-language-swift (https://github.com/freebroccolo/atom-language-swift)
18. HTML 5.1 W3C Working Draft version 08 October 2015 (http://www.w3.org/TR/2015/WD-html51-20151008/)
19. Ikuyadeu/vscode-R version 0.5.5 (https://github.com/Ikuyadeu/vscode-R)
3. atom/language-java version 0.31.3 (https://github.com/atom/language-java)
4. atom/language-sass version 0.61.4 (https://github.com/atom/language-sass)
5. atom/language-shellscript version 0.26.0 (https://github.com/atom/language-shellscript)
6. atom/language-xml version 0.35.2 (https://github.com/atom/language-xml)
7. Colorsublime-Themes version 0.1.0 (https://github.com/Colorsublime/Colorsublime-Themes)
8. daaain/Handlebars version 1.8.0 (https://github.com/daaain/Handlebars)
9. davidrios/pug-tmbundle (https://github.com/davidrios/pug-tmbundle)
10. definitelytyped (https://github.com/DefinitelyTyped/DefinitelyTyped)
11. demyte/language-cshtml version 0.3.0 (https://github.com/demyte/language-cshtml)
12. Document Object Model version 4.0.0 (https://www.w3.org/DOM/)
13. dotnet/csharp-tmLanguage version 0.1.0 (https://github.com/dotnet/csharp-tmLanguage)
14. expand-abbreviation version 0.5.8 (https://github.com/emmetio/expand-abbreviation)
15. fadeevab/make.tmbundle (https://github.com/fadeevab/make.tmbundle)
16. freebroccolo/atom-language-swift (https://github.com/freebroccolo/atom-language-swift)
17. HTML 5.1 W3C Working Draft version 08 October 2015 (http://www.w3.org/TR/2015/WD-html51-20151008/)
18. Ikuyadeu/vscode-R version 0.5.5 (https://github.com/Ikuyadeu/vscode-R)
19. insane version 2.6.2 (https://github.com/bevacqua/insane)
20. Ionic documentation version 1.2.4 (https://github.com/ionic-team/ionic-site)
21. ionide/ionide-fsgrammar (https://github.com/ionide/ionide-fsgrammar)
22. jeff-hykin/cpp-textmate-grammar version 1.4.5 (https://github.com/jeff-hykin/cpp-textmate-grammar)
23. js-beautify version 1.6.8 (https://github.com/beautify-web/js-beautify)
24. Jxck/assert version 1.0.0 (https://github.com/Jxck/assert)
25. language-docker (https://github.com/moby/moby)
26. language-go version 0.44.3 (https://github.com/atom/language-go)
27. language-less version 0.34.2 (https://github.com/atom/language-less)
28. language-php version 0.44.1 (https://github.com/atom/language-php)
29. language-rust version 0.4.12 (https://github.com/zargony/atom-language-rust)
30. MagicStack/MagicPython version 1.1.1 (https://github.com/MagicStack/MagicPython)
31. marked version 0.5.0 (https://github.com/markedjs/marked)
32. mdn-data version 1.1.12 (https://github.com/mdn/data)
33. Microsoft/TypeScript-TmLanguage version 0.0.1 (https://github.com/Microsoft/TypeScript-TmLanguage)
34. Microsoft/vscode-JSON.tmLanguage (https://github.com/Microsoft/vscode-JSON.tmLanguage)
35. Microsoft/vscode-mssql version 1.4.0 (https://github.com/Microsoft/vscode-mssql)
36. mmims/language-batchfile version 0.7.5 (https://github.com/mmims/language-batchfile)
37. octicons version 8.3.0 (https://github.com/primer/octicons)
38. octref/language-css (https://github.com/octref/language-css)
39. PowerShell/EditorSyntax (https://github.com/powershell/editorsyntax)
40. promise-polyfill version 8.0.0 (https://github.com/taylorhakes/promise-polyfill)
41. seti-ui version 0.1.0 (https://github.com/jesseweed/seti-ui)
42. shaders-tmLanguage version 0.1.0 (https://github.com/tgjones/shaders-tmLanguage)
43. textmate/asp.vb.net.tmbundle (https://github.com/textmate/asp.vb.net.tmbundle)
44. textmate/c.tmbundle (https://github.com/textmate/c.tmbundle)
45. textmate/diff.tmbundle (https://github.com/textmate/diff.tmbundle)
46. textmate/git.tmbundle (https://github.com/textmate/git.tmbundle)
47. textmate/groovy.tmbundle (https://github.com/textmate/groovy.tmbundle)
48. textmate/html.tmbundle (https://github.com/textmate/html.tmbundle)
49. textmate/ini.tmbundle (https://github.com/textmate/ini.tmbundle)
50. textmate/javascript.tmbundle (https://github.com/textmate/javascript.tmbundle)
51. textmate/lua.tmbundle (https://github.com/textmate/lua.tmbundle)
52. textmate/markdown.tmbundle (https://github.com/textmate/markdown.tmbundle)
53. textmate/perl.tmbundle (https://github.com/textmate/perl.tmbundle)
54. textmate/ruby.tmbundle (https://github.com/textmate/ruby.tmbundle)
55. textmate/yaml.tmbundle (https://github.com/textmate/yaml.tmbundle)
56. TypeScript-TmLanguage version 0.1.8 (https://github.com/Microsoft/TypeScript-TmLanguage)
57. TypeScript-TmLanguage version 1.0.0 (https://github.com/Microsoft/TypeScript-TmLanguage)
58. Unicode version 12.0.0 (http://www.unicode.org/)
59. vscode-logfile-highlighter version 2.4.1 (https://github.com/emilast/vscode-logfile-highlighter)
60. vscode-octicons-font version 1.0.0 (https://github.com/Microsoft/vscode-octicons-font)
61. vscode-swift version 0.0.1 (https://github.com/owensd/vscode-swift)
62. Web Background Synchronization (https://github.com/WICG/BackgroundSync)
22. jeff-hykin/cpp-textmate-grammar version 1.12.11 (https://github.com/jeff-hykin/cpp-textmate-grammar)
23. jeff-hykin/cpp-textmate-grammar version 1.14.6 (https://github.com/jeff-hykin/cpp-textmate-grammar)
24. js-beautify version 1.6.8 (https://github.com/beautify-web/js-beautify)
25. Jxck/assert version 1.0.0 (https://github.com/Jxck/assert)
26. language-docker (https://github.com/moby/moby)
27. language-go version 0.44.3 (https://github.com/atom/language-go)
28. language-less version 0.34.2 (https://github.com/atom/language-less)
29. language-php version 0.44.1 (https://github.com/atom/language-php)
30. language-rust version 0.4.12 (https://github.com/zargony/atom-language-rust)
31. MagicStack/MagicPython version 1.1.1 (https://github.com/MagicStack/MagicPython)
32. marked version 0.6.2 (https://github.com/markedjs/marked)
33. mdn-data version 1.1.12 (https://github.com/mdn/data)
34. Microsoft/TypeScript-TmLanguage version 0.0.1 (https://github.com/Microsoft/TypeScript-TmLanguage)
35. Microsoft/vscode-JSON.tmLanguage (https://github.com/Microsoft/vscode-JSON.tmLanguage)
36. Microsoft/vscode-mssql version 1.6.0 (https://github.com/Microsoft/vscode-mssql)
37. mmims/language-batchfile version 0.7.5 (https://github.com/mmims/language-batchfile)
38. octicons version 8.3.0 (https://github.com/primer/octicons)
39. octref/language-css version 0.42.11 (https://github.com/octref/language-css)
40. PowerShell/EditorSyntax version 1.0.0 (https://github.com/PowerShell/EditorSyntax)
41. promise-polyfill version 8.0.0 (https://github.com/taylorhakes/promise-polyfill)
42. seti-ui version 0.1.0 (https://github.com/jesseweed/seti-ui)
43. shaders-tmLanguage version 0.1.0 (https://github.com/tgjones/shaders-tmLanguage)
44. textmate/asp.vb.net.tmbundle (https://github.com/textmate/asp.vb.net.tmbundle)
45. textmate/c.tmbundle (https://github.com/textmate/c.tmbundle)
46. textmate/diff.tmbundle (https://github.com/textmate/diff.tmbundle)
47. textmate/git.tmbundle (https://github.com/textmate/git.tmbundle)
48. textmate/groovy.tmbundle (https://github.com/textmate/groovy.tmbundle)
49. textmate/html.tmbundle (https://github.com/textmate/html.tmbundle)
50. textmate/ini.tmbundle (https://github.com/textmate/ini.tmbundle)
51. textmate/javascript.tmbundle (https://github.com/textmate/javascript.tmbundle)
52. textmate/lua.tmbundle (https://github.com/textmate/lua.tmbundle)
53. textmate/markdown.tmbundle (https://github.com/textmate/markdown.tmbundle)
54. textmate/perl.tmbundle (https://github.com/textmate/perl.tmbundle)
55. textmate/ruby.tmbundle (https://github.com/textmate/ruby.tmbundle)
56. textmate/yaml.tmbundle (https://github.com/textmate/yaml.tmbundle)
57. TypeScript-TmLanguage version 0.1.8 (https://github.com/Microsoft/TypeScript-TmLanguage)
58. TypeScript-TmLanguage version 1.0.0 (https://github.com/Microsoft/TypeScript-TmLanguage)
59. Unicode version 12.0.0 (http://www.unicode.org/)
60. vscode-logfile-highlighter version 2.4.1 (https://github.com/emilast/vscode-logfile-highlighter)
61. vscode-octicons-font version 1.3.2 (https://github.com/Microsoft/vscode-octicons-font)
62. vscode-swift version 0.0.1 (https://github.com/owensd/vscode-swift)
63. Web Background Synchronization (https://github.com/WICG/BackgroundSync)
%% atom/language-clojure NOTICES AND INFORMATION BEGIN HERE
@ -213,43 +214,6 @@ suitability for any purpose.
=========================================
END OF atom/language-java NOTICES AND INFORMATION
%% atom/language-objective-c NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) 2014 GitHub Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
This package was derived from a TextMate bundle located at
https://github.com/textmate/objective-c.tmbundle and distributed under the following
license, located in `README.mdown`:
Permission to copy, use, modify, sell and distribute this
software is granted. This software is provided "as is" without
express or implied warranty, and with no claim as to its
suitability for any purpose.
=========================================
END OF atom/language-objective-c NOTICES AND INFORMATION
%% atom/language-sass NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
@ -648,6 +612,31 @@ SOFTWARE.
=========================================
END OF Ikuyadeu/vscode-R NOTICES AND INFORMATION
%% insane NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright © 2015 Nicolas Bevacqua
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF insane NOTICES AND INFORMATION
%% Ionic documentation NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright Drifty Co. http://drifty.com/.

1
build/.cachesalt Normal file
View file

@ -0,0 +1 @@
2019-08-30T20:24:23.714Z

107
build/.nativeignore Normal file

@ -0,0 +1,107 @@
# cleanup rules for native node modules, .gitignore style
nan/**
*/node_modules/nan/**
fsevents/binding.gyp
fsevents/fsevents.cc
fsevents/build/**
fsevents/src/**
fsevents/test/**
!fsevents/**/*.node
vscode-sqlite3/binding.gyp
vscode-sqlite3/benchmark/**
vscode-sqlite3/cloudformation/**
vscode-sqlite3/deps/**
vscode-sqlite3/test/**
vscode-sqlite3/build/**
vscode-sqlite3/src/**
!vscode-sqlite3/build/Release/*.node
oniguruma/binding.gyp
oniguruma/build/**
oniguruma/src/**
oniguruma/deps/**
!oniguruma/build/Release/*.node
!oniguruma/src/*.js
windows-mutex/binding.gyp
windows-mutex/build/**
windows-mutex/src/**
!windows-mutex/**/*.node
native-keymap/binding.gyp
native-keymap/build/**
native-keymap/src/**
native-keymap/deps/**
!native-keymap/build/Release/*.node
native-is-elevated/binding.gyp
native-is-elevated/build/**
native-is-elevated/src/**
native-is-elevated/deps/**
!native-is-elevated/build/Release/*.node
native-watchdog/binding.gyp
native-watchdog/build/**
native-watchdog/src/**
!native-watchdog/build/Release/*.node
spdlog/binding.gyp
spdlog/build/**
spdlog/deps/**
spdlog/src/**
spdlog/test/**
!spdlog/build/Release/*.node
jschardet/dist/**
windows-foreground-love/binding.gyp
windows-foreground-love/build/**
windows-foreground-love/src/**
!windows-foreground-love/**/*.node
windows-process-tree/binding.gyp
windows-process-tree/build/**
windows-process-tree/src/**
!windows-process-tree/**/*.node
keytar/binding.gyp
keytar/build/**
keytar/src/**
keytar/script/**
keytar/node_modules/**
!keytar/**/*.node
node-pty/binding.gyp
node-pty/build/**
node-pty/src/**
node-pty/tools/**
node-pty/deps/**
!node-pty/build/Release/*.exe
!node-pty/build/Release/*.dll
!node-pty/build/Release/*.node
nsfw/binding.gyp
nsfw/build/**
nsfw/src/**
nsfw/openpa/**
nsfw/includes/**
!nsfw/build/Release/*.node
!nsfw/**/*.a
vsda/build/**
vsda/ci/**
vsda/src/**
vsda/.gitignore
vsda/binding.gyp
vsda/README.md
vsda/targets
!vsda/build/Release/vsda.node
vscode-windows-ca-certs/**/*
!vscode-windows-ca-certs/package.json
!vscode-windows-ca-certs/**/*.node
node-addon-api/**/*


@ -0,0 +1,19 @@
#!/usr/bin/env bash
set -e
cd $BUILD_STAGINGDIRECTORY
mkdir extraction
cd extraction
git clone --depth 1 https://github.com/Microsoft/vscode-extension-telemetry.git
git clone --depth 1 https://github.com/Microsoft/vscode-chrome-debug-core.git
git clone --depth 1 https://github.com/Microsoft/vscode-node-debug2.git
git clone --depth 1 https://github.com/Microsoft/vscode-node-debug.git
git clone --depth 1 https://github.com/Microsoft/vscode-html-languageservice.git
git clone --depth 1 https://github.com/Microsoft/vscode-json-languageservice.git
$BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints
$BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o .
mkdir -p $BUILD_SOURCESDIRECTORY/.build/telemetry
mv declarations-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-core.json
mv config-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-extensions.json
cd ..
rm -rf extraction


@ -1,18 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as cp from 'child_process';
function yarnInstall(packageName: string): void {
cp.execSync(`yarn add --no-lockfile ${packageName}`);
}
const product = require('../../../product.json');
const dependencies = product.dependencies || {} as { [name: string]: string; };
Object.keys(dependencies).forEach(name => {
const url = dependencies[name];
yarnInstall(url);
});


@ -0,0 +1,9 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
# Publish webview contents
PACKAGEJSON="$REPO/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
node build/azure-pipelines/common/publish-webview.js "$REPO/src/vs/workbench/contrib/webview/browser/pre/"


@ -0,0 +1,87 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as azure from 'azure-storage';
import * as mime from 'mime';
import * as minimist from 'minimist';
import { basename, join } from 'path';
const fileNames = [
'fake.html',
'host.js',
'index.html',
'main.js',
'service-worker.js'
];
async function assertContainer(blobService: azure.BlobService, container: string): Promise<void> {
await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesBlobExist(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService: azure.BlobService, container: string, blobName: string, file: string): Promise<void> {
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit: string, files: readonly string[]): Promise<void> {
console.log('Publishing...');
console.log('Commit:', commit);
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT']!;
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY']!)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
const blobName = basename(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
continue;
}
console.log('Uploading blob to Azure storage...');
await uploadBlob(blobService, commit, blobName, file);
}
console.log('Blobs successfully uploaded.');
}
function main(): void {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
const files = fileNames.map(fileName => join(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
});
}
if (process.argv.length < 3) {
console.error('Usage: node publish.js <directory>');
process.exit(-1);
}
main();
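
This script wraps azure-storage's callback-style APIs in promises by hand, using the recurring new Promise((c, e) => ... err ? e(err) : c(result)) idiom that the other build scripts in this commit use as well. A generic helper capturing the same pattern might look like the following sketch (the helper name is illustrative and not part of the diff):

import * as azure from 'azure-storage';

// Sketch only: wrap an (err, result) callback API in a Promise.
function toPromise<T>(fn: (cb: (err: Error | null, result: T) => void) => void): Promise<T> {
	return new Promise<T>((c, e) => fn((err, result) => err ? e(err) : c(result)));
}

// Equivalent to the doesBlobExist helper above.
async function blobExists(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
	const result = await toPromise<azure.BlobService.BlobResult>(cb => blobService.doesBlobExist(container, blobName, cb));
	return result.exists;
}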


@ -64,7 +64,7 @@ interface Asset {
platform: string;
type: string;
url: string;
mooncakeUrl: string;
mooncakeUrl?: string;
hash: string;
sha256hash: string;
size: number;
@ -151,13 +151,6 @@ async function publish(commit: string, quality: string, platform: string, type:
const queuedBy = process.env['BUILD_QUEUEDBY']!;
const sourceBranch = process.env['BUILD_SOURCEBRANCH']!;
const isReleased = (
// Insiders: nightly build from master
(quality === 'insider' && /^master$|^refs\/heads\/master$/.test(sourceBranch) && /Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy)) ||
// Exploration: any build from electron-4.0.x branch
(quality === 'exploration' && /^electron-4.0.x$|^refs\/heads\/electron-4.0.x$/.test(sourceBranch))
);
console.log('Publishing...');
console.log('Quality:', quality);
@ -167,7 +160,6 @@ async function publish(commit: string, quality: string, platform: string, type:
console.log('Version:', version);
console.log('Commit:', commit);
console.log('Is Update:', isUpdate);
console.log('Is Released:', isReleased);
console.log('File:', file);
const stat = await new Promise<fs.Stats>((c, e) => fs.stat(file, (err, stat) => err ? e(err) : c(stat)));
@ -187,40 +179,18 @@ async function publish(commit: string, quality: string, platform: string, type:
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY']!, `${storageAccount}.blob.core.chinacloudapi.cn`)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, quality);
// mooncake is fussy and far away, this is needed!
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
const blobExists = await doesAssetExist(blobService, quality, blobName);
await Promise.all([
assertContainer(blobService, quality),
assertContainer(mooncakeBlobService, quality)
]);
const [blobExists, moooncakeBlobExists] = await Promise.all([
doesAssetExist(blobService, quality, blobName),
doesAssetExist(mooncakeBlobService, quality, blobName)
]);
const promises: Array<Promise<void>> = [];
if (!blobExists) {
promises.push(uploadBlob(blobService, quality, blobName, file));
}
if (!moooncakeBlobExists) {
promises.push(uploadBlob(mooncakeBlobService, quality, blobName, file));
}
if (promises.length === 0) {
if (blobExists) {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
}
console.log('Uploading blobs to Azure storage...');
await Promise.all(promises);
await uploadBlob(blobService, quality, blobName, file);
console.log('Blobs successfully uploaded.');
@ -232,7 +202,6 @@ async function publish(commit: string, quality: string, platform: string, type:
platform: platform,
type: type,
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
mooncakeUrl: `${process.env['MOONCAKE_CDN_URL']}/${quality}/${blobName}`,
hash: sha1hash,
sha256hash,
size
@ -249,7 +218,7 @@ async function publish(commit: string, quality: string, platform: string, type:
id: commit,
timestamp: (new Date()).getTime(),
version,
isReleased: config.frozen ? false : isReleased,
isReleased: false,
sourceBranch,
queuedBy,
assets: [] as Array<Asset>,
@ -268,11 +237,6 @@ async function publish(commit: string, quality: string, platform: string, type:
}
function main(): void {
if (process.env['VSCODE_BUILD_SKIP_PUBLISH']) {
console.warn('Skipping publish due to VSCODE_BUILD_SKIP_PUBLISH');
return;
}
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {


@ -0,0 +1,109 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { DocumentClient } from 'documentdb';
interface Config {
id: string;
frozen: boolean;
}
function createDefaultConfig(quality: string): Config {
return {
id: quality,
frozen: false
};
}
function getConfig(quality: string): Promise<Config> {
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/config';
const query = {
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
parameters: [
{ name: '@quality', value: quality }
]
};
return new Promise<Config>((c, e) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err && err.code !== 409) { return e(err); }
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0] as any as Config);
});
});
}
function doRelease(commit: string, quality: string): Promise<void> {
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/' + quality;
const query = {
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
parameters: [{ name: '@id', value: commit }]
};
let updateTries = 0;
function update(): Promise<void> {
updateTries++;
return new Promise<void>((c, e) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err) { return e(err); }
if (results.length !== 1) { return e(new Error('No documents')); }
const release = results[0];
release.isReleased = true;
client.replaceDocument(release._self, release, err => {
if (err && err.code === 409 && updateTries < 5) { return c(update()); }
if (err) { return e(err); }
console.log('Build successfully updated.');
c();
});
});
});
}
return update();
}
async function release(commit: string, quality: string): Promise<void> {
const config = await getConfig(quality);
console.log('Quality config:', config);
if (config.frozen) {
console.log(`Skipping release because quality ${quality} is frozen.`);
return;
}
await doRelease(commit, quality);
}
function env(name: string): string {
const result = process.env[name];
if (!result) {
throw new Error(`Skipping release due to missing env: ${name}`);
}
return result;
}
async function main(): Promise<void> {
const commit = env('BUILD_SOURCEVERSION');
const quality = env('VSCODE_QUALITY');
await release(commit, quality);
}
main().catch(err => {
console.error(err);
process.exit(1);
});
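
This script and the Mooncake sync script further below share the same optimistic-concurrency pattern against the builds database: read the build document, mutate it, call replaceDocument, and retry a few times when the server answers 409 because another writer replaced the document first. A generalized sketch of that loop, with an illustrative helper name and signature (the scripts themselves inline this logic):

import { DocumentClient, RetrievedDocument } from 'documentdb';

// Sketch only: the read-mutate-replace loop that update()/_update() implement inline.
async function updateWithRetry(
	client: DocumentClient,
	read: () => Promise<RetrievedDocument>,
	mutate: (doc: RetrievedDocument) => void,
	maxTries = 5
): Promise<void> {
	for (let attempt = 1; attempt <= maxTries; attempt++) {
		const doc = await read();   // re-read the document on every attempt
		mutate(doc);
		try {
			await new Promise<void>((c, e) => client.replaceDocument(doc._self, doc, err => err ? e(err) : c()));
			return;
		} catch (err) {
			// 409 means another writer won the race; try again with a fresh read.
			if ((err as any).code === 409 && attempt < maxTries) { continue; }
			throw err;
		}
	}
}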


@ -36,7 +36,6 @@ export interface IVersionAccessor extends IApplicationAccessor {
enum Platform {
WIN_32 = 'win32-ia32',
WIN_64 = 'win32-x64',
LINUX_32 = 'linux-ia32',
LINUX_64 = 'linux-x64',
MAC_OS = 'darwin-x64'
}
@ -147,6 +146,10 @@ async function ensureVersionAndSymbols(options: IOptions) {
// Check version does not exist
console.log(`HockeyApp: checking for existing version ${options.versions.code} (${options.platform})`);
const versions = await getVersions({ accessToken: options.access.hockeyAppToken, appId: options.access.hockeyAppId });
if (!Array.isArray(versions.app_versions)) {
throw new Error(`Unexpected response: ${JSON.stringify(versions)}`);
}
if (versions.app_versions.some(v => v.version === options.versions.code)) {
console.log(`HockeyApp: Returning without uploading symbols because version ${options.versions.code} (${options.platform}) was already found`);
return;
@ -185,13 +188,17 @@ const hockeyAppToken = process.argv[3];
const is64 = process.argv[4] === 'x64';
const hockeyAppId = process.argv[5];
if (process.argv.length !== 6) {
throw new Error(`HockeyApp: Unexpected number of arguments. Got ${process.argv}`);
}
let platform: Platform;
if (process.platform === 'darwin') {
platform = Platform.MAC_OS;
} else if (process.platform === 'win32') {
platform = is64 ? Platform.WIN_64 : Platform.WIN_32;
} else {
platform = is64 ? Platform.LINUX_64 : Platform.LINUX_32;
platform = Platform.LINUX_64;
}
// Create version and upload symbols in HockeyApp
@ -212,7 +219,9 @@ if (repository && codeVersion && electronVersion && (product.quality === 'stable
}).then(() => {
console.log('HockeyApp: done');
}).catch(error => {
console.error(`HockeyApp: error (${error})`);
console.error(`HockeyApp: error ${error} (AppID: ${hockeyAppId})`);
return process.exit(1);
});
} else {
console.log(`HockeyApp: skipping due to unexpected context (repository: ${repository}, codeVersion: ${codeVersion}, electronVersion: ${electronVersion}, quality: ${product.quality})`);


@ -0,0 +1,171 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as url from 'url';
import * as azure from 'azure-storage';
import * as mime from 'mime';
import { DocumentClient, RetrievedDocument } from 'documentdb';
function log(...args: any[]) {
console.log(...[`[${new Date().toISOString()}]`, ...args]);
}
function error(...args: any[]) {
console.error(...[`[${new Date().toISOString()}]`, ...args]);
}
if (process.argv.length < 3) {
error('Usage: node sync-mooncake.js <quality>');
process.exit(-1);
}
interface Build extends RetrievedDocument {
assets: Asset[];
}
interface Asset {
platform: string;
type: string;
url: string;
mooncakeUrl: string;
hash: string;
sha256hash: string;
size: number;
supportsFastUpdate?: boolean;
}
function updateBuild(commit: string, quality: string, platform: string, type: string, asset: Asset): Promise<void> {
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/' + quality;
const updateQuery = {
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
parameters: [{ name: '@id', value: commit }]
};
let updateTries = 0;
function _update(): Promise<void> {
updateTries++;
return new Promise<void>((c, e) => {
client.queryDocuments(collection, updateQuery).toArray((err, results) => {
if (err) { return e(err); }
if (results.length !== 1) { return e(new Error('No documents')); }
const release = results[0];
release.assets = [
...release.assets.filter((a: any) => !(a.platform === platform && a.type === type)),
asset
];
client.replaceDocument(release._self, release, err => {
if (err && err.code === 409 && updateTries < 5) { return c(_update()); }
if (err) { return e(err); }
log('Build successfully updated.');
c();
});
});
});
}
return _update();
}
async function sync(commit: string, quality: string): Promise<void> {
log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);
const cosmosdb = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = `dbs/builds/colls/${quality}`;
const query = {
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
parameters: [{ name: '@id', value: commit }]
};
const build = await new Promise<Build>((c, e) => {
cosmosdb.queryDocuments(collection, query).toArray((err, results) => {
if (err) { return e(err); }
if (results.length !== 1) { return e(new Error('No documents')); }
c(results[0] as Build);
});
});
log(`Found build for ${commit}, with ${build.assets.length} assets`);
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2']!;
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY']!, `${storageAccount}.blob.core.chinacloudapi.cn`)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
// mooncake is fussy and far away, this is needed!
blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
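// Mirror every asset that is not yet in Mooncake: stream the blob out of the primary
// storage account into the Mooncake account, then record the resulting mooncakeUrl
// (built from MOONCAKE_CDN_URL) on the build document.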
for (const asset of build.assets) {
try {
const blobPath = url.parse(asset.url).path;
if (!blobPath) {
throw new Error(`Failed to parse URL: ${asset.url}`);
}
const blobName = blobPath.replace(/^\/\w+\//, '');
log(`Found ${blobName}`);
if (asset.mooncakeUrl) {
log(` Already in Mooncake ✔️`);
continue;
}
const readStream = blobService.createReadStream(quality, blobName, undefined!);
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
contentSettings: {
contentType: mime.lookup(blobPath),
cacheControl: 'max-age=31536000, public'
}
};
const writeStream = mooncakeBlobService.createWriteStreamToBlockBlob(quality, blobName, blobOptions, undefined);
log(` Uploading to Mooncake...`);
await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
log(` Updating build in DB...`);
asset.mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
await updateBuild(commit, quality, asset.platform, asset.type, asset);
log(` Done ✔️`);
} catch (err) {
error(err);
}
}
log(`All done ✔️`);
}
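// Entry point: the Mooncake sync job passes the quality (e.g. 'insider') as the only
// CLI argument (see sync-mooncake.yml further down in this diff).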
function main(): void {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
error('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const quality = process.argv[2];
sync(commit, quality).catch(err => {
error(err);
process.exit(1);
});
}
main();

View file

@ -0,0 +1,72 @@
[
{
"eventPrefix": "typescript-language-features/",
"sourceDirs": [
"../../s/extensions/typescript-language-features"
],
"excludedDirs": [],
"applyEndpoints": true
},
{
"eventPrefix": "git/",
"sourceDirs": [
"../../s/extensions/git"
],
"excludedDirs": [],
"applyEndpoints": true
},
{
"eventPrefix": "extension-telemetry/",
"sourceDirs": [
"vscode-extension-telemetry"
],
"excludedDirs": [],
"applyEndpoints": true
},
{
"eventPrefix": "vscode-markdown/",
"sourceDirs": [
"../../s/extensions/markdown-language-features"
],
"excludedDirs": [],
"applyEndpoints": true
},
{
"eventPrefix": "html-language-features/",
"sourceDirs": [
"../../s/extensions/html-language-features",
"vscode-html-languageservice"
],
"excludedDirs": [],
"applyEndpoints": true
},
{
"eventPrefix": "json-language-features/",
"sourceDirs": [
"../../s/extensions/json-language-features",
"vscode-json-languageservice"
],
"excludedDirs": [],
"applyEndpoints": true
},
{
"eventPrefix": "ms-vscode.node2/",
"sourceDirs": [
"vscode-chrome-debug-core",
"vscode-node-debug2"
],
"excludedDirs": [],
"applyEndpoints": true,
"patchDebugEvents": true
},
{
"eventPrefix": "ms-vscode.node/",
"sourceDirs": [
"vscode-chrome-debug-core",
"vscode-node-debug"
],
"excludedDirs": [],
"applyEndpoints": true,
"patchDebugEvents": true
}
]

View file

@ -4,28 +4,31 @@ steps:
versionSpec: "10.15.1"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- script: |
yarn
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install Dependencies
condition: ne(variables['CacheRestored'], 'true')
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
yarn gulp electron-x64
yarn electron x64
displayName: Download Electron
- script: |
yarn gulp hygiene
yarn gulp hygiene --skip-tslint
displayName: Run Hygiene Checks
- script: |
yarn gulp tslint
displayName: Run TSLint Checks
- script: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
@ -46,4 +49,4 @@ steps:
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()
condition: succeededOrFailed()

View file

@ -1,39 +1,96 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
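# Fail fast when the compiled output from the Compile job was not restored from the
# cache; the build steps below rely on it being present.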
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- script: |
set -e
cat << EOF > ~/.netrc
machine monacotools.visualstudio.com
password $(VSO_PAT)
machine github.com
login vscode
password $(VSCODE_MIXIN_PASSWORD)
password $(github-distro-mixin-password)
EOF
yarn
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" yarn gulp -- mixin
yarn gulp -- hygiene
yarn monaco-compile-check
node build/azure-pipelines/common/installDistro.js
node build/lib/builtInExtensions.js
displayName: Prepare build
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" \
AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
yarn gulp -- vscode-darwin-min
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" \
AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
yarn gulp -- upload-vscode-sourcemaps
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-darwin-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-darwin-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-darwin-min-ci
displayName: Build
- script: |
@ -42,6 +99,31 @@ steps:
# APP_NAME="`ls $(agent.builddirectory)/VSCode-darwin | head -n 1`"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-darwin/$APP_NAME"
displayName: Run unit tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
# Web Smoke Tests disabled due to https://github.com/microsoft/vscode/issues/80308
# - script: |
# set -e
# cd test/smoke
# yarn compile
# cd -
# yarn smoketest --web --headless
# continueOnError: true
# displayName: Run web smoke tests
# condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
@ -69,31 +151,12 @@ steps:
- script: |
set -e
# remove pkg from archive
zip -d ../VSCode-darwin.zip "*.pkg"
# publish the build
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/azure-pipelines/common/publish.js \
"$(VSCODE_QUALITY)" \
darwin \
archive \
"VSCode-darwin-$(VSCODE_QUALITY).zip" \
$VERSION \
true \
../VSCode-darwin.zip
# publish hockeyapp symbols
node build/azure-pipelines/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_MACOS)"
# upload configuration
AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
yarn gulp -- upload-vscode-configuration
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_HOCKEYAPP_TOKEN="$(vscode-hockeyapp-token)" \
./build/azure-pipelines/darwin/publish.sh
displayName: Publish
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0

View file

@ -0,0 +1,36 @@
#!/usr/bin/env bash
set -e
# remove pkg from archive
zip -d ../VSCode-darwin.zip "*.pkg"
# publish the build
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
node build/azure-pipelines/common/publish.js \
"$VSCODE_QUALITY" \
darwin \
archive \
"VSCode-darwin-$VSCODE_QUALITY.zip" \
$VERSION \
true \
../VSCode-darwin.zip
# package Remote Extension Host
pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd
# publish Remote Extension Host
node build/azure-pipelines/common/publish.js \
"$VSCODE_QUALITY" \
server-darwin \
archive-unsigned \
"vscode-server-darwin.zip" \
$VERSION \
true \
../vscode-server-darwin.zip
# publish hockeyapp symbols
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" x64 "$VSCODE_HOCKEYAPP_ID_MACOS"
# upload configuration
yarn gulp upload-vscode-configuration

View file

@ -0,0 +1,42 @@
trigger:
branches:
include: ['master', 'release/*']
pr:
branches:
include: ['master', 'release/*']
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
git remote add distro "https://github.com/$VSCODE_MIXIN_REPO.git"
git fetch distro
# Push master branch into oss/master
git push distro origin/master:refs/heads/oss/master
# Push every release branch into oss/release
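# (for-each-ref lists refs like 'origin/release/1.39'; the sed turns each one into the
# refspec 'origin/release/1.39:refs/heads/oss/release/1.39' for the push below)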
git for-each-ref --format="%(refname:short)" refs/remotes/origin/release/* | sed 's/^origin\/\(.*\)$/\0:refs\/heads\/oss\/\1/' | xargs git push distro
git merge $(node -p "require('./package.json').distro")
displayName: Sync & Merge Distro

View file

@ -0,0 +1,36 @@
pool:
vmImage: 'Ubuntu-16.04'
trigger: none
pr: none
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
git checkout origin/electron-6.0.x
git merge origin/master
# Push master branch into exploration branch
git push origin HEAD:electron-6.0.x
displayName: Sync & Merge Exploration

View file

@ -12,28 +12,31 @@ steps:
versionSpec: "10.15.1"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- script: |
yarn
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install Dependencies
condition: ne(variables['CacheRestored'], 'true')
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
yarn gulp electron-x64
yarn electron x64
displayName: Download Electron
- script: |
yarn gulp hygiene
yarn gulp hygiene --skip-tslint
displayName: Run Hygiene Checks
- script: |
yarn gulp tslint
displayName: Run TSLint Checks
- script: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
@ -46,9 +49,12 @@ steps:
- script: |
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests
- script: |
DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()
condition: succeededOrFailed()

View file

@ -0,0 +1,3 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View file

@ -0,0 +1,3 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View file

@ -0,0 +1,3 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View file

@ -0,0 +1,3 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View file

@ -0,0 +1,3 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View file

@ -0,0 +1,3 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View file

@ -0,0 +1,3 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View file

@ -0,0 +1,3 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View file

@ -0,0 +1,3 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View file

@ -0,0 +1,116 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- task: Docker@1
displayName: 'Pull image'
inputs:
azureSubscriptionEndpoint: 'vscode-builds-subscription'
azureContainerRegistry: vscodehub.azurecr.io
command: 'Run an image'
imageName: 'vscode-linux-build-agent:$(VSCODE_ARCH)'
containerCommand: uname
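# Running a throwaway command (uname) in the container is just a way to make the agent
# pull the build image up front, as the displayName suggests.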
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
CHILD_CONCURRENCY=1 ./build/azure-pipelines/linux/multiarch/$(VSCODE_ARCH)/prebuild.sh
displayName: Prebuild
- script: |
set -e
./build/azure-pipelines/linux/multiarch/$(VSCODE_ARCH)/build.sh
displayName: Build
- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
VSCODE_HOCKEYAPP_TOKEN="$(vscode-hockeyapp-token)" \
./build/azure-pipelines/linux/multiarch/$(VSCODE_ARCH)/publish.sh
displayName: Publish
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true

View file

@ -1,118 +1,138 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- script: |
set -e
export npm_config_arch="$(VSCODE_ARCH)"
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
fi
cat << EOF > ~/.netrc
machine monacotools.visualstudio.com
password $(VSO_PAT)
machine github.com
login vscode
password $(VSCODE_MIXIN_PASSWORD)
password $(github-distro-mixin-password)
EOF
CHILD_CONCURRENCY=1 yarn
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
npm run gulp -- hygiene
npm run monaco-compile-check
node build/azure-pipelines/common/installDistro.js
node build/lib/builtInExtensions.js
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-linux-$(VSCODE_ARCH)-min
name: build
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
- script: |
set -e
npm run gulp -- "electron-$(VSCODE_ARCH)"
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
# xvfb seems to be crashing often, let's make sure it's always up
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-linux-x64-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-linux-x64-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-linux-x64-min-ci
displayName: Build
- script: |
set -e
service xvfb start
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)"
name: test
displayName: Start xvfb
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
REPO="$(pwd)"
ROOT="$REPO/.."
ARCH="$(VSCODE_ARCH)"
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
# Publish tarball
PLATFORM_LINUX="linux-$(VSCODE_ARCH)"
[[ "$ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
[[ "$ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
BUILDNAME="VSCode-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
BUILD_VERSION="$(date +%s)"
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-x64"
displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$VERSION" true "$TARBALL_PATH"
# Publish hockeyapp symbols
node build/azure-pipelines/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_LINUX64)"
# Publish DEB
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-deb"
PLATFORM_DEB="linux-deb-$ARCH"
[[ "$ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_DEB" package "$DEB_FILENAME" "$VERSION" true "$DEB_PATH"
# Publish RPM
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-rpm"
PLATFORM_RPM="linux-rpm-$ARCH"
[[ "$ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_RPM" package "$RPM_FILENAME" "$VERSION" true "$RPM_PATH"
# Publish Snap
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-prepare-snap"
# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$(VSCODE_ARCH).tar.gz"
rm -rf $SNAP_TARBALL_PATH
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true
- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
VSCODE_HOCKEYAPP_TOKEN="$(vscode-hockeyapp-token)" \
./build/azure-pipelines/linux/publish.sh
displayName: Publish
- task: PublishPipelineArtifact@0
displayName: 'Publish Pipeline Artifact'
inputs:
artifactName: snap-$(VSCODE_ARCH)
artifactName: snap-x64
targetPath: .build/linux/snap-tarball
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true

View file

@ -0,0 +1,60 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
ROOT="$REPO/.."
# Publish tarball
PLATFORM_LINUX="linux-x64"
BUILDNAME="VSCode-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
BUILD_VERSION="$(date +%s)"
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$VERSION" true "$TARBALL_PATH"
# Publish Remote Extension Host
LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX"
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX.tar.gz"
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/vscode-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$VERSION" true "$SERVER_TARBALL_PATH"
# Publish hockeyapp symbols
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "x64" "$VSCODE_HOCKEYAPP_ID_LINUX64"
# Publish DEB
yarn gulp "vscode-linux-x64-build-deb"
PLATFORM_DEB="linux-deb-x64"
DEB_ARCH="amd64"
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_DEB" package "$DEB_FILENAME" "$VERSION" true "$DEB_PATH"
# Publish RPM
yarn gulp "vscode-linux-x64-build-rpm"
PLATFORM_RPM="linux-rpm-x64"
RPM_ARCH="x86_64"
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_RPM" package "$RPM_FILENAME" "$VERSION" true "$RPM_PATH"
# Publish Snap
yarn gulp "vscode-linux-x64-prepare-snap"
# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-x64.tar.gz"
rm -rf $SNAP_TARBALL_PATH
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)

View file

@ -5,12 +5,18 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- task: DownloadPipelineArtifact@0
displayName: 'Download Pipeline Artifact'
inputs:
artifactName: snap-$(VSCODE_ARCH)
artifactName: snap-x64
targetPath: .build/linux/snap-tarball
- script: |
@ -25,14 +31,13 @@ steps:
# Define variables
REPO="$(pwd)"
ARCH="$(VSCODE_ARCH)"
SNAP_ROOT="$REPO/.build/linux/snap/$ARCH"
SNAP_ROOT="$REPO/.build/linux/snap/x64"
# Install build dependencies
(cd build && yarn)
# Unpack snap tarball artifact, in order to preserve file perms
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$ARCH.tar.gz"
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-x64.tar.gz"
(cd .build/linux && tar -xzf $SNAP_TARBALL_PATH)
# Create snap package
@ -41,10 +46,9 @@ steps:
PACKAGEJSON="$(ls $SNAP_ROOT/code*/usr/share/code*/resources/app/package.json)"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
(cd $SNAP_ROOT/code-* && sudo snapcraft snap --output "$SNAP_PATH")
(cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft snap --output "$SNAP_PATH")
# Publish snap package
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-$ARCH" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-x64" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"

View file

@ -5,25 +5,15 @@
'use strict';
const gulp = require('gulp');
const json = require('gulp-json-editor');
const buffer = require('gulp-buffer');
const filter = require('gulp-filter');
const es = require('event-stream');
const vfs = require('vinyl-fs');
const pkg = require('../package.json');
const cp = require('child_process');
const fancyLog = require('fancy-log');
const ansiColors = require('ansi-colors');
gulp.task('mixin', function () {
const repo = process.env['VSCODE_MIXIN_REPO'];
if (!repo) {
console.log('Missing VSCODE_MIXIN_REPO, skipping mixin');
return;
}
function main() {
const quality = process.env['VSCODE_QUALITY'];
if (!quality) {
@ -31,32 +21,12 @@ gulp.task('mixin', function () {
return;
}
const url = `https://github.com/${repo}.git`;
cp.execSync(`git config user.email "vscode@microsoft.com"`);
cp.execSync(`git config user.name "VSCode"`);
fancyLog(ansiColors.blue('[mixin]'), 'Add distro remote');
cp.execSync(`git remote add distro ${url}`);
fancyLog(ansiColors.blue('[mixin]'), 'Add fetch distro sources');
cp.execSync(`git fetch distro`);
fancyLog(ansiColors.blue('[mixin]'), `Merge ${pkg.distro} from distro`);
try {
cp.execSync(`git merge ${pkg.distro}`);
} catch (err) {
fancyLog(ansiColors.red('[mixin] ❌'), `Failed to merge ${pkg.distro} from distro. Please proceed with manual merge to fix the build.`);
throw err;
}
const productJsonFilter = filter('product.json', { restore: true });
fancyLog(ansiColors.blue('[mixin]'), `Mixing in sources:`);
return vfs
.src(`quality/${quality}/**`, { base: `quality/${quality}` })
.pipe(filter(function (f) { return !f.isDirectory(); }))
.pipe(filter(f => !f.isDirectory()))
.pipe(productJsonFilter)
.pipe(buffer())
.pipe(json(o => Object.assign({}, require('../product.json'), o)))
@ -65,5 +35,7 @@ gulp.task('mixin', function () {
fancyLog(ansiColors.blue('[mixin]'), f.relative, ansiColors.green('✔︎'));
return f;
}))
.pipe(gulp.dest('.'));
});
.pipe(vfs.dest('.'));
}
main();

View file

@ -1,65 +1,150 @@
resources:
containers:
- container: vscode-x64
image: joaomoreno/vscode-linux-build-agent:x64
- container: vscode-ia32
image: joaomoreno/vscode-linux-build-agent:ia32
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
endpoint: VSCodeHub
- container: snapcraft
image: snapcore/snapcraft
image: snapcore/snapcraft:stable
jobs:
- job: Compile
pool:
vmImage: 'Ubuntu-16.04'
container: vscode-x64
steps:
- template: product-compile.yml
- job: Windows
condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
pool:
vmImage: VS2017-Win2016
variables:
VSCODE_ARCH: x64
dependsOn:
- Compile
steps:
- template: win32/product-build-win32.yml
- job: Windows32
condition: eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true')
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true'))
pool:
vmImage: VS2017-Win2016
variables:
VSCODE_ARCH: ia32
dependsOn:
- Compile
steps:
- template: win32/product-build-win32.yml
- job: Linux
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: x64
container: vscode-x64
dependsOn:
- Compile
steps:
- template: linux/product-build-linux.yml
- job: LinuxSnap
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: x64
container: snapcraft
dependsOn: Linux
steps:
- template: linux/snap-build-linux.yml
- job: Linux32
condition: eq(variables['VSCODE_BUILD_LINUX_32BIT'], 'true')
- job: LinuxArmhf
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: ia32
container: vscode-ia32
VSCODE_ARCH: armhf
dependsOn:
- Compile
steps:
- template: linux/product-build-linux.yml
- template: linux/product-build-linux-multiarch.yml
- job: LinuxArm64
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: arm64
dependsOn:
- Compile
steps:
- template: linux/product-build-linux-multiarch.yml
- job: LinuxAlpine
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: alpine
dependsOn:
- Compile
steps:
- template: linux/product-build-linux-multiarch.yml
- job: LinuxWeb
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_WEB'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: x64
dependsOn:
- Compile
steps:
- template: web/product-build-web.yml
- job: macOS
condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
pool:
vmImage: macOS 10.13
dependsOn:
- Compile
steps:
- template: darwin/product-build-darwin.yml
- template: darwin/product-build-darwin.yml
- job: Release
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), or(eq(variables['VSCODE_RELEASE'], 'true'), and(or(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['VSCODE_QUALITY'], 'exploration')), eq(variables['Build.Reason'], 'Schedule'))))
pool:
vmImage: 'Ubuntu-16.04'
dependsOn:
- Windows
- Windows32
- Linux
- LinuxSnap
- LinuxArmhf
- LinuxAlpine
- macOS
steps:
- template: release.yml
- job: Mooncake
pool:
vmImage: 'Ubuntu-16.04'
condition: and(succeededOrFailed(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
dependsOn:
- Windows
- Windows32
- Linux
- LinuxSnap
- LinuxArmhf
- LinuxAlpine
- LinuxWeb
- macOS
steps:
- template: sync-mooncake.yml
trigger: none
pr: none
schedules:
- cron: "0 5 * * Mon-Fri"
displayName: Mon-Fri at 7:00
branches:
include:
- master

View file

@ -0,0 +1,133 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
# Mixin must run before optimize, because the CSS loader will
# inline small SVGs
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
yarn gulp hygiene --skip-tslint
yarn gulp tslint
yarn monaco-compile-check
displayName: Run hygiene, tslint and monaco compile checks
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
./build/azure-pipelines/common/extract-telemetry.sh
displayName: Extract Telemetry
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
./build/azure-pipelines/common/publish-webview.sh
displayName: Publish Webview
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
yarn gulp compile-build
yarn gulp compile-extensions-build
yarn gulp minify-vscode
yarn gulp minify-vscode-reh
yarn gulp minify-vscode-reh-web
displayName: Compile
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
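# The folders produced above (out-build, out-vscode-min, out-vscode-reh-min,
# out-vscode-reh-web-min) are what the SaveCache step below stores, together with .build,
# under the 'Compilation' alias for the platform build jobs to restore.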
- script: |
set -e
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
node build/azure-pipelines/upload-sourcemaps
displayName: Upload sourcemaps
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

View file

@ -0,0 +1,2 @@
node_modules/
*.js

View file

@ -0,0 +1,43 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as cp from 'child_process';
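// Guard for the publish-types pipeline: exit non-zero unless the most recent tag is a
// minor release of the form x.y.0, so @types/vscode is only published for real releases.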
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
if (!isValidTag(tag)) {
throw Error(`Invalid tag ${tag}`);
}
} catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function isValidTag(t: string) {
if (t.split('.').length !== 3) {
return false;
}
const [major, minor, bug] = t.split('.');
// Only release for tags like 1.34.0
if (bug !== '0') {
return false;
}
if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) {
return false;
}
return true;
}

View file

@ -0,0 +1,83 @@
# Publish @types/vscode for each release
trigger:
branches:
include: ['refs/tags/*']
pr: none
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- bash: |
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
CHANNEL="G1C14HJ2F"
if [ "$TAG_VERSION" == "1.999.0" ]; then
MESSAGE="<!here>. Someone pushed 1.999.0 tag. Please delete it ASAP from remote and local."
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
https://slack.com/api/chat.postMessage
exit 1
fi
displayName: Check 1.999.0 tag
- bash: |
# Install build dependencies
(cd build && yarn)
node build/azure-pipelines/publish-types/check-version.js
displayName: Check version
- bash: |
git config --global user.email "vscode@microsoft.com"
git config --global user.name "VSCode"
git clone https://$(GITHUB_TOKEN)@github.com/DefinitelyTyped/DefinitelyTyped.git --depth=1
node build/azure-pipelines/publish-types/update-types.js
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
cd DefinitelyTyped
git diff --color | cat
git add -A
git status
git checkout -b "vscode-types-$TAG_VERSION"
git commit -m "VS Code $TAG_VERSION Extension API"
git push origin "vscode-types-$TAG_VERSION"
displayName: Push update to DefinitelyTyped
- bash: |
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
CHANNEL="G1C14HJ2F"
MESSAGE="DefinitelyTyped/DefinitelyTyped#vscode-types-$TAG_VERSION created. Endgame master, please open this link, examine changes and create a PR:"
LINK="https://github.com/DefinitelyTyped/DefinitelyTyped/compare/vscode-types-$TAG_VERSION?quick_pull=1&body=Updating%20VS%20Code%20Extension%20API.%20See%20https%3A%2F%2Fgithub.com%2Fmicrosoft%2Fvscode%2Fissues%2F70175%20for%20details."
MESSAGE2="[@octref, @jrieken, @kmaetzel, @egamma]. Please review and merge PR to publish @types/vscode."
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
https://slack.com/api/chat.postMessage
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$LINK"'"}' \
https://slack.com/api/chat.postMessage
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE2"'"}' \
https://slack.com/api/chat.postMessage
displayName: Send message on Slack

View file

@ -0,0 +1,73 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as cp from 'child_process';
import * as path from 'path';
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
updateDTSFile(outPath, tag);
console.log(`Done updating vscode.d.ts at ${outPath}`);
} catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
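// Swap the standard Microsoft source header of the downloaded vscode.d.ts for the
// DefinitelyTyped-style header defined below, before the file is committed to DefinitelyTyped.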
function updateDTSFile(outPath: string, tag: string) {
const oldContent = fs.readFileSync(outPath, 'utf-8');
const newContent = getNewFileContent(oldContent, tag);
fs.writeFileSync(outPath, newContent);
}
function getNewFileContent(content: string, tag: string) {
const oldheader = [
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the MIT License. See License.txt in the project root for license information.`,
` *--------------------------------------------------------------------------------------------*/`
].join('\n');
return getNewFileHeader(tag) + content.slice(oldheader.length);
}
function getNewFileHeader(tag: string) {
const [major, minor] = tag.split('.');
const shorttag = `${major}.${minor}`;
const header = [
`// Type definitions for Visual Studio Code ${shorttag}`,
`// Project: https://github.com/microsoft/vscode`,
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
``,
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the MIT License.`,
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
` *--------------------------------------------------------------------------------------------*/`,
``,
`/**`,
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
` * See https://code.visualstudio.com/api for more information`,
` */`
].join('\n');
return header;
}

View file

@ -0,0 +1,22 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- script: |
set -e
(cd build ; yarn)
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
node build/azure-pipelines/common/release.js

View file

@ -0,0 +1,24 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- script: |
set -e
(cd build ; yarn)
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(vscode-mooncake-storage-key)" \
node build/azure-pipelines/common/sync-mooncake.js "$VSCODE_QUALITY"

View file

@ -0,0 +1,57 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const path = require('path');
const es = require('event-stream');
const azure = require('gulp-azure-storage');
const vfs = require('vinyl-fs');
const util = require('../lib/util');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
// optionally allow to pass in explicit base/maps to upload
const [, , base, maps] = process.argv;
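// Collect the *.map files under `base` and re-root each one under a core/ prefix, so it
// is uploaded as <commit>/core/<relative path> in the sourcemaps container.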
const fetch = function (base, maps = `${base}/**/*.map`) {
return vfs.src(maps, { base })
.pipe(es.mapSync(f => {
f.path = `${f.base}/core/${f.relative}`;
return f;
}));
};
function main() {
const sources = [];
// vscode client maps (default)
if (!base) {
const vs = fetch('out-vscode-min'); // client source-maps only
sources.push(vs);
const extensionsOut = vfs.src(['.build/extensions/**/*.js.map', '!**/node_modules/**'], { base: '.build' });
sources.push(extensionsOut);
}
// specific client base/maps
else {
sources.push(fetch(base, maps));
}
return es.merge(...sources)
.pipe(es.through(function (data) {
console.log('Uploading Sourcemap', data.relative); // debug
this.emit('data', data);
}))
.pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY,
container: 'sourcemaps',
prefix: commit + '/'
}));
}
main();

View file

@ -0,0 +1,106 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
# inputs:
# keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: 'npm-vscode'
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
# condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
# inputs:
# keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: 'npm-vscode'
# condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
# - script: |
# set -e
# yarn postinstall
# displayName: Run postinstall scripts
# condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-web-min-ci
displayName: Build
# upload only the workbench.web.api.js source maps because
# we just compiled these bits in the previous step and the
# general task to upload source maps has already been run
- script: |
set -e
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
node build/azure-pipelines/upload-sourcemaps out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.api.js.map
displayName: Upload sourcemaps (Web)
- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
./build/azure-pipelines/web/publish.sh
displayName: Publish

View file

@ -0,0 +1,18 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
ROOT="$REPO/.."
# Publish Web Client
WEB_BUILD_NAME="vscode-web"
WEB_TARBALL_FILENAME="vscode-web.tar.gz"
WEB_TARBALL_PATH="$ROOT/$WEB_TARBALL_FILENAME"
BUILD="$ROOT/$WEB_BUILD_NAME"
PACKAGEJSON="$BUILD/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
rm -rf $ROOT/vscode-web.tar.*
(cd $ROOT && tar --owner=0 --group=0 -czf $WEB_TARBALL_PATH $WEB_BUILD_NAME)
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "web-standalone" archive-unsigned "$WEB_TARBALL_FILENAME" "$VERSION" true "$WEB_TARBALL_PATH"

View file

@ -4,32 +4,36 @@ steps:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
- powershell: |
yarn
yarn --frozen-lockfile
env:
CHILD_CONCURRENCY: "1"
displayName: Install Dependencies
condition: ne(variables['CacheRestored'], 'true')
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
yarn gulp electron
displayName: Download Electron
- powershell: |
yarn gulp hygiene
yarn electron
- script: |
yarn gulp hygiene --skip-tslint
displayName: Run Hygiene Checks
- script: |
yarn gulp tslint
displayName: Run TSLint Checks
- powershell: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks

View file

@ -1,51 +1,134 @@
steps:
- powershell: |
mkdir .build -ea 0
"$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
"$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- powershell: |
$ErrorActionPreference = "Stop"
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
"machine github.com`nlogin vscode`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
exec { git config user.email "vscode@microsoft.com" }
exec { git config user.name "VSCode" }
mkdir .build -ea 0
"$(VSCODE_ARCH)" | Out-File -Encoding ascii -NoNewLine .build\arch
displayName: Prepare tooling
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git" }
exec { git fetch distro }
exec { git merge $(node -p "require('./package.json').distro") }
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/arch, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
"machine monacotools.visualstudio.com`npassword $(VSO_PAT)`nmachine github.com`nlogin vscode`npassword $(VSCODE_MIXIN_PASSWORD)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
$env:npm_config_arch="$(VSCODE_ARCH)"
$env:CHILD_CONCURRENCY="1"
$env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
exec { yarn }
exec { npm run gulp -- mixin }
exec { npm run gulp -- hygiene }
exec { npm run monaco-compile-check }
exec { node build/azure-pipelines/common/installDistro.js }
exec { node build/lib/builtInExtensions.js }
exec { yarn --frozen-lockfile }
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .build/arch, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
exec { npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-min" }
exec { npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-inno-updater" }
name: build
exec { yarn postinstall }
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { npm run gulp -- "electron-$(VSCODE_ARCH)" }
exec { node build/azure-pipelines/mixin }
displayName: Mix in quality
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-min-ci" }
exec { yarn gulp "vscode-reh-win32-$env:VSCODE_ARCH-min-ci" }
exec { yarn gulp "vscode-reh-web-win32-$env:VSCODE_ARCH-min-ci" }
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-code-helper" }
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-inno-updater" }
displayName: Build
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn electron $(VSCODE_ARCH) }
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
# yarn smoketest -- --build "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
name: test
displayName: Run unit tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- powershell: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)'
FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH),$(agent.builddirectory)/vscode-reh-win32-$(VSCODE_ARCH)'
Pattern: '*.dll,*.exe,*.node'
signConfigType: inlineSignParams
inlineOperation: |
@ -113,38 +196,18 @@ steps:
- powershell: |
$ErrorActionPreference = "Stop"
.\build\azure-pipelines\win32\import-esrp-auth-cert.ps1 -AuthCertificateBase64 $(ESRP_AUTH_CERTIFICATE) -AuthCertificateKey $(ESRP_AUTH_CERTIFICATE_KEY)
.\build\azure-pipelines\win32\import-esrp-auth-cert.ps1 -AuthCertificateBase64 $(esrp-auth-certificate) -AuthCertificateKey $(esrp-auth-certificate-key)
displayName: Import ESRP Auth Certificate
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-archive" "vscode-win32-$(VSCODE_ARCH)-system-setup" "vscode-win32-$(VSCODE_ARCH)-user-setup" --sign }
$Repo = "$(pwd)"
$Root = "$Repo\.."
$SystemExe = "$Repo\.build\win32-$(VSCODE_ARCH)\system-setup\VSCodeSetup.exe"
$UserExe = "$Repo\.build\win32-$(VSCODE_ARCH)\user-setup\VSCodeSetup.exe"
$Zip = "$Repo\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip"
$Build = "$Root\VSCode-win32-$(VSCODE_ARCH)"
# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
$Quality = "$env:VSCODE_QUALITY"
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(AZURE_STORAGE_ACCESS_KEY_2)"
$env:MOONCAKE_STORAGE_ACCESS_KEY = "$(MOONCAKE_STORAGE_ACCESS_KEY)"
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(AZURE_DOCUMENTDB_MASTERKEY)"
$assetPlatform = if ("$(VSCODE_ARCH)" -eq "ia32") { "win32" } else { "win32-x64" }
exec { node build/azure-pipelines/common/publish.js $Quality "$global:assetPlatform-archive" archive "VSCode-win32-$(VSCODE_ARCH)-$Version.zip" $Version true $Zip }
exec { node build/azure-pipelines/common/publish.js $Quality "$global:assetPlatform" setup "VSCodeSetup-$(VSCODE_ARCH)-$Version.exe" $Version true $SystemExe }
exec { node build/azure-pipelines/common/publish.js $Quality "$global:assetPlatform-user" setup "VSCodeUserSetup-$(VSCODE_ARCH)-$Version.exe" $Version true $UserExe }
# publish hockeyapp symbols
$hockeyAppId = if ("$(VSCODE_ARCH)" -eq "ia32") { "$(VSCODE_HOCKEYAPP_ID_WIN32)" } else { "$(VSCODE_HOCKEYAPP_ID_WIN64)" }
exec { node build/azure-pipelines/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" $hockeyAppId }
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(vscode-storage-key)"
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(builds-docdb-key-readwrite)"
$env:VSCODE_HOCKEYAPP_TOKEN = "$(vscode-hockeyapp-token)"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
.\build\azure-pipelines\win32\publish.ps1
displayName: Publish
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'

View file

@ -0,0 +1,37 @@
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$Arch = "$env:VSCODE_ARCH"
exec { yarn gulp "vscode-win32-$Arch-archive" "vscode-win32-$Arch-system-setup" "vscode-win32-$Arch-user-setup" --sign }
$Repo = "$(pwd)"
$Root = "$Repo\.."
$SystemExe = "$Repo\.build\win32-$Arch\system-setup\VSCodeSetup.exe"
$UserExe = "$Repo\.build\win32-$Arch\user-setup\VSCodeSetup.exe"
$Zip = "$Repo\.build\win32-$Arch\archive\VSCode-win32-$Arch.zip"
$LegacyServer = "$Root\vscode-reh-win32-$Arch"
$ServerName = "vscode-server-win32-$Arch"
$Server = "$Root\$ServerName"
$ServerZip = "$Repo\.build\vscode-server-win32-$Arch.zip"
$Build = "$Root\VSCode-win32-$Arch"
# Create server archive
exec { Rename-Item -Path $LegacyServer -NewName $ServerName }
exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r }
# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
$Quality = "$env:VSCODE_QUALITY"
$AssetPlatform = if ("$Arch" -eq "ia32") { "win32" } else { "win32-x64" }
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-archive" archive "VSCode-win32-$Arch-$Version.zip" $Version true $Zip }
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform" setup "VSCodeSetup-$Arch-$Version.exe" $Version true $SystemExe }
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $Version true $UserExe }
exec { node build/azure-pipelines/common/publish.js $Quality "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $Version true $ServerZip }
# publish hockeyapp symbols
$hockeyAppId = if ("$Arch" -eq "ia32") { "$env:VSCODE_HOCKEYAPP_ID_WIN32" } else { "$env:VSCODE_HOCKEYAPP_ID_WIN64" }
exec { node build/azure-pipelines/common/symbols.js "$env:VSCODE_MIXIN_PASSWORD" "$env:VSCODE_HOCKEYAPP_TOKEN" "$Arch" $hockeyAppId }

View file

@ -1,7 +1,7 @@
[
{
"name": "ms-vscode.node-debug",
"version": "1.33.3",
"version": "1.38.8",
"repo": "https://github.com/Microsoft/vscode-node-debug",
"metadata": {
"id": "b6ded8fb-a0a0-4c1c-acbd-ab2a3bc995a6",
@ -16,7 +16,7 @@
},
{
"name": "ms-vscode.node-debug2",
"version": "1.33.0",
"version": "1.39.2",
"repo": "https://github.com/Microsoft/vscode-node-debug2",
"metadata": {
"id": "36d19e17-7569-4841-a001-947eb18602b2",
@ -31,7 +31,7 @@
},
{
"name": "ms-vscode.references-view",
"version": "0.0.26",
"version": "0.0.30",
"repo": "https://github.com/Microsoft/vscode-reference-view",
"metadata": {
"id": "dc489f46-520d-4556-ae85-1f9eab3c412d",

View file

@ -10,7 +10,7 @@ const path = require('path');
let window = null;
app.once('ready', () => {
window = new BrowserWindow({ width: 800, height: 600 });
window = new BrowserWindow({ width: 800, height: 600, webPreferences: { nodeIntegration: true, webviewTag: true } });
window.setMenuBarVisibility(false);
window.loadURL(url.format({ pathname: path.join(__dirname, 'index.html'), protocol: 'file:', slashes: true }));
// window.webContents.openDevTools();

View file

@ -1,91 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const https = require("https");
const fs = require("fs");
const path = require("path");
const cp = require("child_process");
function ensureDir(filepath) {
if (!fs.existsSync(filepath)) {
ensureDir(path.dirname(filepath));
fs.mkdirSync(filepath);
}
}
function download(options, destination) {
ensureDir(path.dirname(destination));
return new Promise((c, e) => {
const fd = fs.openSync(destination, 'w');
const req = https.get(options, (res) => {
res.on('data', (chunk) => {
fs.writeSync(fd, chunk);
});
res.on('end', () => {
fs.closeSync(fd);
c();
});
});
req.on('error', (reqErr) => {
console.error(`request to ${options.host}${options.path} failed.`);
console.error(reqErr);
e(reqErr);
});
});
}
const MARKER_ARGUMENT = `_download_fork_`;
function base64encode(str) {
return Buffer.from(str, 'utf8').toString('base64');
}
function base64decode(str) {
return Buffer.from(str, 'base64').toString('utf8');
}
function downloadInExternalProcess(options) {
const url = `https://${options.requestOptions.host}${options.requestOptions.path}`;
console.log(`Downloading ${url}...`);
return new Promise((c, e) => {
const child = cp.fork(__filename, [MARKER_ARGUMENT, base64encode(JSON.stringify(options))], {
stdio: ['pipe', 'pipe', 'pipe', 'ipc']
});
let stderr = [];
child.stderr.on('data', (chunk) => {
stderr.push(typeof chunk === 'string' ? Buffer.from(chunk) : chunk);
});
child.on('exit', (code) => {
if (code === 0) {
// normal termination
console.log(`Finished downloading ${url}.`);
c();
}
else {
// abnormal termination
console.error(Buffer.concat(stderr).toString());
e(new Error(`Download of ${url} failed.`));
}
});
});
}
exports.downloadInExternalProcess = downloadInExternalProcess;
function _downloadInExternalProcess() {
let options;
try {
options = JSON.parse(base64decode(process.argv[3]));
}
catch (err) {
console.error(`Cannot read arguments`);
console.error(err);
process.exit(-1);
return;
}
download(options.requestOptions, options.destinationPath).then(() => {
process.exit(0);
}, (err) => {
console.error(err);
process.exit(-2);
});
}
if (process.argv.length >= 4 && process.argv[2] === MARKER_ARGUMENT) {
// running as forked download script
_downloadInExternalProcess();
}

View file

@ -1,111 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as https from 'https';
import * as fs from 'fs';
import * as path from 'path';
import * as cp from 'child_process';
function ensureDir(filepath: string) {
if (!fs.existsSync(filepath)) {
ensureDir(path.dirname(filepath));
fs.mkdirSync(filepath);
}
}
function download(options: https.RequestOptions, destination: string): Promise<void> {
ensureDir(path.dirname(destination));
return new Promise<void>((c, e) => {
const fd = fs.openSync(destination, 'w');
const req = https.get(options, (res) => {
res.on('data', (chunk) => {
fs.writeSync(fd, chunk);
});
res.on('end', () => {
fs.closeSync(fd);
c();
});
});
req.on('error', (reqErr) => {
console.error(`request to ${options.host}${options.path} failed.`);
console.error(reqErr);
e(reqErr);
});
});
}
const MARKER_ARGUMENT = `_download_fork_`;
function base64encode(str: string): string {
return Buffer.from(str, 'utf8').toString('base64');
}
function base64decode(str: string): string {
return Buffer.from(str, 'base64').toString('utf8');
}
export interface IDownloadRequestOptions {
host: string;
path: string;
}
export interface IDownloadOptions {
requestOptions: IDownloadRequestOptions;
destinationPath: string;
}
export function downloadInExternalProcess(options: IDownloadOptions): Promise<void> {
const url = `https://${options.requestOptions.host}${options.requestOptions.path}`;
console.log(`Downloading ${url}...`);
return new Promise<void>((c, e) => {
const child = cp.fork(
__filename,
[MARKER_ARGUMENT, base64encode(JSON.stringify(options))],
{
stdio: ['pipe', 'pipe', 'pipe', 'ipc']
}
);
let stderr: Buffer[] = [];
child.stderr.on('data', (chunk) => {
stderr.push(typeof chunk === 'string' ? Buffer.from(chunk) : chunk);
});
child.on('exit', (code) => {
if (code === 0) {
// normal termination
console.log(`Finished downloading ${url}.`);
c();
} else {
// abnormal termination
console.error(Buffer.concat(stderr).toString());
e(new Error(`Download of ${url} failed.`));
}
});
});
}
function _downloadInExternalProcess() {
let options: IDownloadOptions;
try {
options = JSON.parse(base64decode(process.argv[3]));
} catch (err) {
console.error(`Cannot read arguments`);
console.error(err);
process.exit(-1);
return;
}
download(options.requestOptions, options.destinationPath).then(() => {
process.exit(0);
}, (err) => {
console.error(err);
process.exit(-2);
});
}
if (process.argv.length >= 4 && process.argv[2] === MARKER_ARGUMENT) {
// running as forked download script
_downloadInExternalProcess();
}
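Both the compiled download.js above and this TypeScript source are deleted by this commit. The one technique worth noting from the removed helper is the self-forking pattern: the script forks itself with a marker argument plus base64-encoded options so the download runs in a separate Node process and its stderr can be collected on failure. A stripped-down sketch of that pattern only, with no claim to match the removed file beyond the structure shown above:

// self-fork-sketch.js: minimal fork-with-marker-argument pattern (illustrative)
const cp = require('child_process');

const MARKER = '_work_fork_'; // any sentinel argv value works

function runInChildProcess(payload) {
	return new Promise((resolve, reject) => {
		const encoded = Buffer.from(JSON.stringify(payload), 'utf8').toString('base64');
		const child = cp.fork(__filename, [MARKER, encoded]);
		child.on('exit', code => code === 0 ? resolve() : reject(new Error(`child exited with code ${code}`)));
	});
}

if (process.argv[2] === MARKER) {
	// child: decode the payload and do the actual work
	const payload = JSON.parse(Buffer.from(process.argv[3], 'base64').toString('utf8'));
	console.log('child working on', payload);
	process.exit(0);
} else {
	// parent: delegate the work to a forked copy of this same file
	runInChildProcess({ url: 'https://example.com/file' })
		.then(() => console.log('done'), err => { console.error(err); process.exit(1); });
}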

View file

@ -5,14 +5,12 @@
'use strict';
const gulp = require('gulp');
const util = require('./lib/util');
const task = require('./lib/task');
const compilation = require('./lib/compilation');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
// Full compile, including nls and inline sources in sourcemaps, for build
const compileClientBuildTask = task.define('compile-client-build', task.series(util.rimraf('out-build'), compilation.compileTask('src', 'out-build', true)));
// All Build
const compileBuildTask = task.define('compile-build', task.parallel(compileClientBuildTask, compileExtensionsBuildTask));
const compileBuildTask = task.define('compile-build', task.series(util.rimraf('out-build'), compilation.compileTask('src', 'out-build', true)));
gulp.task(compileBuildTask);
exports.compileBuildTask = compileBuildTask;

View file

@ -41,12 +41,7 @@ var editorEntryPoints = [
];
var editorResources = [
'out-build/vs/{base,editor}/**/*.{svg,png}',
'!out-build/vs/base/browser/ui/splitview/**/*',
'!out-build/vs/base/browser/ui/toolbar/**/*',
'!out-build/vs/base/browser/ui/octiconLabel/**/*',
'!out-build/vs/workbench/**',
'!**/test/**'
'out-editor-build/vs/base/browser/ui/codiconLabel/**/*.ttf'
];
var BUNDLED_FILE_HEADER = [
@ -89,9 +84,6 @@ const extractEditorSrcTask = task.define('extract-editor-src', () => {
`lib.dom.d.ts`,
`lib.webworker.importscripts.d.ts`
],
redirects: {
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
},
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/,
destRoot: path.join(root, 'out-editor-src')

View file

@ -21,7 +21,7 @@ const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const plumber = require('gulp-plumber');
const _ = require('underscore');
const ext = require('./lib/extensions');
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
@ -36,16 +36,16 @@ const tasks = compilations.map(function (tsconfigFile) {
const absolutePath = path.join(extensionsPath, tsconfigFile);
const relativeDirname = path.dirname(tsconfigFile);
const tsconfig = require(absolutePath);
const tsOptions = _.assign({}, tsconfig.extends ? require(path.join(extensionsPath, relativeDirname, tsconfig.extends)).compilerOptions : {}, tsconfig.compilerOptions);
tsOptions.verbose = false;
tsOptions.sourceMap = true;
const overrideOptions = {};
overrideOptions.sourceMap = true;
const name = relativeDirname.replace(/\//g, '-');
const root = path.join('extensions', relativeDirname);
const srcBase = path.join(root, 'src');
const src = path.join(srcBase, '**');
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
const out = path.join(root, 'out');
const baseUrl = getBaseUrl(out);
@ -62,12 +62,12 @@ const tasks = compilations.map(function (tsconfigFile) {
function createPipeline(build, emitError) {
const reporter = createReporter();
tsOptions.inlineSources = !!build;
tsOptions.base = path.dirname(absolutePath);
overrideOptions.inlineSources = Boolean(build);
overrideOptions.base = path.dirname(absolutePath);
const compilation = tsb.create(tsOptions, null, null, err => reporter(err.toString()));
const compilation = tsb.create(absolutePath, overrideOptions, false, err => reporter(err.toString()));
return function () {
const pipeline = function () {
const input = es.through();
const tsFilter = filter(['**/*.ts', '!**/lib/lib*.d.ts', '!**/node_modules/**'], { restore: true });
const output = input
@ -97,15 +97,19 @@ const tasks = compilations.map(function (tsconfigFile) {
return es.duplex(input, output);
};
}
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
// add src-stream for project files
pipeline.tsProjectSrc = () => {
return compilation.src(srcOpts);
};
return pipeline;
}
const cleanTask = task.define(`clean-extension-${name}`, util.rimraf(out));
const compileTask = task.define(`compile-extension:${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(false, true);
const input = gulp.src(src, srcOpts);
const input = pipeline.tsProjectSrc();
return input
.pipe(pipeline())
@ -114,8 +118,8 @@ const tasks = compilations.map(function (tsconfigFile) {
const watchTask = task.define(`watch-extension:${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(false);
const input = gulp.src(src, srcOpts);
const watchInput = watcher(src, srcOpts);
const input = pipeline.tsProjectSrc();
const watchInput = watcher(src, { ...srcOpts, ...{ readDelay: 200 } });
return watchInput
.pipe(util.incremental(pipeline, input))
@ -124,7 +128,7 @@ const tasks = compilations.map(function (tsconfigFile) {
const compileBuildTask = task.define(`compile-build-extension-${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(true, true);
const input = gulp.src(src, srcOpts);
const input = pipeline.tsProjectSrc();
return input
.pipe(pipeline())
@ -135,11 +139,7 @@ const tasks = compilations.map(function (tsconfigFile) {
gulp.task(compileTask);
gulp.task(watchTask);
return {
compileTask: compileTask,
watchTask: watchTask,
compileBuildTask: compileBuildTask
};
return { compileTask, watchTask, compileBuildTask };
});
const compileExtensionsTask = task.define('compile-extensions', task.parallel(...tasks.map(t => t.compileTask)));
@ -150,5 +150,17 @@ const watchExtensionsTask = task.define('watch-extensions', task.parallel(...tas
gulp.task(watchExtensionsTask);
exports.watchExtensionsTask = watchExtensionsTask;
const compileExtensionsBuildTask = task.define('compile-extensions-build', task.parallel(...tasks.map(t => t.compileBuildTask)));
const compileExtensionsBuildLegacyTask = task.define('compile-extensions-build-legacy', task.parallel(...tasks.map(t => t.compileBuildTask)));
gulp.task(compileExtensionsBuildLegacyTask);
// Azure Pipelines
const cleanExtensionsBuildTask = task.define('clean-extensions-build', util.rimraf('.build/extensions'));
const compileExtensionsBuildTask = task.define('compile-extensions-build', task.series(
cleanExtensionsBuildTask,
task.define('bundle-extensions-build', () => ext.packageLocalExtensionsStream().pipe(gulp.dest('.build'))),
task.define('bundle-marketplace-extensions-build', () => ext.packageMarketplaceExtensionsStream().pipe(gulp.dest('.build'))),
));
gulp.task(compileExtensionsBuildTask);
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
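The refactor above stops calling gulp.src directly and instead lets each compilation pipeline expose its own tsProjectSrc() helper, backed by vscode's internal tsb compiler created from the tsconfig path (tsb itself is not shown in this diff). A generic sketch of that "callable pipeline with an attached source helper" shape, with stand-in stream plumbing:

// Sketch only: the real pipeline is created via tsb.create(tsconfigPath, overrideOptions, ...).
const es = require('event-stream');
const gulp = require('gulp');

function createPipeline(srcGlob, srcOpts) {
	const pipeline = function () {
		// stand-in for the real filter/compile/sourcemap chain
		return es.through();
	};
	// tasks call this instead of gulp.src(src, srcOpts) directly
	pipeline.tsProjectSrc = () => gulp.src(srcGlob, srcOpts);
	return pipeline;
}

// usage, mirroring the compile task above (paths are illustrative):
// const pipeline = createPipeline('extensions/example/src/**', { base: 'extensions/example/src' });
// pipeline.tsProjectSrc().pipe(pipeline()).pipe(gulp.dest('extensions/example/out'));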

View file

@ -17,6 +17,7 @@ const vfs = require('vinyl-fs');
const path = require('path');
const fs = require('fs');
const pall = require('p-all');
const task = require('./lib/task');
/**
* Hygiene works by creating cascading subsets of all our files and
@ -50,12 +51,14 @@ const indentationFilter = [
'!src/vs/css.js',
'!src/vs/css.build.js',
'!src/vs/loader.js',
'!src/vs/base/common/insane/insane.js',
'!src/vs/base/common/marked/marked.js',
'!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/cpuUsage.sh',
'!test/assert.js',
// except specific folders
'!test/automation/out/**',
'!test/smoke/out/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
@ -68,7 +71,7 @@ const indentationFilter = [
'!**/yarn-error.log',
// except multiple specific folders
'!**/octicons/**',
'!**/codicon/**',
'!**/fixtures/**',
'!**/lib/**',
'!extensions/**/out/**',
@ -87,6 +90,7 @@ const indentationFilter = [
'!build/azure-pipelines/**/*.js',
'!build/azure-pipelines/**/*.config',
'!**/Dockerfile',
'!**/Dockerfile.*',
'!**/*.Dockerfile',
'!**/*.dockerfile',
'!extensions/markdown-language-features/media/*.js'
@ -118,7 +122,9 @@ const copyrightFilter = [
'!resources/completions/**',
'!extensions/markdown-language-features/media/highlight.css',
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*'
'!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts',
'!scripts/code-web.js'
];
const eslintFilter = [
@ -129,24 +135,46 @@ const eslintFilter = [
'!src/vs/nls.js',
'!src/vs/css.build.js',
'!src/vs/nls.build.js',
'!src/**/insane.js',
'!src/**/marked.js',
'!**/test/**'
];
const tslintFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
const tslintBaseFilter = [
'!**/fixtures/**',
'!**/typings/**',
'!**/node_modules/**',
'!extensions/typescript/test/colorize-fixtures/**',
'!extensions/typescript-basics/test/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts'
];
const tslintCoreFilter = [
'src/**/*.ts',
'test/**/*.ts',
'!extensions/**/*.ts',
'!test/automation/**',
'!test/smoke/**',
...tslintBaseFilter
];
const tslintExtensionsFilter = [
'extensions/**/*.ts',
'!src/**/*.ts',
'!test/**/*.ts',
'test/automation/**/*.ts',
...tslintBaseFilter
];
const tslintHygieneFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
...tslintBaseFilter
];
const copyrightHeaderLines = [
'/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
@ -163,17 +191,63 @@ gulp.task('eslint', () => {
});
gulp.task('tslint', () => {
const options = { emitError: true };
return es.merge([
return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(tslintFilter))
.pipe(gulptslint.default({ rulesDirectory: 'build/lib/tslint' }))
.pipe(gulptslint.default.report(options));
// Core: include type information (required by certain rules like no-nodejs-globals)
vfs.src(all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(tslintCoreFilter))
.pipe(gulptslint.default({ rulesDirectory: 'build/lib/tslint', program: tslint.Linter.createProgram('src/tsconfig.json') }))
.pipe(gulptslint.default.report({ emitError: true })),
// Extensions: do not include type information
vfs.src(all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(tslintExtensionsFilter))
.pipe(gulptslint.default({ rulesDirectory: 'build/lib/tslint' }))
.pipe(gulptslint.default.report({ emitError: true }))
]).pipe(es.through());
});
function checkPackageJSON(actualPath) {
const actual = require(path.join(__dirname, '..', actualPath));
const rootPackageJSON = require('../package.json');
for (let depName in actual.dependencies) {
const depVersion = actual.dependencies[depName];
const rootDepVersion = rootPackageJSON.dependencies[depName];
if (!rootDepVersion) {
// missing in root is allowed
continue;
}
if (depVersion !== rootDepVersion) {
this.emit('error', `The dependency ${depName} in '${actualPath}' (${depVersion}) is different than in the root package.json (${rootDepVersion})`);
}
}
}
const checkPackageJSONTask = task.define('check-package-json', () => {
return gulp.src('package.json')
.pipe(es.through(function() {
checkPackageJSON.call(this, 'remote/package.json');
checkPackageJSON.call(this, 'remote/web/package.json');
}));
});
gulp.task(checkPackageJSONTask);
function hygiene(some) {
let errorCount = 0;
const productJson = es.through(function (file) {
const product = JSON.parse(file.contents.toString('utf8'));
if (product.extensionsGallery) {
console.error('product.json: Contains "extensionsGallery"');
errorCount++;
}
this.emit('data', file);
});
const indentation = es.through(function (file) {
const lines = file.contents.toString('utf8').split(/\r\n|\r|\n/);
file.__lines = lines;
@ -257,17 +331,25 @@ function hygiene(some) {
input = some;
}
const productJsonFilter = filter('product.json', { restore: true });
const result = input
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(productJsonFilter)
.pipe(process.env['BUILD_SOURCEVERSION'] ? es.through() : productJson)
.pipe(productJsonFilter.restore)
.pipe(filter(indentationFilter))
.pipe(indentation)
.pipe(filter(copyrightFilter))
.pipe(copyrights);
const typescript = result
.pipe(filter(tslintFilter))
.pipe(formatting)
.pipe(tsl);
let typescript = result
.pipe(filter(tslintHygieneFilter))
.pipe(formatting);
if (!process.argv.some(arg => arg === '--skip-tslint')) {
typescript = typescript.pipe(tsl);
}
const javascript = result
.pipe(filter(eslintFilter))
@ -340,7 +422,7 @@ function createGitIndexVinyls(paths) {
.then(r => r.filter(p => !!p));
}
gulp.task('hygiene', () => hygiene());
gulp.task('hygiene', task.series(checkPackageJSONTask, () => hygiene()));
// this allows us to run hygiene as a git pre-commit hook
if (require.main === module) {
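The hunks above add a check-package-json task that fails hygiene when a dependency shared between the root package.json and remote/package.json (or remote/web/package.json) is pinned to a different version. The same consistency check as a standalone sketch, runnable outside gulp; the file name and exit behaviour are choices made for the sketch, not taken from the build:

// check-dep-versions.js: standalone sketch of the consistency check above
const path = require('path');

function checkAgainstRoot(rootPkgPath, childPkgPath) {
	const root = require(path.resolve(rootPkgPath));
	const child = require(path.resolve(childPkgPath));
	const mismatches = [];
	for (const [name, version] of Object.entries(child.dependencies || {})) {
		const rootVersion = (root.dependencies || {})[name];
		// a dependency missing from the root is allowed, mirroring the gulp task
		if (rootVersion && rootVersion !== version) {
			mismatches.push(`${name}: ${childPkgPath} has ${version}, root has ${rootVersion}`);
		}
	}
	return mismatches;
}

const problems = [
	...checkAgainstRoot('package.json', 'remote/package.json'),
	...checkAgainstRoot('package.json', 'remote/web/package.json')
];
if (problems.length) {
	console.error(problems.join('\n'));
	process.exit(1);
}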

145
build/gulpfile.reh.js Normal file
View file

@ -0,0 +1,145 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const path = require('path');
const es = require('event-stream');
const util = require('./lib/util');
const task = require('./lib/task');
const vfs = require('vinyl-fs');
const flatmap = require('gulp-flatmap');
const gunzip = require('gulp-gunzip');
const untar = require('gulp-untar');
const File = require('vinyl');
const fs = require('fs');
const remote = require('gulp-remote-retry-src');
const rename = require('gulp-rename');
const filter = require('gulp-filter');
const cp = require('child_process');
const REPO_ROOT = path.dirname(__dirname);
const BUILD_TARGETS = [
{ platform: 'win32', arch: 'ia32', pkgTarget: 'node8-win-x86' },
{ platform: 'win32', arch: 'x64', pkgTarget: 'node8-win-x64' },
{ platform: 'darwin', arch: null, pkgTarget: 'node8-macos-x64' },
{ platform: 'linux', arch: 'ia32', pkgTarget: 'node8-linux-x86' },
{ platform: 'linux', arch: 'x64', pkgTarget: 'node8-linux-x64' },
{ platform: 'linux', arch: 'armhf', pkgTarget: 'node8-linux-armv7' },
{ platform: 'linux', arch: 'arm64', pkgTarget: 'node8-linux-arm64' },
{ platform: 'linux', arch: 'alpine', pkgTarget: 'node8-linux-alpine' },
];
const noop = () => { return Promise.resolve(); };
gulp.task('vscode-reh-win32-ia32-min', noop);
gulp.task('vscode-reh-win32-x64-min', noop);
gulp.task('vscode-reh-darwin-min', noop);
gulp.task('vscode-reh-linux-x64-min', noop);
gulp.task('vscode-reh-linux-armhf-min', noop);
gulp.task('vscode-reh-linux-arm64-min', noop);
gulp.task('vscode-reh-linux-alpine-min', noop);
gulp.task('vscode-reh-web-win32-ia32-min', noop);
gulp.task('vscode-reh-web-win32-x64-min', noop);
gulp.task('vscode-reh-web-darwin-min', noop);
gulp.task('vscode-reh-web-linux-x64-min', noop);
gulp.task('vscode-reh-web-linux-alpine-min', noop);
function getNodeVersion() {
const yarnrc = fs.readFileSync(path.join(REPO_ROOT, 'remote', '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target;
}
const nodeVersion = getNodeVersion();
BUILD_TARGETS.forEach(({ platform, arch }) => {
if (platform === 'darwin') {
arch = 'x64';
}
gulp.task(task.define(`node-${platform}-${arch}`, () => {
const nodePath = path.join('.build', 'node', `v${nodeVersion}`, `${platform}-${arch}`);
if (!fs.existsSync(nodePath)) {
util.rimraf(nodePath);
return nodejs(platform, arch)
.pipe(vfs.dest(nodePath));
}
return Promise.resolve(null);
}));
});
const defaultNodeTask = gulp.task(`node-${process.platform}-${process.arch}`);
if (defaultNodeTask) {
gulp.task(task.define('node', defaultNodeTask));
}
function nodejs(platform, arch) {
if (arch === 'ia32') {
arch = 'x86';
}
if (platform === 'win32') {
return remote(`/dist/v${nodeVersion}/win-${arch}/node.exe`, { base: 'https://nodejs.org' })
.pipe(rename('node.exe'));
}
if (arch === 'alpine') {
const contents = cp.execSync(`docker run --rm node:${nodeVersion}-alpine /bin/sh -c 'cat \`which node\`'`, { maxBuffer: 100 * 1024 * 1024, encoding: 'buffer' });
return es.readArray([new File({ path: 'node', contents, stat: { mode: parseInt('755', 8) } })]);
}
if (platform === 'darwin') {
arch = 'x64';
}
if (arch === 'armhf') {
arch = 'armv7l';
}
return remote(`/dist/v${nodeVersion}/node-v${nodeVersion}-${platform}-${arch}.tar.gz`, { base: 'https://nodejs.org' })
.pipe(flatmap(stream => stream.pipe(gunzip()).pipe(untar())))
.pipe(filter('**/node'))
.pipe(util.setExecutableBit('**'))
.pipe(rename('node'));
}
function mixinServer(watch) {
const packageJSONPath = path.join(path.dirname(__dirname), 'package.json');
function exec(cmdLine) {
console.log(cmdLine);
cp.execSync(cmdLine, { stdio: "inherit" });
}
function checkout() {
const packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString());
exec('git fetch distro');
exec(`git checkout ${packageJSON['distro']} -- src/vs/server resources/server`);
exec('git reset HEAD src/vs/server resources/server');
}
checkout();
if (watch) {
console.log('Enter watch mode (observing package.json)');
const watcher = fs.watch(packageJSONPath);
watcher.addListener('change', () => {
try {
checkout();
} catch (e) {
console.log(e);
}
});
}
return Promise.resolve();
}
gulp.task(task.define('mixin-server', () => mixinServer(false)));
gulp.task(task.define('mixin-server-watch', () => mixinServer(true)));
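The new gulpfile.reh.js derives the Node.js version to bundle with the remote server from the target line of remote/.yarnrc. A tiny sketch of that extraction against in-memory sample contents; the keys and version in the sample are illustrative, not copied from the repo:

// Sketch of getNodeVersion() above, run against a sample string instead of remote/.yarnrc.
const sampleYarnrc = [
	'disturl "https://nodejs.org/dist"', // illustrative line
	'target "10.11.0"',                  // illustrative version
	'runtime "node"'
].join('\n');

const target = /^target "(.*)"$/m.exec(sampleYarnrc)[1]; // same regex as getNodeVersion()
console.log(target); // -> 10.11.0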

View file

@ -21,7 +21,6 @@ const json = require('gulp-json-editor');
const _ = require('underscore');
const util = require('./lib/util');
const task = require('./lib/task');
const ext = require('./lib/extensions');
const buildfile = require('../src/buildfile');
const common = require('./lib/optimize');
const root = path.dirname(__dirname);
@ -31,10 +30,11 @@ const product = require('../product.json');
const crypto = require('crypto');
const i18n = require('./lib/i18n');
const deps = require('./dependencies');
const getElectronVersion = require('./lib/electron').getElectronVersion;
const { config } = require('./lib/electron');
const createAsar = require('./lib/asar').createAsar;
const minimist = require('minimist');
const { compileBuildTask } = require('./gulpfile.compile');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));
// @ts-ignore
@ -47,9 +47,9 @@ const nodeModules = ['electron', 'original-fs']
// Build
const vscodeEntryPoints = _.flatten([
buildfile.entrypoint('vs/workbench/workbench.main'),
buildfile.entrypoint('vs/workbench/workbench.desktop.main'),
buildfile.base,
buildfile.workbench,
buildfile.workbenchDesktop,
buildfile.code
]);
@ -62,20 +62,21 @@ const vscodeResources = [
'out-build/bootstrap-amd.js',
'out-build/bootstrap-window.js',
'out-build/paths.js',
'out-build/vs/**/*.{svg,png,cur,html}',
'out-build/vs/**/*.{svg,png,html}',
'!out-build/vs/code/browser/**/*.html',
'out-build/vs/base/common/performance.js',
'out-build/vs/base/node/languagePacks.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh}',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
'out-build/vs/base/browser/ui/codiconLabel/codicon/**',
'out-build/vs/workbench/browser/media/*-theme.css',
'out-build/vs/workbench/contrib/debug/**/*.json',
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
'out-build/vs/workbench/contrib/webview/electron-browser/webview-pre.js',
'out-build/vs/workbench/contrib/webview/browser/pre/*.js',
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
'out-build/vs/**/markdown.css',
'out-build/vs/workbench/contrib/tasks/**/*.json',
'out-build/vs/workbench/contrib/welcome/walkThrough/**/*.md',
'out-build/vs/workbench/services/files/**/*.exe',
'out-build/vs/workbench/services/files/**/*.md',
'out-build/vs/platform/files/**/*.exe',
'out-build/vs/platform/files/**/*.md',
'out-build/vs/code/electron-browser/workbench/**',
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
'out-build/vs/code/electron-browser/issue/issueReporter.js',
@ -83,140 +84,33 @@ const vscodeResources = [
'!**/test/**'
];
const BUNDLED_FILE_HEADER = [
'/*!--------------------------------------------------------',
' * Copyright (C) Microsoft Corporation. All rights reserved.',
' *--------------------------------------------------------*/'
].join('\n');
const optimizeVSCodeTask = task.define('optimize-vscode', task.series(
task.parallel(
util.rimraf('out-vscode'),
compileBuildTask
),
util.rimraf('out-vscode'),
common.optimizeTask({
src: 'out-build',
entryPoints: vscodeEntryPoints,
resources: vscodeResources,
loaderConfig: common.loaderConfig(nodeModules),
header: BUNDLED_FILE_HEADER,
out: 'out-vscode',
inlineAmdImages: true,
bundleInfo: undefined
})
));
gulp.task(optimizeVSCodeTask);
const optimizeIndexJSTask = task.define('optimize-index-js', task.series(
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
const minifyVSCodeTask = task.define('minify-vscode', task.series(
optimizeVSCodeTask,
util.rimraf('out-vscode-min'),
() => {
const fullpath = path.join(process.cwd(), 'out-vscode/bootstrap-window.js');
const contents = fs.readFileSync(fullpath).toString();
const newContents = contents.replace('[/*BUILD->INSERT_NODE_MODULES*/]', JSON.stringify(nodeModules));
fs.writeFileSync(fullpath, newContents);
}
));
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
const minifyVSCodeTask = task.define('minify-vscode', task.series(
task.parallel(
util.rimraf('out-vscode-min'),
optimizeIndexJSTask
),
},
common.minifyTask('out-vscode', `${sourceMappingURLBase}/core`)
));
// Package
// @ts-ignore JSON checking: darwinCredits is optional
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["bat", "cmd"], 'resources/darwin/bat.icns'),
darwinBundleDocumentType(["bowerrc"], 'resources/darwin/bower.icns'),
darwinBundleDocumentType(["c", "h"], 'resources/darwin/c.icns'),
darwinBundleDocumentType(["config", "editorconfig", "gitattributes", "gitconfig", "gitignore", "ini"], 'resources/darwin/config.icns'),
darwinBundleDocumentType(["cc", "cpp", "cxx", "hh", "hpp", "hxx"], 'resources/darwin/cpp.icns'),
darwinBundleDocumentType(["cs", "csx"], 'resources/darwin/csharp.icns'),
darwinBundleDocumentType(["css"], 'resources/darwin/css.icns'),
darwinBundleDocumentType(["go"], 'resources/darwin/go.icns'),
darwinBundleDocumentType(["asp", "aspx", "cshtml", "htm", "html", "jshtm", "jsp", "phtml", "shtml"], 'resources/darwin/html.icns'),
darwinBundleDocumentType(["jade"], 'resources/darwin/jade.icns'),
darwinBundleDocumentType(["jav", "java"], 'resources/darwin/java.icns'),
darwinBundleDocumentType(["js", "jscsrc", "jshintrc", "mjs"], 'resources/darwin/javascript.icns'),
darwinBundleDocumentType(["json"], 'resources/darwin/json.icns'),
darwinBundleDocumentType(["less"], 'resources/darwin/less.icns'),
darwinBundleDocumentType(["markdown", "md", "mdoc", "mdown", "mdtext", "mdtxt", "mdwn", "mkd", "mkdn"], 'resources/darwin/markdown.icns'),
darwinBundleDocumentType(["php"], 'resources/darwin/php.icns'),
darwinBundleDocumentType(["ps1", "psd1", "psm1"], 'resources/darwin/powershell.icns'),
darwinBundleDocumentType(["py"], 'resources/darwin/python.icns'),
darwinBundleDocumentType(["gemspec", "rb"], 'resources/darwin/ruby.icns'),
darwinBundleDocumentType(["scss"], 'resources/darwin/sass.icns'),
darwinBundleDocumentType(["bash", "bash_login", "bash_logout", "bash_profile", "bashrc", "profile", "rhistory", "rprofile", "sh", "zlogin", "zlogout", "zprofile", "zsh", "zshenv", "zshrc"], 'resources/darwin/shell.icns'),
darwinBundleDocumentType(["sql"], 'resources/darwin/sql.icns'),
darwinBundleDocumentType(["ts"], 'resources/darwin/typescript.icns'),
darwinBundleDocumentType(["tsx", "jsx"], 'resources/darwin/react.icns'),
darwinBundleDocumentType(["vue"], 'resources/darwin/vue.icns'),
darwinBundleDocumentType(["ascx", "csproj", "dtd", "wxi", "wxl", "wxs", "xml", "xaml"], 'resources/darwin/xml.icns'),
darwinBundleDocumentType(["eyaml", "eyml", "yaml", "yml"], 'resources/darwin/yaml.icns'),
darwinBundleDocumentType(["clj", "cljs", "cljx", "clojure", "code-workspace", "coffee", "ctp", "dockerfile", "dot", "edn", "fs", "fsi", "fsscript", "fsx", "handlebars", "hbs", "lua", "m", "makefile", "ml", "mli", "pl", "pl6", "pm", "pm6", "pod", "pp", "properties", "psgi", "pug", "r", "rs", "rt", "svg", "svgz", "t", "txt", "vb", "xcodeproj", "xcworkspace"], 'resources/darwin/default.icns')
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
// @ts-ignore JSON checking: electronRepository is optional
repo: product.electronRepository || undefined
};
function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return gulp.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
gulp.task(task.define('electron', task.series(util.rimraf('.build/electron'), getElectron(process.arch))));
gulp.task(task.define('electron-ia32', task.series(util.rimraf('.build/electron'), getElectron('ia32'))));
gulp.task(task.define('electron-x64', task.series(util.rimraf('.build/electron'), getElectron('x64'))));
gulp.task(task.define('electron-arm', task.series(util.rimraf('.build/electron'), getElectron('armv7l'))));
gulp.task(task.define('electron-arm64', task.series(util.rimraf('.build/electron'), getElectron('arm64'))));
gulp.task(minifyVSCodeTask);
/**
* Compute checksums for some files.
@ -262,25 +156,22 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
const out = sourceFolderName;
const checksums = computeChecksums(out, [
'vs/workbench/workbench.main.js',
'vs/workbench/workbench.main.css',
'vs/workbench/workbench.desktop.main.js',
'vs/workbench/workbench.desktop.main.css',
'vs/code/electron-browser/workbench/workbench.html',
'vs/code/electron-browser/workbench/workbench.js'
]);
const src = gulp.src(out + '/**', { base: '.' })
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }))
.pipe(util.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
.pipe(util.setExecutableBit(['**/*.sh']));
const root = path.resolve(path.join(__dirname, '..'));
const extensions = gulp.src('.build/extensions/**', { base: '.build', dot: true });
const sources = es.merge(src, ext.packageExtensionsStream({
sourceMappingURLBase: sourceMappingURLBase
}));
const sources = es.merge(src, extensions)
.pipe(filter(['**', '!**/*.js.map'], { dot: true }));
let version = packageJson.version;
// @ts-ignore JSON checking: quality is optional
const quality = product.quality;
if (quality && quality !== 'stable') {
@ -313,32 +204,14 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
// TODO the API should be copied to `out` during compile, not here
const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts'));
const depsSrc = [
..._.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
// @ts-ignore JSON checking: dependencies is optional
..._.flatten(Object.keys(product.dependencies || {}).map(d => [`node_modules/${d}/**`, `!node_modules/${d}/**/{test,tests}/**`]))
];
const telemetry = gulp.src('.build/telemetry/**', { base: '.build/telemetry', dot: true });
const deps = gulp.src(depsSrc, { base: '.', dot: true })
const root = path.resolve(path.join(__dirname, '..'));
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]));
const deps = gulp.src(dependenciesSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModule('fsevents', ['binding.gyp', 'fsevents.cc', 'build/**', 'src/**', 'test/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('vscode-sqlite3', ['binding.gyp', 'benchmark/**', 'cloudformation/**', 'deps/**', 'test/**', 'build/**', 'src/**'], ['build/Release/*.node']))
.pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/*.node', 'src/*.js']))
.pipe(util.cleanNodeModule('windows-mutex', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/*.node']))
.pipe(util.cleanNodeModule('native-is-elevated', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/*.node']))
.pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['build/Release/*.node']))
.pipe(util.cleanNodeModule('spdlog', ['binding.gyp', 'build/**', 'deps/**', 'src/**', 'test/**'], ['build/Release/*.node']))
.pipe(util.cleanNodeModule('jschardet', ['dist/**']))
.pipe(util.cleanNodeModule('windows-foreground-love', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('windows-process-tree', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/*.node', 'src/index.js']))
.pipe(util.cleanNodeModule('keytar', ['binding.gyp', 'build/**', 'src/**', 'script/**', 'node_modules/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/*.exe', 'build/Release/*.dll', 'build/Release/*.node']))
.pipe(util.cleanNodeModule('vscode-nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['build/Release/*.node', '**/*.a']))
.pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']))
.pipe(util.cleanNodeModule('vscode-windows-ca-certs', ['**/*'], ['package.json', '**/*.node']))
.pipe(util.cleanNodeModule('node-addon-api', ['**/*']))
.pipe(util.cleanNodeModules(path.join(__dirname, '.nativeignore')))
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'], 'app/node_modules.asar'));
let all = es.merge(
@ -346,6 +219,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
productJsonStream,
license,
api,
telemetry,
sources,
deps
);
@ -395,9 +269,17 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
.pipe(util.skipDirectories())
.pipe(util.fixWin32DirectoryPermissions())
.pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true })))
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version'], { dot: true }));
// result = es.merge(result, gulp.src('resources/completions/**', { base: '.' }));
if (platform === 'linux') {
result = es.merge(result, gulp.src('resources/completions/bash/code', { base: '.' })
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename(function (f) { f.basename = product.applicationName; })));
result = es.merge(result, gulp.src('resources/completions/zsh/_code', { base: '.' })
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename(function (f) { f.basename = '_' + product.applicationName; })));
}
if (platform === 'win32') {
result = es.merge(result, gulp.src('resources/win32/bin/code.js', { base: 'resources/win32', allowEmpty: true }));
@ -408,14 +290,19 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
result = es.merge(result, gulp.src('resources/win32/bin/code.sh', { base: 'resources/win32' })
.pipe(replace('@@NAME@@', product.nameShort))
.pipe(replace('@@PRODNAME@@', product.nameLong))
.pipe(replace('@@VERSION@@', version))
.pipe(replace('@@COMMIT@@', commit))
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(replace('@@DATAFOLDER@@', product.dataFolderName))
.pipe(replace('@@QUALITY@@', quality))
.pipe(rename(function (f) { f.basename = product.applicationName; f.extname = ''; })));
result = es.merge(result, gulp.src('resources/win32/VisualElementsManifest.xml', { base: 'resources/win32' })
.pipe(rename(product.nameShort + '.VisualElementsManifest.xml')));
} else if (platform === 'linux') {
result = es.merge(result, gulp.src('resources/linux/bin/code.sh', { base: '.' })
.pipe(replace('@@PRODNAME@@', product.nameLong))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(rename('bin/' + product.applicationName)));
}
@ -454,13 +341,18 @@ BUILD_TARGETS.forEach(buildTarget => {
const sourceFolderName = `out-vscode${dashed(minified)}`;
const destinationFolderName = `VSCode${dashed(platform)}${dashed(arch)}`;
const vscodeTask = task.define(`vscode${dashed(platform)}${dashed(arch)}${dashed(minified)}`, task.series(
task.parallel(
minified ? minifyVSCodeTask : optimizeVSCodeTask,
util.rimraf(path.join(buildRoot, destinationFolderName))
),
const vscodeTaskCI = task.define(`vscode${dashed(platform)}${dashed(arch)}${dashed(minified)}-ci`, task.series(
util.rimraf(path.join(buildRoot, destinationFolderName)),
packageTask(platform, arch, sourceFolderName, destinationFolderName, opts)
));
gulp.task(vscodeTaskCI);
const vscodeTask = task.define(`vscode${dashed(platform)}${dashed(arch)}${dashed(minified)}`, task.series(
compileBuildTask,
compileExtensionsBuildTask,
minified ? minifyVSCodeTask : optimizeVSCodeTask,
vscodeTaskCI
));
gulp.task(vscodeTask);
});
});
@ -489,10 +381,12 @@ const apiToken = process.env.TRANSIFEX_API_TOKEN;
gulp.task(task.define(
'vscode-translations-push',
task.series(
compileBuildTask,
compileExtensionsBuildTask,
optimizeVSCodeTask,
function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*';
const pathToExtensions = '.build/extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(
@ -508,10 +402,12 @@ gulp.task(task.define(
gulp.task(task.define(
'vscode-translations-export',
task.series(
compileBuildTask,
compileExtensionsBuildTask,
optimizeVSCodeTask,
function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*';
const pathToExtensions = '.build/extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(
@ -545,32 +441,6 @@ gulp.task('vscode-translations-import', function () {
}));
});
// Sourcemaps
gulp.task('upload-vscode-sourcemaps', () => {
const vs = gulp.src('out-vscode-min/**/*.map', { base: 'out-vscode-min' })
.pipe(es.mapSync(f => {
f.path = `${f.base}/core/${f.relative}`;
return f;
}));
const extensionsOut = gulp.src('extensions/**/out/**/*.map', { base: '.' });
const extensionsDist = gulp.src('extensions/**/dist/**/*.map', { base: '.' });
return es.merge(vs, extensionsOut, extensionsDist)
.pipe(es.through(function (data) {
// debug
console.log('Uploading Sourcemap', data.relative);
this.emit('data', data);
}))
.pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY,
container: 'sourcemaps',
prefix: commit + '/'
}));
});
// This task is only run for the MacOS build
const generateVSCodeConfigurationTask = task.define('generate-vscode-configuration', () => {
return new Promise((resolve, reject) => {
@ -663,4 +533,3 @@ function getSettingsSearchBuildId(packageJson) {
throw new Error('Could not determine build number: ' + e.toString());
}
}
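Near the top of this gulpfile's diff, packageTask computes checksums for the renamed workbench.desktop.main.js/.css bundles and the workbench HTML/JS shims. The computeChecksums helper itself sits outside the hunks shown here, so the following is only a generic sketch of how such a helper can be written with Node's crypto module; the hash algorithm and output encoding are assumptions, not read from the build:

// Generic checksum helper (sketch); not the repository's computeChecksums implementation.
const crypto = require('crypto');
const fs = require('fs');
const path = require('path');

function computeChecksum(filename) {
	const contents = fs.readFileSync(filename);
	return crypto.createHash('md5').update(contents).digest('base64'); // algorithm/encoding assumed
}

function computeChecksums(out, filenames) {
	const result = {};
	for (const filename of filenames) {
		result[filename] = computeChecksum(path.join(out, filename));
	}
	return result;
}

// e.g. computeChecksums('out-vscode-min', ['vs/workbench/workbench.desktop.main.js']);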

View file

@ -23,7 +23,7 @@ const commit = util.getVersion(root);
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);
function getDebPackageArch(arch) {
return { x64: 'amd64', ia32: 'i386', arm: 'armhf', arm64: "arm64" }[arch];
return { x64: 'amd64', arm: 'armhf', arm64: "arm64" }[arch];
}
function prepareDebPackage(arch) {
@ -42,7 +42,8 @@ function prepareDebPackage(arch) {
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@EXEC@@', `/usr/share/${product.applicationName}/${product.applicationName}`))
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
@ -54,11 +55,13 @@ function prepareDebPackage(arch) {
const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename('usr/share/pixmaps/' + product.linuxIconName + '.png'));
// const bash_completion = gulp.src('resources/completions/bash/code')
// .pipe(rename('usr/share/bash-completion/completions/code'));
const bash_completion = gulp.src('resources/completions/bash/code')
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename('usr/share/bash-completion/completions/' + product.applicationName));
// const zsh_completion = gulp.src('resources/completions/zsh/_code')
// .pipe(rename('usr/share/zsh/vendor-completions/_code'));
const zsh_completion = gulp.src('resources/completions/zsh/_code')
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename('usr/share/zsh/vendor-completions/_' + product.applicationName));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; }));
@ -94,7 +97,7 @@ function prepareDebPackage(arch) {
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(rename('DEBIAN/postinst'));
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, icon, /* bash_completion, zsh_completion, */ code);
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, icon, bash_completion, zsh_completion, code);
return all.pipe(vfs.dest(destination));
};
@ -114,7 +117,7 @@ function getRpmBuildPath(rpmArch) {
}
function getRpmPackageArch(arch) {
return { x64: 'x86_64', ia32: 'i386', arm: 'armhf', arm64: "arm64" }[arch];
return { x64: 'x86_64', arm: 'armhf', arm64: "arm64" }[arch];
}
function prepareRpmPackage(arch) {
@ -132,6 +135,7 @@ function prepareRpmPackage(arch) {
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@EXEC@@', `/usr/share/${product.applicationName}/${product.applicationName}`))
.pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
@ -144,11 +148,13 @@ function prepareRpmPackage(arch) {
const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename('BUILD/usr/share/pixmaps/' + product.linuxIconName + '.png'));
// const bash_completion = gulp.src('resources/completions/bash/code')
// .pipe(rename('BUILD/usr/share/bash-completion/completions/code'));
const bash_completion = gulp.src('resources/completions/bash/code')
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename('BUILD/usr/share/bash-completion/completions/' + product.applicationName));
// const zsh_completion = gulp.src('resources/completions/zsh/_code')
// .pipe(rename('BUILD/usr/share/zsh/site-functions/_code'));
const zsh_completion = gulp.src('resources/completions/zsh/_code')
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename('BUILD/usr/share/zsh/site-functions/_' + product.applicationName));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = 'BUILD/usr/share/' + product.applicationName + '/' + p.dirname; }));
@ -171,7 +177,7 @@ function prepareRpmPackage(arch) {
const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' })
.pipe(rename('SOURCES/' + product.applicationName + '.xpm'));
const all = es.merge(code, desktops, appdata, icon, /* bash_completion, zsh_completion, */ spec, specIcon);
const all = es.merge(code, desktops, appdata, icon, bash_completion, zsh_completion, spec, specIcon);
return all.pipe(vfs.dest(getRpmBuildPath(rpmArch)));
};
@ -199,21 +205,25 @@ function prepareSnapPackage(arch) {
const destination = getSnapBuildPath(arch);
return function () {
// A desktop file that is placed in snap/gui will be placed into meta/gui verbatim.
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(rename(`usr/share/applications/${product.applicationName}.desktop`));
.pipe(rename(`snap/gui/${product.applicationName}.desktop`));
// A desktop file that is placed in snap/gui will be placed into meta/gui verbatim.
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
.pipe(rename(`usr/share/applications/${product.applicationName}-url-handler.desktop`));
.pipe(rename(`snap/gui/${product.applicationName}-url-handler.desktop`));
const desktops = es.merge(desktop, desktopUrlHandler)
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
.pipe(replace('@@EXEC@@', `${product.applicationName} --force-user-env`))
.pipe(replace('@@ICON@@', `\${SNAP}/meta/gui/${product.linuxIconName}.png`))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
// An icon that is placed in snap/gui will be placed into meta/gui verbatim.
const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename(`usr/share/pixmaps/${product.linuxIconName}.png`));
.pipe(rename(`snap/gui/${product.linuxIconName}.png`));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = `usr/share/${product.applicationName}/${p.dirname}`; }));
@ -234,11 +244,11 @@ function prepareSnapPackage(arch) {
function buildSnapPackage(arch) {
const snapBuildPath = getSnapBuildPath(arch);
return shell.task(`cd ${snapBuildPath} && snapcraft build`);
// Default target for snapcraft runs: pull, build, stage and prime, and finally assembles the snap.
return shell.task(`cd ${snapBuildPath} && snapcraft`);
}
const BUILD_TARGETS = [
{ arch: 'ia32' },
{ arch: 'x64' },
{ arch: 'arm' },
{ arch: 'arm64' },

View file

@ -3,11 +3,14 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
.monaco-workbench .activitybar > .content .monaco-action-bar .action-label.extensions {
-webkit-mask: url('extensions-dark.svg') no-repeat 50% 50%;
}
.extensions .split-view-view .panel-header .count-badge-wrapper {
position: absolute;
right: 12px;
}
'use strict';
const gulp = require('gulp');
const noop = () => { return Promise.resolve(); };
gulp.task('minify-vscode-web', noop);
gulp.task('vscode-web', noop);
gulp.task('vscode-web-min', noop);
gulp.task('vscode-web-ci', noop);
gulp.task('vscode-web-min-ci', noop);

View file

@ -25,7 +25,7 @@ const zipDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'archive')
const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
const issPath = path.join(__dirname, 'win32', 'code.iss');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup-compiler'))), 'bin', 'ISCC.exe');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup'))), 'bin', 'ISCC.exe');
const signPS1 = path.join(repoPath, 'build', 'azure-pipelines', 'win32', 'sign.ps1');
function packageInnoSetup(iss, options, cb) {
@ -135,12 +135,17 @@ function copyInnoUpdater(arch) {
};
}
function patchInnoUpdater(arch) {
function updateIcon(executablePath) {
return cb => {
const icon = path.join(repoPath, 'resources', 'win32', 'code.ico');
rcedit(path.join(buildPath(arch), 'tools', 'inno_updater.exe'), { icon }, cb);
rcedit(executablePath, { icon }, cb);
};
}
gulp.task(task.define('vscode-win32-ia32-inno-updater', task.series(copyInnoUpdater('ia32'), patchInnoUpdater('ia32'))));
gulp.task(task.define('vscode-win32-x64-inno-updater', task.series(copyInnoUpdater('x64'), patchInnoUpdater('x64'))));
gulp.task(task.define('vscode-win32-ia32-inno-updater', task.series(copyInnoUpdater('ia32'), updateIcon(path.join(buildPath('ia32'), 'tools', 'inno_updater.exe')))));
gulp.task(task.define('vscode-win32-x64-inno-updater', task.series(copyInnoUpdater('x64'), updateIcon(path.join(buildPath('x64'), 'tools', 'inno_updater.exe')))));
// CodeHelper.exe icon
gulp.task(task.define('vscode-win32-ia32-code-helper', task.series(updateIcon(path.join(buildPath('ia32'), 'resources', 'app', 'out', 'vs', 'platform', 'files', 'node', 'watcher', 'win32', 'CodeHelper.exe')))));
gulp.task(task.define('vscode-win32-x64-code-helper', task.series(updateIcon(path.join(buildPath('x64'), 'resources', 'app', 'out', 'vs', 'platform', 'files', 'node', 'watcher', 'win32', 'CodeHelper.exe')))));

View file

@ -11,7 +11,6 @@ const bom = require("gulp-bom");
const sourcemaps = require("gulp-sourcemaps");
const tsb = require("gulp-tsb");
const path = require("path");
const _ = require("underscore");
const monacodts = require("../monaco/api");
const nls = require("./nls");
const reporter_1 = require("./reporter");
@ -22,14 +21,7 @@ const watch = require('./watch');
const reporter = reporter_1.createReporter();
function getTypeScriptCompilerOptions(src) {
const rootDir = path.join(__dirname, `../../${src}`);
const tsconfig = require(`../../${src}/tsconfig.json`);
let options;
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
}
else {
options = tsconfig.compilerOptions;
}
let options = {};
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@ -38,15 +30,14 @@ function getTypeScriptCompilerOptions(src) {
options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1;
return options;
}
function createCompile(src, build, emitError) {
const opts = _.clone(getTypeScriptCompilerOptions(src));
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
return function (token) {
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = Object.assign(Object.assign({}, getTypeScriptCompilerOptions(src)), { inlineSources: Boolean(build) });
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
function pipeline(token) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
@ -57,30 +48,28 @@ function createCompile(src, build, emitError) {
.pipe(utf8Filter.restore)
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(ts(token))
.pipe(compilation(token))
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: opts.sourceRoot
sourceRoot: overrideOptions.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError));
return es.duplex(input, output);
}
pipeline.tsProjectSrc = () => {
return compilation.src({ base: src });
};
return pipeline;
}
const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
function compileTask(src, out, build) {
return function () {
const compile = createCompile(src, build, true);
const srcPipe = es.merge(gulp.src(`${src}/**`, { base: `${src}` }), gulp.src(typesDts));
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
let generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
@ -95,8 +84,8 @@ exports.compileTask = compileTask;
function watchTask(out, build) {
return function () {
const compile = createCompile('src', build);
const src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src(typesDts));
const watchSrc = watch('src/**', { base: 'src' });
const src = gulp.src('src/**', { base: 'src' });
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
let generator = new MonacoGenerator(true);
generator.execute();
return watchSrc
@ -112,7 +101,6 @@ class MonacoGenerator {
this._executeSoonTimer = null;
this._isWatch = isWatch;
this.stream = es.through();
this._watchers = [];
this._watchedFiles = {};
let onWillReadFile = (moduleId, filePath) => {
if (!this._isWatch) {
@ -122,26 +110,10 @@ class MonacoGenerator {
return;
}
this._watchedFiles[filePath] = true;
const watcher = fs.watch(filePath);
watcher.addListener('change', () => {
fs.watchFile(filePath, () => {
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
watcher.addListener('error', (err) => {
console.error(`Encountered error while watching ${filePath}.`);
console.log(err);
delete this._watchedFiles[filePath];
for (let i = 0; i < this._watchers.length; i++) {
if (this._watchers[i] === watcher) {
this._watchers.splice(i, 1);
break;
}
}
watcher.close();
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
this._watchers.push(watcher);
};
this._fsProvider = new class extends monacodts.FSProvider {
readFileSync(moduleId, filePath) {
@ -151,11 +123,9 @@ class MonacoGenerator {
};
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
if (this._isWatch) {
const recipeWatcher = fs.watch(monacodts.RECIPE_PATH);
recipeWatcher.addListener('change', () => {
fs.watchFile(monacodts.RECIPE_PATH, () => {
this._executeSoon();
});
this._watchers.push(recipeWatcher);
}
}
_executeSoon() {
@ -168,9 +138,6 @@ class MonacoGenerator {
this.execute();
}, 20);
}
dispose() {
this._watchers.forEach(watcher => watcher.close());
}
_run() {
let r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {

View file

@ -12,27 +12,21 @@ import * as bom from 'gulp-bom';
import * as sourcemaps from 'gulp-sourcemaps';
import * as tsb from 'gulp-tsb';
import * as path from 'path';
import * as _ from 'underscore';
import * as monacodts from '../monaco/api';
import * as nls from './nls';
import { createReporter } from './reporter';
import * as util from './util';
import * as fancyLog from 'fancy-log';
import * as ansiColors from 'ansi-colors';
import ts = require('typescript');
const watch = require('./watch');
const reporter = createReporter();
function getTypeScriptCompilerOptions(src: string) {
function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions {
const rootDir = path.join(__dirname, `../../${src}`);
const tsconfig = require(`../../${src}/tsconfig.json`);
let options: { [key: string]: any };
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
} else {
options = tsconfig.compilerOptions;
}
let options: ts.CompilerOptions = {};
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@ -41,18 +35,17 @@ function getTypeScriptCompilerOptions(src: string) {
options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1;
return options;
}
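The numeric newLine values above correspond to TypeScript's ts.NewLineKind enum (CarriageReturnLineFeed = 0, LineFeed = 1). A minimal illustrative sketch, not part of the commit, spelling the same check with the enum members:
import * as fs from 'fs';
import * as ts from 'typescript';
// Same decision as `options.newLine = /\r\n/.test(...) ? 0 : 1;` above, written against the enum.
const newLine: ts.NewLineKind = /\r\n/.test(fs.readFileSync(__filename, 'utf8'))
	? ts.NewLineKind.CarriageReturnLineFeed // numeric value 0
	: ts.NewLineKind.LineFeed;              // numeric value 1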
function createCompile(src: string, build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
const opts = _.clone(getTypeScriptCompilerOptions(src));
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
function createCompile(src: string, build: boolean, emitError?: boolean) {
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) };
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
return function (token?: util.ICancellationToken) {
function pipeline(token?: util.ICancellationToken) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
@ -65,39 +58,31 @@ function createCompile(src: string, build: boolean, emitError?: boolean): (token
.pipe(utf8Filter.restore)
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(ts(token))
.pipe(compilation(token))
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: opts.sourceRoot
sourceRoot: overrideOptions.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError));
return es.duplex(input, output);
}
pipeline.tsProjectSrc = () => {
return compilation.src({ base: src });
};
return pipeline;
}
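A hypothetical consumer of the returned pipeline (illustrative only; it assumes the snippet sits in this same module so createCompile is in scope, and that 'out' is a plausible destination): the project's file list can now come from the compilation itself via tsProjectSrc instead of a separate glob.
import * as gulp from 'gulp';
// Sketch: compile the 'src' project and write the result to 'out'.
const compile = createCompile('src', /* build */ true, /* emitError */ true);
compile.tsProjectSrc()      // files resolved from src/tsconfig.json by gulp-tsb
	.pipe(compile())        // the TypeScript compilation pipeline defined above
	.pipe(gulp.dest('out'));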
const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {
return function () {
const compile = createCompile(src, build, true);
const srcPipe = es.merge(
gulp.src(`${src}/**`, { base: `${src}` }),
gulp.src(typesDts),
);
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
let generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
@ -115,11 +100,8 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt
return function () {
const compile = createCompile('src', build);
const src = es.merge(
gulp.src('src/**', { base: 'src' }),
gulp.src(typesDts),
);
const watchSrc = watch('src/**', { base: 'src' });
const src = gulp.src('src/**', { base: 'src' });
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
let generator = new MonacoGenerator(true);
generator.execute();
@ -137,7 +119,6 @@ class MonacoGenerator {
private readonly _isWatch: boolean;
public readonly stream: NodeJS.ReadWriteStream;
private readonly _watchers: fs.FSWatcher[];
private readonly _watchedFiles: { [filePath: string]: boolean; };
private readonly _fsProvider: monacodts.FSProvider;
private readonly _declarationResolver: monacodts.DeclarationResolver;
@ -145,7 +126,6 @@ class MonacoGenerator {
constructor(isWatch: boolean) {
this._isWatch = isWatch;
this.stream = es.through();
this._watchers = [];
this._watchedFiles = {};
let onWillReadFile = (moduleId: string, filePath: string) => {
if (!this._isWatch) {
@ -156,26 +136,10 @@ class MonacoGenerator {
}
this._watchedFiles[filePath] = true;
const watcher = fs.watch(filePath);
watcher.addListener('change', () => {
fs.watchFile(filePath, () => {
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
watcher.addListener('error', (err) => {
console.error(`Encountered error while watching ${filePath}.`);
console.log(err);
delete this._watchedFiles[filePath];
for (let i = 0; i < this._watchers.length; i++) {
if (this._watchers[i] === watcher) {
this._watchers.splice(i, 1);
break;
}
}
watcher.close();
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
this._watchers.push(watcher);
};
this._fsProvider = new class extends monacodts.FSProvider {
public readFileSync(moduleId: string, filePath: string): Buffer {
@ -186,11 +150,9 @@ class MonacoGenerator {
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
if (this._isWatch) {
const recipeWatcher = fs.watch(monacodts.RECIPE_PATH);
recipeWatcher.addListener('change', () => {
fs.watchFile(monacodts.RECIPE_PATH, () => {
this._executeSoon();
});
this._watchers.push(recipeWatcher);
}
}
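The switch from fs.watch to fs.watchFile above is what allows the watcher bookkeeping (and the dispose() method removed below) to disappear: fs.watchFile polls via stat and does not emit per-watcher 'error' events that need cleanup. A minimal self-contained sketch of the polling style (the path is made up):
import * as fs from 'fs';
// Poll a declaration file once a second and react when its modification time changes.
fs.watchFile('/tmp/example.d.ts', { interval: 1000 }, (curr, prev) => {
	if (curr.mtimeMs !== prev.mtimeMs) {
		console.log('file changed, invalidate cache and regenerate');
	}
});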
@ -206,10 +168,6 @@ class MonacoGenerator {
}, 20);
}
public dispose(): void {
this._watchers.forEach(watcher => watcher.close());
}
private _run(): monacodts.IMonacoDeclarationResult | null {
let r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {

View file

@ -2,29 +2,115 @@
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const fs = require('fs');
const path = require('path');
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const vfs = require("vinyl-fs");
const filter = require("gulp-filter");
const json = require("gulp-json-editor");
const _ = require("underscore");
const util = require("./util");
const electron = require('gulp-atom-electron');
const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
function getElectronVersion() {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
// @ts-ignore
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target;
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target;
}
exports.getElectronVersion = getElectronVersion;
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
exports.config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["bat", "cmd"], 'resources/darwin/bat.icns'),
darwinBundleDocumentType(["bowerrc"], 'resources/darwin/bower.icns'),
darwinBundleDocumentType(["c", "h"], 'resources/darwin/c.icns'),
darwinBundleDocumentType(["config", "editorconfig", "gitattributes", "gitconfig", "gitignore", "ini"], 'resources/darwin/config.icns'),
darwinBundleDocumentType(["cc", "cpp", "cxx", "hh", "hpp", "hxx"], 'resources/darwin/cpp.icns'),
darwinBundleDocumentType(["cs", "csx"], 'resources/darwin/csharp.icns'),
darwinBundleDocumentType(["css"], 'resources/darwin/css.icns'),
darwinBundleDocumentType(["go"], 'resources/darwin/go.icns'),
darwinBundleDocumentType(["asp", "aspx", "cshtml", "htm", "html", "jshtm", "jsp", "phtml", "shtml"], 'resources/darwin/html.icns'),
darwinBundleDocumentType(["jade"], 'resources/darwin/jade.icns'),
darwinBundleDocumentType(["jav", "java"], 'resources/darwin/java.icns'),
darwinBundleDocumentType(["js", "jscsrc", "jshintrc", "mjs"], 'resources/darwin/javascript.icns'),
darwinBundleDocumentType(["json"], 'resources/darwin/json.icns'),
darwinBundleDocumentType(["less"], 'resources/darwin/less.icns'),
darwinBundleDocumentType(["markdown", "md", "mdoc", "mdown", "mdtext", "mdtxt", "mdwn", "mkd", "mkdn"], 'resources/darwin/markdown.icns'),
darwinBundleDocumentType(["php"], 'resources/darwin/php.icns'),
darwinBundleDocumentType(["ps1", "psd1", "psm1"], 'resources/darwin/powershell.icns'),
darwinBundleDocumentType(["py"], 'resources/darwin/python.icns'),
darwinBundleDocumentType(["gemspec", "rb"], 'resources/darwin/ruby.icns'),
darwinBundleDocumentType(["scss"], 'resources/darwin/sass.icns'),
darwinBundleDocumentType(["bash", "bash_login", "bash_logout", "bash_profile", "bashrc", "profile", "rhistory", "rprofile", "sh", "zlogin", "zlogout", "zprofile", "zsh", "zshenv", "zshrc"], 'resources/darwin/shell.icns'),
darwinBundleDocumentType(["sql"], 'resources/darwin/sql.icns'),
darwinBundleDocumentType(["ts"], 'resources/darwin/typescript.icns'),
darwinBundleDocumentType(["tsx", "jsx"], 'resources/darwin/react.icns'),
darwinBundleDocumentType(["vue"], 'resources/darwin/vue.icns'),
darwinBundleDocumentType(["ascx", "csproj", "dtd", "wxi", "wxl", "wxs", "xml", "xaml"], 'resources/darwin/xml.icns'),
darwinBundleDocumentType(["eyaml", "eyml", "yaml", "yml"], 'resources/darwin/yaml.icns'),
darwinBundleDocumentType(["clj", "cljs", "cljx", "clojure", "code-workspace", "coffee", "ctp", "dockerfile", "dot", "edn", "fs", "fsi", "fsscript", "fsx", "handlebars", "hbs", "lua", "m", "makefile", "ml", "mli", "pl", "pl6", "pm", "pm6", "pod", "pp", "properties", "psgi", "pug", "r", "rs", "rt", "svg", "svgz", "t", "txt", "vb", "xcodeproj", "xcworkspace"], 'resources/darwin/default.icns')
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
repo: product.electronRepository || undefined
};
function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, exports.config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return vfs.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
async function main(arch = process.arch) {
const version = getElectronVersion();
const electronPath = path.join(root, '.build', 'electron');
const versionFile = path.join(electronPath, 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
if (!isUpToDate) {
await util.rimraf(electronPath)();
await util.streamToPromise(getElectron(arch)());
}
}
module.exports.getElectronVersion = getElectronVersion;
// returns 0 if the right version of electron is in .build/electron
// @ts-ignore
if (require.main === module) {
const version = getElectronVersion();
const versionFile = path.join(root, '.build', 'electron', 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `v${version}`;
process.exit(isUpToDate ? 0 : 1);
main(process.argv[2]).catch(err => {
console.error(err);
process.exit(1);
});
}

127
build/lib/electron.ts Normal file
View file

@ -0,0 +1,127 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as path from 'path';
import * as vfs from 'vinyl-fs';
import * as filter from 'gulp-filter';
import * as json from 'gulp-json-editor';
import * as _ from 'underscore';
import * as util from './util';
const electron = require('gulp-atom-electron');
const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
export function getElectronVersion(): string {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)![1];
return target;
}
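For reference, the regular expression above expects the repository's .yarnrc to carry a target line; a tiny illustrative check (the version number is made up):
// Sample .yarnrc contents (illustrative):
//   disturl "https://atom.io/download/electron"
//   target "6.1.2"
//   runtime "electron"
const sampleYarnrc = 'disturl "https://atom.io/download/electron"\ntarget "6.1.2"\nruntime "electron"\n';
const match = /^target "(.*)"$/m.exec(sampleYarnrc);
console.log(match && match[1]); // -> 6.1.2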
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions: string[], icon: string) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
export const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["bat", "cmd"], 'resources/darwin/bat.icns'),
darwinBundleDocumentType(["bowerrc"], 'resources/darwin/bower.icns'),
darwinBundleDocumentType(["c", "h"], 'resources/darwin/c.icns'),
darwinBundleDocumentType(["config", "editorconfig", "gitattributes", "gitconfig", "gitignore", "ini"], 'resources/darwin/config.icns'),
darwinBundleDocumentType(["cc", "cpp", "cxx", "hh", "hpp", "hxx"], 'resources/darwin/cpp.icns'),
darwinBundleDocumentType(["cs", "csx"], 'resources/darwin/csharp.icns'),
darwinBundleDocumentType(["css"], 'resources/darwin/css.icns'),
darwinBundleDocumentType(["go"], 'resources/darwin/go.icns'),
darwinBundleDocumentType(["asp", "aspx", "cshtml", "htm", "html", "jshtm", "jsp", "phtml", "shtml"], 'resources/darwin/html.icns'),
darwinBundleDocumentType(["jade"], 'resources/darwin/jade.icns'),
darwinBundleDocumentType(["jav", "java"], 'resources/darwin/java.icns'),
darwinBundleDocumentType(["js", "jscsrc", "jshintrc", "mjs"], 'resources/darwin/javascript.icns'),
darwinBundleDocumentType(["json"], 'resources/darwin/json.icns'),
darwinBundleDocumentType(["less"], 'resources/darwin/less.icns'),
darwinBundleDocumentType(["markdown", "md", "mdoc", "mdown", "mdtext", "mdtxt", "mdwn", "mkd", "mkdn"], 'resources/darwin/markdown.icns'),
darwinBundleDocumentType(["php"], 'resources/darwin/php.icns'),
darwinBundleDocumentType(["ps1", "psd1", "psm1"], 'resources/darwin/powershell.icns'),
darwinBundleDocumentType(["py"], 'resources/darwin/python.icns'),
darwinBundleDocumentType(["gemspec", "rb"], 'resources/darwin/ruby.icns'),
darwinBundleDocumentType(["scss"], 'resources/darwin/sass.icns'),
darwinBundleDocumentType(["bash", "bash_login", "bash_logout", "bash_profile", "bashrc", "profile", "rhistory", "rprofile", "sh", "zlogin", "zlogout", "zprofile", "zsh", "zshenv", "zshrc"], 'resources/darwin/shell.icns'),
darwinBundleDocumentType(["sql"], 'resources/darwin/sql.icns'),
darwinBundleDocumentType(["ts"], 'resources/darwin/typescript.icns'),
darwinBundleDocumentType(["tsx", "jsx"], 'resources/darwin/react.icns'),
darwinBundleDocumentType(["vue"], 'resources/darwin/vue.icns'),
darwinBundleDocumentType(["ascx", "csproj", "dtd", "wxi", "wxl", "wxs", "xml", "xaml"], 'resources/darwin/xml.icns'),
darwinBundleDocumentType(["eyaml", "eyml", "yaml", "yml"], 'resources/darwin/yaml.icns'),
darwinBundleDocumentType(["clj", "cljs", "cljx", "clojure", "code-workspace", "coffee", "ctp", "dockerfile", "dot", "edn", "fs", "fsi", "fsscript", "fsx", "handlebars", "hbs", "lua", "m", "makefile", "ml", "mli", "pl", "pl6", "pm", "pm6", "pod", "pp", "properties", "psgi", "pug", "r", "rs", "rt", "svg", "svgz", "t", "txt", "vb", "xcodeproj", "xcworkspace"], 'resources/darwin/default.icns')
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
repo: product.electronRepository || undefined
};
function getElectron(arch: string): () => NodeJS.ReadWriteStream {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return vfs.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
async function main(arch = process.arch): Promise<void> {
const version = getElectronVersion();
const electronPath = path.join(root, '.build', 'electron');
const versionFile = path.join(electronPath, 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
if (!isUpToDate) {
await util.rimraf(electronPath)();
await util.streamToPromise(getElectron(arch)());
}
}
if (require.main === module) {
main(process.argv[2]).catch(err => {
console.error(err);
process.exit(1);
});
}

View file

@ -13,7 +13,7 @@ const File = require("vinyl");
const vsce = require("vsce");
const stats_1 = require("./stats");
const util2 = require("./util");
const remote = require("gulp-remote-src");
const remote = require("gulp-remote-retry-src");
const vzip = require('gulp-vinyl-zip');
const filter = require("gulp-filter");
const rename = require("gulp-rename");
@ -23,17 +23,26 @@ const buffer = require('gulp-buffer');
const json = require("gulp-json-editor");
const webpack = require('webpack');
const webpackGulp = require('webpack-stream');
const root = path.resolve(path.join(__dirname, '..', '..'));
function fromLocal(extensionPath, sourceMappingURLBase) {
const util = require('./util');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
function fromLocal(extensionPath) {
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
if (fs.existsSync(webpackFilename)) {
return fromLocalWebpack(extensionPath, sourceMappingURLBase);
}
else {
return fromLocalNormal(extensionPath);
}
const input = fs.existsSync(webpackFilename)
? fromLocalWebpack(extensionPath)
: fromLocalNormal(extensionPath);
const tmLanguageJsonFilter = filter('**/*.tmLanguage.json', { restore: true });
return input
.pipe(tmLanguageJsonFilter)
.pipe(buffer())
.pipe(es.mapSync((f) => {
f.contents = Buffer.from(JSON.stringify(JSON.parse(f.contents.toString('utf8'))));
return f;
}))
.pipe(tmLanguageJsonFilter.restore);
}
function fromLocalWebpack(extensionPath, sourceMappingURLBase) {
function fromLocalWebpack(extensionPath) {
const result = es.through();
const packagedDependencies = [];
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
@ -78,7 +87,7 @@ function fromLocalWebpack(extensionPath, sourceMappingURLBase) {
return data;
}))
.pipe(packageJsonFilter.restore);
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => () => {
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => {
const webpackDone = (err, stats) => {
fancyLog(`Bundled extension: ${ansiColors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
if (err) {
@ -92,7 +101,7 @@ function fromLocalWebpack(extensionPath, sourceMappingURLBase) {
result.emit('error', compilation.warnings.join('\n'));
}
};
const webpackConfig = Object.assign({}, require(webpackConfigPath), { mode: 'production' });
const webpackConfig = Object.assign(Object.assign({}, require(webpackConfigPath)), { mode: 'production' });
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
@ -104,22 +113,14 @@ function fromLocalWebpack(extensionPath, sourceMappingURLBase) {
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
if (sourceMappingURLBase) {
const contents = data.contents.toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
if (/\.js\.map$/.test(data.path)) {
if (!fs.existsSync(path.dirname(data.path))) {
fs.mkdirSync(path.dirname(data.path));
}
fs.writeFileSync(data.path, data.contents);
}
}
const contents = data.contents.toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
this.emit('data', data);
}));
});
es.merge(sequence(webpackStreams), patchFilesStream)
es.merge(...webpackStreams, patchFilesStream)
// .pipe(es.through(function (data) {
// // debug
// console.log('out', data.path, data.contents.length);
@ -180,34 +181,12 @@ exports.fromMarketplace = fromMarketplace;
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests',
'vscode-test-resolver',
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
];
const builtInExtensions = require('../builtInExtensions.json');
/**
* We're doing way too much stuff at once, with webpack et al. So much stuff
* that while downloading extensions from the marketplace, node js doesn't get enough
* stack frames to complete the download in under 2 minutes, at which point the
* marketplace server cuts off the http request. So, we sequentialize the extension tasks.
*/
function sequence(streamProviders) {
const result = es.through();
function pop() {
if (streamProviders.length === 0) {
result.emit('end');
}
else {
const fn = streamProviders.shift();
fn()
.on('end', function () { setTimeout(pop, 0); })
.pipe(result, { end: false });
}
}
pop();
return result;
}
function packageExtensionsStream(optsIn) {
const opts = optsIn || {};
function packageLocalExtensionsStream() {
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
@ -215,21 +194,22 @@ function packageExtensionsStream(optsIn) {
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name));
const localExtensions = () => sequence([...localExtensionDescriptions.map(extension => () => {
return fromLocal(extension.path, opts.sourceMappingURLBase)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
})]);
const localExtensionDependencies = () => gulp.src('extensions/node_modules/**', { base: '.' });
const marketplaceExtensions = () => es.merge(...builtInExtensions
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
.map(extension => {
const nodeModules = gulp.src('extensions/node_modules/**', { base: '.' });
const localExtensions = localExtensionDescriptions.map(extension => {
return fromLocal(extension.path)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
});
return es.merge(nodeModules, ...localExtensions)
.pipe(util2.setExecutableBit(['**/*.sh']));
}
exports.packageLocalExtensionsStream = packageLocalExtensionsStream;
function packageMarketplaceExtensionsStream() {
const extensions = builtInExtensions.map(extension => {
return fromMarketplace(extension.name, extension.version, extension.metadata)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
}));
return sequence([localExtensions, localExtensionDependencies, marketplaceExtensions])
.pipe(util2.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
});
return es.merge(extensions)
.pipe(util2.setExecutableBit(['**/*.sh']));
}
exports.packageExtensionsStream = packageExtensionsStream;
exports.packageMarketplaceExtensionsStream = packageMarketplaceExtensionsStream;

View file

@ -13,7 +13,7 @@ import * as File from 'vinyl';
import * as vsce from 'vsce';
import { createStatsStream } from './stats';
import * as util2 from './util';
import remote = require('gulp-remote-src');
import remote = require('gulp-remote-retry-src');
const vzip = require('gulp-vinyl-zip');
import filter = require('gulp-filter');
import rename = require('gulp-rename');
@ -23,19 +23,30 @@ const buffer = require('gulp-buffer');
import json = require('gulp-json-editor');
const webpack = require('webpack');
const webpackGulp = require('webpack-stream');
const util = require('./util');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
const root = path.resolve(path.join(__dirname, '..', '..'));
function fromLocal(extensionPath: string, sourceMappingURLBase?: string): Stream {
function fromLocal(extensionPath: string): Stream {
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
if (fs.existsSync(webpackFilename)) {
return fromLocalWebpack(extensionPath, sourceMappingURLBase);
} else {
return fromLocalNormal(extensionPath);
}
const input = fs.existsSync(webpackFilename)
? fromLocalWebpack(extensionPath)
: fromLocalNormal(extensionPath);
const tmLanguageJsonFilter = filter('**/*.tmLanguage.json', { restore: true });
return input
.pipe(tmLanguageJsonFilter)
.pipe(buffer())
.pipe(es.mapSync((f: File) => {
f.contents = Buffer.from(JSON.stringify(JSON.parse(f.contents.toString('utf8'))));
return f;
}))
.pipe(tmLanguageJsonFilter.restore);
}
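The new *.tmLanguage.json filter above simply round-trips grammar files through JSON.parse/JSON.stringify, which strips formatting whitespace before the extension is packaged. A minimal illustration (the grammar content is made up):
const prettyGrammar = `{
	"scopeName": "source.example",
	"patterns": []
}`;
console.log(JSON.stringify(JSON.parse(prettyGrammar)));
// -> {"scopeName":"source.example","patterns":[]}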
function fromLocalWebpack(extensionPath: string, sourceMappingURLBase: string | undefined): Stream {
function fromLocalWebpack(extensionPath: string): Stream {
const result = es.through();
const packagedDependencies: string[] = [];
@ -91,7 +102,7 @@ function fromLocalWebpack(extensionPath: string, sourceMappingURLBase: string |
.pipe(packageJsonFilter.restore);
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => () => {
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => {
const webpackDone = (err: any, stats: any) => {
fancyLog(`Bundled extension: ${ansiColors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
@ -123,24 +134,16 @@ function fromLocalWebpack(extensionPath: string, sourceMappingURLBase: string |
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
if (sourceMappingURLBase) {
const contents = (<Buffer>data.contents).toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
const contents = (<Buffer>data.contents).toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
if (/\.js\.map$/.test(data.path)) {
if (!fs.existsSync(path.dirname(data.path))) {
fs.mkdirSync(path.dirname(data.path));
}
fs.writeFileSync(data.path, data.contents);
}
}
this.emit('data', data);
}));
});
es.merge(sequence(webpackStreams), patchFilesStream)
es.merge(...webpackStreams, patchFilesStream)
// .pipe(es.through(function (data) {
// // debug
// console.log('out', data.path, data.contents.length);
@ -210,17 +213,10 @@ export function fromMarketplace(extensionName: string, version: string, metadata
.pipe(packageJsonFilter.restore);
}
interface IPackageExtensionsOptions {
/**
* Set to undefined to package all of them.
*/
desiredExtensions?: string[];
sourceMappingURLBase?: string;
}
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests',
'vscode-test-resolver',
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
];
@ -234,33 +230,7 @@ interface IBuiltInExtension {
const builtInExtensions: IBuiltInExtension[] = require('../builtInExtensions.json');
/**
* We're doing way too much stuff at once, with webpack et al. So much stuff
* that while downloading extensions from the marketplace, node js doesn't get enough
* stack frames to complete the download in under 2 minutes, at which point the
* marketplace server cuts off the http request. So, we sequentialize the extension tasks.
*/
function sequence(streamProviders: { (): Stream }[]): Stream {
const result = es.through();
function pop() {
if (streamProviders.length === 0) {
result.emit('end');
} else {
const fn = streamProviders.shift()!;
fn()
.on('end', function () { setTimeout(pop, 0); })
.pipe(result, { end: false });
}
}
pop();
return result;
}
export function packageExtensionsStream(optsIn?: IPackageExtensionsOptions): NodeJS.ReadWriteStream {
const opts = optsIn || {};
export function packageLocalExtensionsStream(): NodeJS.ReadWriteStream {
const localExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
@ -268,26 +238,24 @@ export function packageExtensionsStream(optsIn?: IPackageExtensionsOptions): Nod
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name));
const localExtensions = () => sequence([...localExtensionDescriptions.map(extension => () => {
return fromLocal(extension.path, opts.sourceMappingURLBase)
const nodeModules = gulp.src('extensions/node_modules/**', { base: '.' });
const localExtensions = localExtensionDescriptions.map(extension => {
return fromLocal(extension.path)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
})]);
});
const localExtensionDependencies = () => gulp.src('extensions/node_modules/**', { base: '.' });
const marketplaceExtensions = () => es.merge(
...builtInExtensions
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
.map(extension => {
return fromMarketplace(extension.name, extension.version, extension.metadata)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
})
);
return sequence([localExtensions, localExtensionDependencies, marketplaceExtensions])
.pipe(util2.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
return es.merge(nodeModules, ...localExtensions)
.pipe(util2.setExecutableBit(['**/*.sh']));
}
export function packageMarketplaceExtensionsStream(): NodeJS.ReadWriteStream {
const extensions = builtInExtensions.map(extension => {
return fromMarketplace(extension.name, extension.version, extension.metadata)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
});
return es.merge(extensions)
.pipe(util2.setExecutableBit(['**/*.sh']));
}
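A hypothetical call site for the two split streams (illustrative; the destination path and gulpfile wiring are assumptions, not part of this diff):
import * as es from 'event-stream';
import * as vfs from 'vinyl-fs';
import { packageLocalExtensionsStream, packageMarketplaceExtensionsStream } from './extensions';
// Package the in-repo extensions and the marketplace built-ins into one output tree.
es.merge(packageLocalExtensionsStream(), packageMarketplaceExtensionsStream())
	.pipe(vfs.dest('.build/extensions'));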

View file

@ -176,6 +176,7 @@ class XLF {
this.buffer.push(line.toString());
}
}
exports.XLF = XLF;
XLF.parsePseudo = function (xlfString) {
return new Promise((resolve) => {
let parser = new xml2js.Parser();
@ -248,7 +249,6 @@ XLF.parse = function (xlfString) {
});
});
};
exports.XLF = XLF;
class Limiter {
constructor(maxDegreeOfParalellism) {
this.maxDegreeOfParalellism = maxDegreeOfParalellism;
@ -580,7 +580,7 @@ function createXlfFilesForExtensions() {
}
return _xlf;
}
gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) {
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) {
if (file.isBuffer()) {
const buffer = file.contents;
const basename = path.basename(file.path);
@ -603,7 +603,7 @@ function createXlfFilesForExtensions() {
}
else if (basename === 'nls.metadata.json') {
const json = JSON.parse(buffer.toString('utf8'));
const relPath = path.relative(`./extensions/${extensionName}`, path.dirname(file.path));
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
for (let file in json) {
const fileContent = json[file];
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
@ -906,8 +906,8 @@ function pullCoreAndExtensionsXlfFiles(apiHostname, username, password, language
_coreAndExtensionResources.push(...json.workbench);
// extensions
let extensionsToLocalize = Object.create(null);
glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('.build/extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('.build/extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
Object.keys(extensionsToLocalize).forEach(extension => {
_coreAndExtensionResources.push({ name: extension, project: extensionsProject });
});
@ -1080,7 +1080,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
this.queue(translatedMainFile);
for (let extension in extensionsPacks) {
const translatedExtFile = createI18nFile(`./extensions/${extension}`, extensionsPacks[extension]);
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile);
const externalExtensionId = externalExtensions[extension];
if (externalExtensionId) {

View file

@ -38,10 +38,6 @@
"name": "vs/workbench/contrib/codeEditor",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/codeinset",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/callHierarchy",
"project": "vscode-workbench"
@ -110,6 +106,14 @@
"name": "vs/workbench/contrib/quickopen",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/userData",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/remote",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/relauncher",
"project": "vscode-workbench"
@ -170,6 +174,10 @@
"name": "vs/workbench/contrib/webview",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/customEditor",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/welcome",
"project": "vscode-workbench"
@ -178,6 +186,10 @@
"name": "vs/workbench/contrib/outline",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/userDataSync",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/actions",
"project": "vscode-workbench"
@ -222,10 +234,6 @@
"name": "vs/workbench/services/files",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/files2",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/integrity",
"project": "vscode-workbench"
@ -234,6 +242,10 @@
"name": "vs/workbench/services/keybinding",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/lifecycle",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/mode",
"project": "vscode-workbench"
@ -259,7 +271,7 @@
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/workspace",
"name": "vs/workbench/services/workspaces",
"project": "vscode-workbench"
},
{
@ -273,6 +285,14 @@
{
"name": "vs/workbench/services/preferences",
"project": "vscode-preferences"
},
{
"name": "vs/workbench/services/notification",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/userData",
"project": "vscode-workbench"
}
]
}

View file

@ -24,8 +24,8 @@ function log(message: any, ...rest: any[]): void {
}
export interface Language {
id: string; // laguage id, e.g. zh-tw, de
translationId?: string; // language id used in translation tools, e.g zh-hant, de (optional, if not set, the id is used)
id: string; // language id, e.g. zh-tw, de
translationId?: string; // language id used in translation tools, e.g. zh-hant, de (optional, if not set, the id is used)
folderName?: string; // language specific folder name, e.g. cht, deu (optional, if not set, the id is used)
}
@ -701,7 +701,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
}
return _xlf;
}
gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(through(function (file: File) {
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(through(function (file: File) {
if (file.isBuffer()) {
const buffer: Buffer = file.contents as Buffer;
const basename = path.basename(file.path);
@ -721,7 +721,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
getXlf().addFile(`extensions/${extensionName}/package`, keys, messages);
} else if (basename === 'nls.metadata.json') {
const json: BundledExtensionFormat = JSON.parse(buffer.toString('utf8'));
const relPath = path.relative(`./extensions/${extensionName}`, path.dirname(file.path));
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
for (let file in json) {
const fileContent = json[file];
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
@ -1045,8 +1045,8 @@ export function pullCoreAndExtensionsXlfFiles(apiHostname: string, username: str
// extensions
let extensionsToLocalize = Object.create(null);
glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('.build/extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('.build/extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
Object.keys(extensionsToLocalize).forEach(extension => {
_coreAndExtensionResources.push({ name: extension, project: extensionsProject });
@ -1245,7 +1245,7 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
this.queue(translatedMainFile);
for (let extension in extensionsPacks) {
const translatedExtFile = createI18nFile(`./extensions/${extension}`, extensionsPacks[extension]);
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile);
const externalExtensionId = externalExtensions[extension];
@ -1370,4 +1370,4 @@ function decodeEntities(value: string): string {
function pseudify(message: string) {
return '\uFF3B' + message.replace(/[aouei]/g, '$&$&') + '\uFF3D';
}
}

15
build/lib/node.js Normal file
View file

@ -0,0 +1,15 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const fs = require("fs");
const root = path.dirname(path.dirname(__dirname));
const yarnrcPath = path.join(root, 'remote', '.yarnrc');
const yarnrc = fs.readFileSync(yarnrcPath, 'utf8');
const version = /^target\s+"([^"]+)"$/m.exec(yarnrc)[1];
const node = process.platform === 'win32' ? 'node.exe' : 'node';
const nodePath = path.join(root, '.build', 'node', `v${version}`, `${process.platform}-${process.arch}`, node);
console.log(nodePath);

16
build/lib/node.ts Normal file
View file

@ -0,0 +1,16 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as path from 'path';
import * as fs from 'fs';
const root = path.dirname(path.dirname(__dirname));
const yarnrcPath = path.join(root, 'remote', '.yarnrc');
const yarnrc = fs.readFileSync(yarnrcPath, 'utf8');
const version = /^target\s+"([^"]+)"$/m.exec(yarnrc)![1];
const node = process.platform === 'win32' ? 'node.exe' : 'node';
const nodePath = path.join(root, '.build', 'node', `v${version}`, `${process.platform}-${process.arch}`, node);
console.log(nodePath);

View file

@ -5,6 +5,7 @@
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream");
const fs = require("fs");
const gulp = require("gulp");
const concat = require("gulp-concat");
const minifyCSS = require("gulp-cssnano");
@ -17,7 +18,7 @@ const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const path = require("path");
const pump = require("pump");
const uglifyes = require("uglify-es");
const terser = require("terser");
const VinylFile = require("vinyl");
const bundle = require("./bundle");
const i18n_1 = require("./i18n");
@ -111,12 +112,17 @@ function toBundleStream(src, bundledFileHeader, bundles) {
return toConcatStream(src, bundledFileHeader, bundle.sources, bundle.dest);
}));
}
const DEFAULT_FILE_HEADER = [
'/*!--------------------------------------------------------',
' * Copyright (C) Microsoft Corporation. All rights reserved.',
' *--------------------------------------------------------*/'
].join('\n');
function optimizeTask(opts) {
const src = opts.src;
const entryPoints = opts.entryPoints;
const resources = opts.resources;
const loaderConfig = opts.loaderConfig;
const bundledFileHeader = opts.header;
const bundledFileHeader = opts.header || DEFAULT_FILE_HEADER;
const bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
const out = opts.out;
return function () {
@ -127,6 +133,14 @@ function optimizeTask(opts) {
if (err || !result) {
return bundlesStream.emit('error', JSON.stringify(err));
}
if (opts.inlineAmdImages) {
try {
result = inlineAmdImages(src, result);
}
catch (err) {
return bundlesStream.emit('error', JSON.stringify(err));
}
}
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
const filteredResources = resources.slice();
@ -162,6 +176,39 @@ function optimizeTask(opts) {
};
}
exports.optimizeTask = optimizeTask;
function inlineAmdImages(src, result) {
for (const outputFile of result.files) {
for (const sourceFile of outputFile.sources) {
if (sourceFile.path && /\.js$/.test(sourceFile.path)) {
sourceFile.contents = sourceFile.contents.replace(/\([^.]+\.registerAndGetAmdImageURL\(([^)]+)\)\)/g, (_, m0) => {
let imagePath = m0;
// remove `` or ''
if ((imagePath.charAt(0) === '`' && imagePath.charAt(imagePath.length - 1) === '`')
|| (imagePath.charAt(0) === '\'' && imagePath.charAt(imagePath.length - 1) === '\'')) {
imagePath = imagePath.substr(1, imagePath.length - 2);
}
if (!/\.(png|svg)$/.test(imagePath)) {
console.log(`original: ${_}`);
return _;
}
const repoLocation = path.join(src, imagePath);
const absoluteLocation = path.join(REPO_ROOT_PATH, repoLocation);
if (!fs.existsSync(absoluteLocation)) {
const message = `Invalid amd image url in file ${sourceFile.path}: ${imagePath}`;
console.log(message);
throw new Error(message);
}
const fileContents = fs.readFileSync(absoluteLocation);
const mime = /\.svg$/.test(imagePath) ? 'image/svg+xml' : 'image/png';
// Mark the file as inlined so we don't ship it by itself
result.cssInlinedResources.push(repoLocation);
return `("data:${mime};base64,${fileContents.toString('base64')}")`;
});
}
}
}
return result;
}
/**
* Wrap around uglify and allow the preserveComments function
* to have a file "context" to include our copyright only once per file.
@ -192,7 +239,7 @@ function uglifyWithCopyrights() {
return false;
};
};
const minify = composer(uglifyes);
const minify = composer(terser);
const input = es.through();
const output = input
.pipe(flatmap((stream, f) => {


@ -6,6 +6,7 @@
'use strict';
import * as es from 'event-stream';
import * as fs from 'fs';
import * as gulp from 'gulp';
import * as concat from 'gulp-concat';
import * as minifyCSS from 'gulp-cssnano';
@ -19,7 +20,7 @@ import * as ansiColors from 'ansi-colors';
import * as path from 'path';
import * as pump from 'pump';
import * as sm from 'source-map';
import * as uglifyes from 'uglify-es';
import * as terser from 'terser';
import * as VinylFile from 'vinyl';
import * as bundle from './bundle';
import { Language, processNlsFiles } from './i18n';
@ -154,11 +155,15 @@ export interface IOptimizeTaskOpts {
/**
* (basically the Copyright treatment)
*/
header: string;
header?: string;
/**
* (emit bundleInfo.json file)
*/
bundleInfo: boolean;
/**
* replace calls to `registerAndGetAmdImageURL` with data uris
*/
inlineAmdImages: boolean;
/**
* (out folder name)
*/
@ -169,12 +174,18 @@ export interface IOptimizeTaskOpts {
languages?: Language[];
}
const DEFAULT_FILE_HEADER = [
'/*!--------------------------------------------------------',
' * Copyright (C) Microsoft Corporation. All rights reserved.',
' *--------------------------------------------------------*/'
].join('\n');
export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
const src = opts.src;
const entryPoints = opts.entryPoints;
const resources = opts.resources;
const loaderConfig = opts.loaderConfig;
const bundledFileHeader = opts.header;
const bundledFileHeader = opts.header || DEFAULT_FILE_HEADER;
const bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
const out = opts.out;
@ -186,6 +197,14 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err || !result) { return bundlesStream.emit('error', JSON.stringify(err)); }
if (opts.inlineAmdImages) {
try {
result = inlineAmdImages(src, result);
} catch (err) {
return bundlesStream.emit('error', JSON.stringify(err));
}
}
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
@ -230,6 +249,42 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
};
}
function inlineAmdImages(src: string, result: bundle.IBundleResult): bundle.IBundleResult {
for (const outputFile of result.files) {
for (const sourceFile of outputFile.sources) {
if (sourceFile.path && /\.js$/.test(sourceFile.path)) {
sourceFile.contents = sourceFile.contents.replace(/\([^.]+\.registerAndGetAmdImageURL\(([^)]+)\)\)/g, (_, m0) => {
let imagePath = m0;
// remove `` or ''
if ((imagePath.charAt(0) === '`' && imagePath.charAt(imagePath.length - 1) === '`')
|| (imagePath.charAt(0) === '\'' && imagePath.charAt(imagePath.length - 1) === '\'')) {
imagePath = imagePath.substr(1, imagePath.length - 2);
}
if (!/\.(png|svg)$/.test(imagePath)) {
console.log(`original: ${_}`);
return _;
}
const repoLocation = path.join(src, imagePath);
const absoluteLocation = path.join(REPO_ROOT_PATH, repoLocation);
if (!fs.existsSync(absoluteLocation)) {
const message = `Invalid amd image url in file ${sourceFile.path}: ${imagePath}`;
console.log(message);
throw new Error(message);
}
const fileContents = fs.readFileSync(absoluteLocation);
const mime = /\.svg$/.test(imagePath) ? 'image/svg+xml' : 'image/png';
// Mark the file as inlined so we don't ship it by itself
result.cssInlinedResources.push(repoLocation);
return `("data:${mime};base64,${fileContents.toString('base64')}")`;
});
}
}
}
return result;
}
declare class FileWithCopyright extends VinylFile {
public __hasOurCopyright: boolean;
}
@ -267,7 +322,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
};
};
const minify = (composer as any)(uglifyes);
const minify = (composer as any)(terser);
const input = es.through();
const output = input
.pipe(flatmap((stream, f) => {
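The new inlineAmdImages step (guarded by opts.inlineAmdImages, with opts.header now falling back to DEFAULT_FILE_HEADER) rewrites registerAndGetAmdImageURL(...) call sites in the bundled output into base64 data URIs and records the images in cssInlinedResources so they are not shipped separately. A rough sketch of the string rewrite on one hypothetical bundled line; the module name, image path and base64 payload are invented, and the file read is replaced by a constant:

// Hypothetical bundled source line before inlining:
const before = "icon.src = (dom.registerAndGetAmdImageURL('vs/base/browser/ui/sample/sample.png'));";

// Same regex as inlineAmdImages above, with fs.readFileSync swapped for a fake payload.
const after = before.replace(/\([^.]+\.registerAndGetAmdImageURL\(([^)]+)\)\)/g, (_, m0) => {
	let imagePath: string = m0;
	if (imagePath.charAt(0) === '\'' && imagePath.charAt(imagePath.length - 1) === '\'') {
		imagePath = imagePath.substr(1, imagePath.length - 2); // strip the quotes
	}
	const mime = /\.svg$/.test(imagePath) ? 'image/svg+xml' : 'image/png';
	const fakeBase64 = 'iVBORw0KGgo='; // placeholder for the real file contents
	return `("data:${mime};base64,${fakeBase64}")`;
});

console.log(after); // icon.src = ("data:image/png;base64,iVBORw0KGgo=");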


@ -31,6 +31,7 @@ function extractEditor(options) {
let compilerOptions;
if (tsConfig.extends) {
compilerOptions = Object.assign({}, require(path.join(options.sourcesRoot, tsConfig.extends)).compilerOptions, tsConfig.compilerOptions);
delete tsConfig.extends;
}
else {
compilerOptions = tsConfig.compilerOptions;
@ -42,6 +43,7 @@ function extractEditor(options) {
compilerOptions.declaration = false;
compilerOptions.moduleResolution = ts.ModuleResolutionKind.Classic;
options.compilerOptions = compilerOptions;
console.log(`Running with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
let result = tss.shake(options);
for (let fileName in result) {
if (result.hasOwnProperty(fileName)) {
@ -128,7 +130,7 @@ function createESMSourcesAndResources2(options) {
write(getDestAbsoluteFilePath(file), JSON.stringify(tsConfig, null, '\t'));
continue;
}
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) {
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file) || /\.ttf$/.test(file)) {
// Transport the files directly
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
continue;
@ -248,35 +250,37 @@ function transportCSS(module, enqueue, write) {
const filename = path.join(SRC_DIR, module);
const fileContents = fs.readFileSync(filename).toString();
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
const inlineResourcesLimit = 300000; //3000; // see https://github.com/Microsoft/monaco-editor/issues/336
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64');
write(module, newContents);
return true;
function _rewriteOrInlineUrls(contents, forceBase64, inlineByteLimit) {
function _rewriteOrInlineUrls(contents, forceBase64) {
return _replaceURL(contents, (url) => {
let imagePath = path.join(path.dirname(module), url);
let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
if (fileContents.length < inlineByteLimit) {
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}
}
return '"data:' + MIME + DATA + '"';
const fontMatch = url.match(/^(.*).ttf\?(.*)$/);
if (fontMatch) {
const relativeFontPath = `${fontMatch[1]}.ttf`; // trim the query parameter
const fontPath = path.join(path.dirname(module), relativeFontPath);
enqueue(fontPath);
return relativeFontPath;
}
enqueue(imagePath);
return url;
const imagePath = path.join(path.dirname(module), url);
const fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}
}
return '"data:' + MIME + DATA + '"';
});
}
function _replaceURL(contents, replacer) {


@ -35,6 +35,7 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
let compilerOptions: { [key: string]: any };
if (tsConfig.extends) {
compilerOptions = Object.assign({}, require(path.join(options.sourcesRoot, tsConfig.extends)).compilerOptions, tsConfig.compilerOptions);
delete tsConfig.extends;
} else {
compilerOptions = tsConfig.compilerOptions;
}
@ -49,6 +50,8 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
options.compilerOptions = compilerOptions;
console.log(`Running with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
let result = tss.shake(options);
for (let fileName in result) {
if (result.hasOwnProperty(fileName)) {
@ -151,7 +154,7 @@ export function createESMSourcesAndResources2(options: IOptions2): void {
continue;
}
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) {
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file) || /\.ttf$/.test(file)) {
// Transport the files directly
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
continue;
@ -287,40 +290,41 @@ function transportCSS(module: string, enqueue: (module: string) => void, write:
const filename = path.join(SRC_DIR, module);
const fileContents = fs.readFileSync(filename).toString();
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
const inlineResourcesLimit = 300000;//3000; // see https://github.com/Microsoft/monaco-editor/issues/336
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64');
write(module, newContents);
return true;
function _rewriteOrInlineUrls(contents: string, forceBase64: boolean, inlineByteLimit: number): string {
function _rewriteOrInlineUrls(contents: string, forceBase64: boolean): string {
return _replaceURL(contents, (url) => {
let imagePath = path.join(path.dirname(module), url);
let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
if (fileContents.length < inlineByteLimit) {
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}
}
return '"data:' + MIME + DATA + '"';
const fontMatch = url.match(/^(.*).ttf\?(.*)$/);
if (fontMatch) {
const relativeFontPath = `${fontMatch[1]}.ttf`; // trim the query parameter
const fontPath = path.join(path.dirname(module), relativeFontPath);
enqueue(fontPath);
return relativeFontPath;
}
enqueue(imagePath);
return url;
const imagePath = path.join(path.dirname(module), url);
const fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}
}
return '"data:' + MIME + DATA + '"';
});
}
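transportCSS no longer takes an inline byte limit: images inside url(...) are always inlined as data URIs, while .ttf references are now copied next to the CSS with their cache-busting query string trimmed. A small sketch of just the font-URL branch; the concrete url() value and hash are hypothetical:

// Hypothetical value found inside url(...) in a CSS module:
const url = 'codicon/codicon.ttf?5d4d76ab'; // hash is made up

const fontMatch = url.match(/^(.*).ttf\?(.*)$/);
if (fontMatch) {
	const relativeFontPath = `${fontMatch[1]}.ttf`; // trim the query parameter
	console.log(relativeFontPath); // -> "codicon/codicon.ttf"
}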


@ -32,7 +32,7 @@ async function _doExecute(task) {
// Always invoke as if it were a callback task
return new Promise((resolve, reject) => {
if (task.length === 1) {
// this is a calback task
// this is a callback task
task((err) => {
if (err) {
return reject(err);


@ -54,7 +54,7 @@ async function _doExecute(task: Task): Promise<void> {
// Always invoke as if it were a callback task
return new Promise((resolve, reject) => {
if (task.length === 1) {
// this is a calback task
// this is a callback task
task((err) => {
if (err) {
return reject(err);
@ -122,4 +122,4 @@ export function define(name: string, task: Task): Task {
task.taskName = name;
task.displayName = name;
return task;
}
}


@ -27,14 +27,14 @@ suite('XLF Parser Tests', () => {
});
test('JSON file source path to Transifex resource match', () => {
const editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench';
const platform = { name: 'vs/platform', project: editorProject }, editorContrib = { name: 'vs/editor/contrib', project: editorProject }, editor = { name: 'vs/editor', project: editorProject }, base = { name: 'vs/base', project: editorProject }, code = { name: 'vs/code', project: workbenchProject }, workbenchParts = { name: 'vs/workbench/contrib/html', project: workbenchProject }, workbenchServices = { name: 'vs/workbench/services/files', project: workbenchProject }, workbench = { name: 'vs/workbench', project: workbenchProject };
const platform = { name: 'vs/platform', project: editorProject }, editorContrib = { name: 'vs/editor/contrib', project: editorProject }, editor = { name: 'vs/editor', project: editorProject }, base = { name: 'vs/base', project: editorProject }, code = { name: 'vs/code', project: workbenchProject }, workbenchParts = { name: 'vs/workbench/contrib/html', project: workbenchProject }, workbenchServices = { name: 'vs/workbench/services/textfile', project: workbenchProject }, workbench = { name: 'vs/workbench', project: workbenchProject };
assert.deepEqual(i18n.getResource('vs/platform/actions/browser/menusExtensionPoint'), platform);
assert.deepEqual(i18n.getResource('vs/editor/contrib/clipboard/browser/clipboard'), editorContrib);
assert.deepEqual(i18n.getResource('vs/editor/common/modes/modesRegistry'), editor);
assert.deepEqual(i18n.getResource('vs/base/common/errorMessage'), base);
assert.deepEqual(i18n.getResource('vs/code/electron-main/window'), code);
assert.deepEqual(i18n.getResource('vs/workbench/contrib/html/browser/webview'), workbenchParts);
assert.deepEqual(i18n.getResource('vs/workbench/services/files/node/fileService'), workbenchServices);
assert.deepEqual(i18n.getResource('vs/workbench/services/textfile/node/testFileService'), workbenchServices);
assert.deepEqual(i18n.getResource('vs/workbench/browser/parts/panel/panelActions'), workbench);
});
});


@ -39,7 +39,7 @@ suite('XLF Parser Tests', () => {
base = { name: 'vs/base', project: editorProject },
code = { name: 'vs/code', project: workbenchProject },
workbenchParts = { name: 'vs/workbench/contrib/html', project: workbenchProject },
workbenchServices = { name: 'vs/workbench/services/files', project: workbenchProject },
workbenchServices = { name: 'vs/workbench/services/textfile', project: workbenchProject },
workbench = { name: 'vs/workbench', project: workbenchProject};
assert.deepEqual(i18n.getResource('vs/platform/actions/browser/menusExtensionPoint'), platform);
@ -48,7 +48,7 @@ suite('XLF Parser Tests', () => {
assert.deepEqual(i18n.getResource('vs/base/common/errorMessage'), base);
assert.deepEqual(i18n.getResource('vs/code/electron-main/window'), code);
assert.deepEqual(i18n.getResource('vs/workbench/contrib/html/browser/webview'), workbenchParts);
assert.deepEqual(i18n.getResource('vs/workbench/services/files/node/fileService'), workbenchServices);
assert.deepEqual(i18n.getResource('vs/workbench/services/textfile/node/testFileService'), workbenchServices);
assert.deepEqual(i18n.getResource('vs/workbench/browser/parts/panel/panelActions'), workbench);
});
});


@ -14,6 +14,17 @@ var ShakeLevel;
ShakeLevel[ShakeLevel["InnerFile"] = 1] = "InnerFile";
ShakeLevel[ShakeLevel["ClassMembers"] = 2] = "ClassMembers";
})(ShakeLevel = exports.ShakeLevel || (exports.ShakeLevel = {}));
function toStringShakeLevel(shakeLevel) {
switch (shakeLevel) {
case 0 /* Files */:
return 'Files (0)';
case 1 /* InnerFile */:
return 'InnerFile (1)';
case 2 /* ClassMembers */:
return 'ClassMembers (2)';
}
}
exports.toStringShakeLevel = toStringShakeLevel;
function printDiagnostics(diagnostics) {
for (const diag of diagnostics) {
let result = '';
@ -394,6 +405,7 @@ function markNodes(languageService, options) {
|| memberName === 'toJSON'
|| memberName === 'toString'
|| memberName === 'dispose' // TODO: keeping all `dispose` methods
|| /^_(.*)Brand$/.test(memberName || '') // TODO: keeping all members ending with `Brand`...
) {
enqueue_black(member);
}
@ -513,10 +525,6 @@ function generateResult(languageService, shakeLevel) {
// keep method
continue;
}
if (/^_(.*)Brand$/.test(member.name.getText())) {
// TODO: keep all members ending with `Brand`...
continue;
}
let pos = member.pos - node.pos;
let end = member.end - node.pos;
toWrite = toWrite.substring(0, pos) + toWrite.substring(end);


@ -17,6 +17,17 @@ export const enum ShakeLevel {
ClassMembers = 2
}
export function toStringShakeLevel(shakeLevel: ShakeLevel): string {
switch(shakeLevel) {
case ShakeLevel.Files:
return 'Files (0)';
case ShakeLevel.InnerFile:
return 'InnerFile (1)';
case ShakeLevel.ClassMembers:
return 'ClassMembers (2)';
}
}
export interface ITreeShakingOptions {
/**
* The full path to the root where sources are.
@ -513,6 +524,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
|| memberName === 'toJSON'
|| memberName === 'toString'
|| memberName === 'dispose'// TODO: keeping all `dispose` methods
|| /^_(.*)Brand$/.test(memberName || '') // TODO: keeping all members ending with `Brand`...
) {
enqueue_black(member);
}
@ -642,10 +654,6 @@ function generateResult(languageService: ts.LanguageService, shakeLevel: ShakeLe
// keep method
continue;
}
if (/^_(.*)Brand$/.test(member.name.getText())) {
// TODO: keep all members ending with `Brand`...
continue;
}
let pos = member.pos - node.pos;
let end = member.end - node.pos;
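The treeshaking change adds toStringShakeLevel for the new "Running with shakeLevel ..." log line and moves the `_...Brand` special case from output generation into the marking phase, so such members are kept by marking rather than skipped while emitting. A standalone sketch of the logging helper, with the enum inlined from the hunk above:

enum ShakeLevel {
	Files = 0,
	InnerFile = 1,
	ClassMembers = 2
}

function toStringShakeLevel(shakeLevel: ShakeLevel): string {
	switch (shakeLevel) {
		case ShakeLevel.Files: return 'Files (0)';
		case ShakeLevel.InnerFile: return 'InnerFile (1)';
		case ShakeLevel.ClassMembers: return 'ClassMembers (2)';
	}
}

console.log(`Running with shakeLevel ${toStringShakeLevel(ShakeLevel.ClassMembers)}`);
// -> Running with shakeLevel ClassMembers (2)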


@ -0,0 +1,45 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const Lint = require("tslint");
class AbstractGlobalsRuleWalker extends Lint.RuleWalker {
constructor(file, program, opts, _config) {
super(file, opts);
this.program = program;
this._config = _config;
}
visitIdentifier(node) {
if (this.getDisallowedGlobals().some(disallowedGlobal => disallowedGlobal === node.text)) {
if (this._config.allowed && this._config.allowed.some(allowed => allowed === node.text)) {
return; // override
}
const checker = this.program.getTypeChecker();
const symbol = checker.getSymbolAtLocation(node);
if (symbol) {
const declarations = symbol.declarations;
if (Array.isArray(declarations) && symbol.declarations.some(declaration => {
if (declaration) {
const parent = declaration.parent;
if (parent) {
const sourceFile = parent.getSourceFile();
if (sourceFile) {
const fileName = sourceFile.fileName;
if (fileName && fileName.indexOf(this.getDefinitionPattern()) >= 0) {
return true;
}
}
}
}
return false;
})) {
this.addFailureAtNode(node, `Cannot use global '${node.text}' in '${this._config.target}'`);
}
}
}
super.visitIdentifier(node);
}
}
exports.AbstractGlobalsRuleWalker = AbstractGlobalsRuleWalker;


@ -0,0 +1,57 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
interface AbstractGlobalsRuleConfig {
target: string;
allowed: string[];
}
export abstract class AbstractGlobalsRuleWalker extends Lint.RuleWalker {
constructor(file: ts.SourceFile, private program: ts.Program, opts: Lint.IOptions, private _config: AbstractGlobalsRuleConfig) {
super(file, opts);
}
protected abstract getDisallowedGlobals(): string[];
protected abstract getDefinitionPattern(): string;
visitIdentifier(node: ts.Identifier) {
if (this.getDisallowedGlobals().some(disallowedGlobal => disallowedGlobal === node.text)) {
if (this._config.allowed && this._config.allowed.some(allowed => allowed === node.text)) {
return; // override
}
const checker = this.program.getTypeChecker();
const symbol = checker.getSymbolAtLocation(node);
if (symbol) {
const declarations = symbol.declarations;
if (Array.isArray(declarations) && symbol.declarations.some(declaration => {
if (declaration) {
const parent = declaration.parent;
if (parent) {
const sourceFile = parent.getSourceFile();
if (sourceFile) {
const fileName = sourceFile.fileName;
if (fileName && fileName.indexOf(this.getDefinitionPattern()) >= 0) {
return true;
}
}
}
}
return false;
})) {
this.addFailureAtNode(node, `Cannot use global '${node.text}' in '${this._config.target}'`);
}
}
}
super.visitIdentifier(node);
}
}
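AbstractGlobalsRuleWalker flags any identifier returned by getDisallowedGlobals() whose declaration resolves, via the program's type checker, to a definition file matching getDefinitionPattern(), unless the identifier is listed in the config's allowed array. A sketch of the config shape one such rule consumes; the glob and the exception are hypothetical, not taken from this diff:

// Mirrors AbstractGlobalsRuleConfig above; values are invented for illustration.
interface GlobalsRuleConfigSketch {
	target: string;    // minimatch glob of files the rule applies to
	allowed: string[]; // globals that stay permitted despite the ban
}

const exampleConfig: GlobalsRuleConfigSketch = {
	target: 'src/vs/**/common/**',
	allowed: ['process']
};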


@ -0,0 +1,34 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const Lint = require("tslint");
const minimatch = require("minimatch");
const abstractGlobalsRule_1 = require("./abstractGlobalsRule");
class Rule extends Lint.Rules.TypedRule {
applyWithProgram(sourceFile, program) {
const configs = this.getOptions().ruleArguments;
for (const config of configs) {
if (minimatch(sourceFile.fileName, config.target)) {
return this.applyWithWalker(new NoDOMGlobalsRuleWalker(sourceFile, program, this.getOptions(), config));
}
}
return [];
}
}
exports.Rule = Rule;
class NoDOMGlobalsRuleWalker extends abstractGlobalsRule_1.AbstractGlobalsRuleWalker {
getDefinitionPattern() {
return 'lib.dom.d.ts';
}
getDisallowedGlobals() {
// intentionally not complete
return [
"window",
"document",
"HTMLElement"
];
}
}


@ -0,0 +1,45 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
import * as minimatch from 'minimatch';
import { AbstractGlobalsRuleWalker } from './abstractGlobalsRule';
interface NoDOMGlobalsRuleConfig {
target: string;
allowed: string[];
}
export class Rule extends Lint.Rules.TypedRule {
applyWithProgram(sourceFile: ts.SourceFile, program: ts.Program): Lint.RuleFailure[] {
const configs = <NoDOMGlobalsRuleConfig[]>this.getOptions().ruleArguments;
for (const config of configs) {
if (minimatch(sourceFile.fileName, config.target)) {
return this.applyWithWalker(new NoDOMGlobalsRuleWalker(sourceFile, program, this.getOptions(), config));
}
}
return [];
}
}
class NoDOMGlobalsRuleWalker extends AbstractGlobalsRuleWalker {
getDefinitionPattern(): string {
return 'lib.dom.d.ts';
}
getDisallowedGlobals(): string[] {
// intentionally not complete
return [
"window",
"document",
"HTMLElement"
];
}
}


@ -0,0 +1,41 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const Lint = require("tslint");
const minimatch = require("minimatch");
const abstractGlobalsRule_1 = require("./abstractGlobalsRule");
class Rule extends Lint.Rules.TypedRule {
applyWithProgram(sourceFile, program) {
const configs = this.getOptions().ruleArguments;
for (const config of configs) {
if (minimatch(sourceFile.fileName, config.target)) {
return this.applyWithWalker(new NoNodejsGlobalsRuleWalker(sourceFile, program, this.getOptions(), config));
}
}
return [];
}
}
exports.Rule = Rule;
class NoNodejsGlobalsRuleWalker extends abstractGlobalsRule_1.AbstractGlobalsRuleWalker {
getDefinitionPattern() {
return '@types/node';
}
getDisallowedGlobals() {
// https://nodejs.org/api/globals.html#globals_global_objects
return [
"NodeJS",
"Buffer",
"__dirname",
"__filename",
"clearImmediate",
"exports",
"global",
"module",
"process",
"setImmediate"
];
}
}


@ -0,0 +1,52 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
import * as minimatch from 'minimatch';
import { AbstractGlobalsRuleWalker } from './abstractGlobalsRule';
interface NoNodejsGlobalsConfig {
target: string;
allowed: string[];
}
export class Rule extends Lint.Rules.TypedRule {
applyWithProgram(sourceFile: ts.SourceFile, program: ts.Program): Lint.RuleFailure[] {
const configs = <NoNodejsGlobalsConfig[]>this.getOptions().ruleArguments;
for (const config of configs) {
if (minimatch(sourceFile.fileName, config.target)) {
return this.applyWithWalker(new NoNodejsGlobalsRuleWalker(sourceFile, program, this.getOptions(), config));
}
}
return [];
}
}
class NoNodejsGlobalsRuleWalker extends AbstractGlobalsRuleWalker {
getDefinitionPattern(): string {
return '@types/node';
}
getDisallowedGlobals(): string[] {
// https://nodejs.org/api/globals.html#globals_global_objects
return [
"NodeJS",
"Buffer",
"__dirname",
"__filename",
"clearImmediate",
"exports",
"global",
"module",
"process",
"setImmediate"
];
}
}
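Following tslint's file-name convention these two rules would be addressed as no-dom-globals and no-nodejs-globals, each taking an array of target/allowed objects through ruleArguments. A hypothetical tslint.json fragment, written here as a TypeScript object for illustration (the rules directory and globs are assumptions, not part of this diff):

// Hypothetical configuration enabling both rules for environment-neutral "common" code.
const tslintConfigSketch = {
	rulesDirectory: ['build/lib/tslint'],
	rules: {
		'no-dom-globals': [true, { target: '**/vs/**/common/**', allowed: [] }],
		'no-nodejs-globals': [true, { target: '**/vs/**/common/**', allowed: [] }]
	}
};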

Some files were not shown because too many files have changed in this diff.