Merge branch 'master' into issue/98942-search-clear-exclude

Rob Lourens 2020-07-03 18:01:20 -07:00 committed by GitHub
commit 225cac5572
859 changed files with 30112 additions and 21866 deletions

.devcontainer/Dockerfile

@ -0,0 +1,121 @@
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
FROM mcr.microsoft.com/vscode/devcontainers/typescript-node:0-10
ARG TARGET_DISPLAY=":1"
# VNC options
ARG MAX_VNC_RESOLUTION=1920x1080x16
ARG TARGET_VNC_RESOLUTION=1920x1080
ARG TARGET_VNC_DPI=72
ARG TARGET_VNC_PORT=5901
ARG VNC_PASSWORD="vscode"
# noVNC (VNC web client) options
ARG INSTALL_NOVNC="true"
ARG NOVNC_VERSION=1.1.0
ARG TARGET_NOVNC_PORT=6080
ARG WEBSOCKETIFY_VERSION=0.9.0
# Firefox is useful for testing things like browser launch events, but optional
ARG INSTALL_FIREFOX="false"
# Expected non-root username from base image
ARG USERNAME=node
# Core environment variables for X11, VNC, and fluxbox
ENV DBUS_SESSION_BUS_ADDRESS="autolaunch:" \
MAX_VNC_RESOLUTION="${MAX_VNC_RESOLUTION}" \
VNC_RESOLUTION="${TARGET_VNC_RESOLUTION}" \
VNC_DPI="${TARGET_VNC_DPI}" \
VNC_PORT="${TARGET_VNC_PORT}" \
NOVNC_PORT="${TARGET_NOVNC_PORT}" \
DISPLAY="${TARGET_DISPLAY}" \
LANG="en_US.UTF-8" \
LANGUAGE="en_US.UTF-8" \
VISUAL="nano" \
EDITOR="nano"
# Configure apt and install packages
RUN apt-get update \
&& export DEBIAN_FRONTEND=noninteractive \
#
# Install the Cascadia Code fonts - https://github.com/microsoft/cascadia-code
&& curl -sSL https://github.com/microsoft/cascadia-code/releases/download/v2004.30/CascadiaCode_2004.30.zip -o /tmp/cascadia-fonts.zip \
&& unzip /tmp/cascadia-fonts.zip -d /tmp/cascadia-fonts \
&& mkdir -p /usr/share/fonts/truetype/cascadia \
&& mv /tmp/cascadia-fonts/ttf/* /usr/share/fonts/truetype/cascadia/ \
&& rm -rf /tmp/cascadia-fonts.zip /tmp/cascadia-fonts \
#
# Install X11, fluxbox and VS Code dependencies
&& apt-get -y install --no-install-recommends \
xvfb \
x11vnc \
fluxbox \
dbus-x11 \
x11-utils \
x11-xserver-utils \
xdg-utils \
fbautostart \
xterm \
eterm \
gnome-terminal \
gnome-keyring \
seahorse \
nautilus \
libx11-dev \
libxkbfile-dev \
libsecret-1-dev \
libnotify4 \
libnss3 \
libxss1 \
libasound2 \
xfonts-base \
xfonts-terminus \
fonts-noto \
fonts-wqy-microhei \
fonts-droid-fallback \
vim-tiny \
nano \
#
# [Optional] Install noVNC
&& if [ "${INSTALL_NOVNC}" = "true" ]; then \
mkdir -p /usr/local/novnc \
&& curl -sSL https://github.com/novnc/noVNC/archive/v${NOVNC_VERSION}.zip -o /tmp/novnc-install.zip \
&& unzip /tmp/novnc-install.zip -d /usr/local/novnc \
&& cp /usr/local/novnc/noVNC-${NOVNC_VERSION}/vnc_lite.html /usr/local/novnc/noVNC-${NOVNC_VERSION}/index.html \
&& rm /tmp/novnc-install.zip \
&& curl -sSL https://github.com/novnc/websockify/archive/v${WEBSOCKETIFY_VERSION}.zip -o /tmp/websockify-install.zip \
&& unzip /tmp/websockify-install.zip -d /usr/local/novnc \
&& apt-get -y install --no-install-recommends python-numpy \
&& ln -s /usr/local/novnc/websockify-${WEBSOCKETIFY_VERSION} /usr/local/novnc/noVNC-${NOVNC_VERSION}/utils/websockify \
&& rm /tmp/websockify-install.zip; \
fi \
#
# [Optional] Install Firefox
&& if [ "${INSTALL_FIREFOX}" = "true" ]; then \
apt-get -y install --no-install-recommends firefox-esr; \
fi \
#
# Clean up
&& apt-get autoremove -y \
&& apt-get clean -y \
&& rm -rf /var/lib/apt/lists/*
COPY bin/init-dev-container.sh /usr/local/share/
COPY bin/set-resolution /usr/local/bin/
COPY fluxbox/* /root/.fluxbox/
COPY fluxbox/* /home/${USERNAME}/.fluxbox/
# Update privs, owners of config files
RUN mkdir -p /var/run/dbus /root/.vnc /home/${USERNAME}/.vnc \
&& touch /root/.Xmodmap /home/${USERNAME}/.Xmodmap \
&& echo "${VNC_PASSWORD}" | tee /root/.vnc/passwd > /home/${USERNAME}/.vnc/passwd \
&& chown -R ${USERNAME}:${USERNAME} /home/${USERNAME}/.Xmodmap /home/${USERNAME}/.fluxbox /home/${USERNAME}/.vnc \
&& chmod +x /usr/local/share/init-dev-container.sh /usr/local/bin/set-resolution
ENTRYPOINT ["/usr/local/share/init-dev-container.sh"]
CMD ["sleep", "infinity"]

.devcontainer/README.md

@ -0,0 +1,82 @@
# Code - OSS Development Container
This repository includes configuration for a development container for working with Code - OSS in an isolated local container or using [Visual Studio Codespaces](https://aka.ms/vso).
> **Tip:** The default VNC password is `vscode`. The VNC server runs on port `5901` with a web client at `6080`. For better performance, we recommend using a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/). Applications like the macOS Screen Sharing app will not perform as well. [Chicken](https://sourceforge.net/projects/chicken/) is a good macOS alternative.
## Quick start - local
1. Install Docker Desktop or Docker on your local machine. (See [docs](https://aka.ms/vscode-remote/containers/getting-started) for additional details.)
2. [Docker Desktop] If you are not using the new WSL2 Docker Desktop engine, increase the resources allocated to Docker Desktop to at least **4 Cores and 4 GB of RAM (8 GB recommended)**. Right-click on the Docker status bar item, go to **Preferences/Settings > Resources > Advanced** to do so.
> **Note:** The [Resource Monitor](https://marketplace.visualstudio.com/items?itemName=mutantdino.resourcemonitor) extension is included in the container so you can keep an eye on CPU/Memory in the status bar.
3. Install [Visual Studio Code Stable](https://code.visualstudio.com/) or [Insiders](https://code.visualstudio.com/insiders/) and the [Remote - Containers](https://aka.ms/vscode-remote/download/containers) extension.
![Image of Remote - Containers extension](https://microsoft.github.io/vscode-remote-release/images/remote-containers-extn.png)
> Note that the Remote - Containers extension requires the Visual Studio Code distribution of Code - OSS. See the [FAQ](https://aka.ms/vscode-remote/faq/license) for details.
4. Press <kbd>Ctrl/Cmd</kbd> + <kbd>Shift</kbd> + <kbd>P</kbd> and select **Remote - Containers: Open Repository in Container...**.
> **Tip:** While you can use your local source tree instead, operations like `yarn install` can be slow on macOS or when using the Hyper-V engine on Windows. We recommend the "open repository" approach instead since it uses a "named volume" rather than the local filesystem.
5. Type `https://github.com/microsoft/vscode` (or a branch or PR URL) in the input box and press <kbd>Enter</kbd>.
6. After the container is running, open a web browser and go to [http://localhost:6080](http://localhost:6080) or use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.
Anything you start in VS Code or the integrated terminal will appear here.
Next: **[Try it out!](#try-it)**
## Quick start - Codespaces
> Note that the Codespaces browser-based editor cannot currently access the desktop environment in this container (due to a [missing feature](https://github.com/MicrosoftDocs/vsonline/issues/117)). We recommend using Visual Studio Code from the desktop to connect instead in the near term.
1. Install [Visual Studio Code Stable](https://code.visualstudio.com/) or [Insiders](https://code.visualstudio.com/insiders/) and the [Visual Studio Codespaces](https://aka.ms/vscs-ext-vscode) extension.
![Image of VS Codespaces extension](https://microsoft.github.io/vscode-remote-release/images/codespaces-extn.png)
> Note that the Visual Studio Codespaces extension requires the Visual Studio Code distribution of Code - OSS.
2. Sign in by pressing <kbd>Ctrl/Cmd</kbd> + <kbd>Shift</kbd> + <kbd>P</kbd> and selecting **Codespaces: Sign In**. You may also need to use the **Codespaces: Create Plan** command if you do not have a plan. See the [Codespaces docs](https://aka.ms/vso-docs/vscode) for details.
3. Press <kbd>Ctrl/Cmd</kbd> + <kbd>Shift</kbd> + <kbd>P</kbd> and select **Codespaces: Create New Codespace**.
4. Use default settings, select a plan, and then enter the repository URL `https://github.com/microsoft/vscode` (or a branch or PR URL) in the input box when prompted.
5. After the container is running, open a web browser and go to [http://localhost:6080](http://localhost:6080) or use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.
6. Anything you start in VS Code or the integrated terminal will appear here.
## Try it!
This container uses the [Fluxbox](http://fluxbox.org/) window manager to keep things lean. **Right-click on the desktop** to see menu options. It works with GNOME and GTK applications, so other tools can be installed if needed.
Note you can also set the resolution from the command line by typing `set-resolution`.
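For example, the script takes an optional resolution and DPI as arguments and only prompts when they are omitted (the values below are illustrative):

```bash
# Switch the virtual desktop to 1600x900 at 96 DPI without prompting
set-resolution 1600x900 96
```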
To start working with Code - OSS, follow these steps:
1. In your local VS Code, open a terminal (<kbd>Ctrl/Cmd</kbd> + <kbd>Shift</kbd> + <kbd>\`</kbd>) and type the following commands:
```bash
yarn install
bash scripts/code.sh
```
2. After the build is complete, open a web browser and go to [http://localhost:6080](http://localhost:6080) or use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.
3. You should now see Code - OSS!
Next, let's try debugging.
1. Shut down Code - OSS by clicking the close box in the upper right corner of the Code - OSS window in your browser or VNC viewer.
2. Go to your local VS Code client, and use Run / Debug view to launch the **VS Code** configuration. (Typically the default, so you can likely just press <kbd>F5</kbd>).
> **Note:** If launching times out, you can increase the value of `timeout` in the "VS Code", "Attach Main Process", "Attach Extension Host", and "Attach to Shared Process" configurations in [launch.json](../.vscode/launch.json). However, running `scripts/code.sh` first will set up Electron which will usually solve timeout issues.
3. After a bit, Code - OSS will appear with the debugger attached!
Enjoy!


@ -0,0 +1,91 @@
#!/bin/bash
NONROOT_USER=node
LOG=/tmp/container-init.log
# Execute the command if it is not already running
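# Usage: startInBackgroundIfNotRunning <process-name> <sudoIf|sudoUserIf> <command>
# <process-name> is checked with pidof and also names the /tmp/<process-name>.log file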
startInBackgroundIfNotRunning()
{
log "Starting $1."
echo -e "\n** $(date) **" | sudoIf tee -a /tmp/$1.log > /dev/null
if ! pidof $1 > /dev/null; then
keepRunningInBackground "$@"
while ! pidof $1 > /dev/null; do
sleep 1
done
log "$1 started."
else
echo "$1 is already running." | sudoIf tee -a /tmp/$1.log > /dev/null
log "$1 is already running."
fi
}
# Keep command running in background
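# Usage: keepRunningInBackground <name> <sudoIf|sudoUserIf> <command>
# Restarts <command> in a loop (with a 5 second delay after each exit), appends output
# to /tmp/<name>.log, and records the PID of the background loop in /tmp/<name>.pid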
keepRunningInBackground()
{
($2 sh -c "while :; do echo [\$(date)] Process started.; $3; echo [\$(date)] Process exited!; sleep 5; done 2>&1" | sudoIf tee -a /tmp/$1.log > /dev/null & echo "$!" | sudoIf tee /tmp/$1.pid > /dev/null)
}
# Use sudo to run as root when required
sudoIf()
{
if [ "$(id -u)" -ne 0 ]; then
sudo "$@"
else
"$@"
fi
}
# Use sudo to run as non-root user if not already running
sudoUserIf()
{
if [ "$(id -u)" -eq 0 ]; then
sudo -u ${NONROOT_USER} "$@"
else
"$@"
fi
}
# Log messages
log()
{
echo -e "[$(date)] $@" | sudoIf tee -a $LOG > /dev/null
}
log "** SCRIPT START **"
# Start dbus.
log 'Running "/etc/init.d/dbus start".'
if [ -f "/var/run/dbus/pid" ] && ! pidof dbus-daemon > /dev/null; then
sudoIf rm -f /var/run/dbus/pid
fi
sudoIf /etc/init.d/dbus start 2>&1 | sudoIf tee -a /tmp/dbus-daemon-system.log > /dev/null
while ! pidof dbus-daemon > /dev/null; do
sleep 1
done
# Set up Xvfb.
startInBackgroundIfNotRunning "Xvfb" sudoIf "Xvfb ${DISPLAY:-:1} +extension RANDR -screen 0 ${MAX_VNC_RESOLUTION:-1920x1080x16}"
# Start fluxbox as a lightweight window manager.
startInBackgroundIfNotRunning "fluxbox" sudoUserIf "dbus-launch startfluxbox"
# Start x11vnc
startInBackgroundIfNotRunning "x11vnc" sudoIf "x11vnc -display ${DISPLAY:-:1} -rfbport ${VNC_PORT:-5901} -localhost -no6 -xkb -shared -forever -passwdfile $HOME/.vnc/passwd"
# Set resolution
/usr/local/bin/set-resolution ${VNC_RESOLUTION:-1280x720} ${VNC_DPI:-72}
# Spin up noVNC if installed and not running.
if [ -d "/usr/local/novnc" ] && [ "$(ps -ef | grep /usr/local/novnc/noVNC*/utils/launch.sh | grep -v grep)" = "" ]; then
keepRunningInBackground "noVNC" sudoIf "/usr/local/novnc/noVNC*/utils/launch.sh --listen ${NOVNC_PORT:-6080} --vnc localhost:${VNC_PORT:-5901}"
log "noVNC started."
else
log "noVNC is already running or not installed."
fi
# Run whatever was passed in
log "Executing \"$@\"."
"$@"
log "** SCRIPT EXIT **"


@ -0,0 +1,25 @@
#!/bin/bash
RESOLUTION=${1:-${VNC_RESOLUTION:-1920x1080}}
DPI=${2:-${VNC_DPI:-72}}
if [ -z "$1" ]; then
echo -e "**Current Settings **\n"
xrandr
echo -n -e "\nEnter new resolution (WIDTHxHEIGHT, blank for ${RESOLUTION}, Ctrl+C to abort).\n> "
read NEW_RES
if [ "${NEW_RES}" != "" ]; then
RESOLUTION=${NEW_RES}
fi
if [ -z "$2" ]; then
echo -n -e "\nEnter new DPI (blank for ${DPI}, Ctrl+C to abort).\n> "
read NEW_DPI
if [ "${NEW_DPI}" != "" ]; then
DPI=${NEW_DPI}
fi
fi
fi
xrandr --fb ${RESOLUTION} --dpi ${DPI} > /dev/null 2>&1
echo -e "\n**New Settings **\n"
xrandr
echo


@ -0,0 +1,41 @@
{
"name": "Code - OSS",
"build": {
"dockerfile": "Dockerfile",
"args": {
"MAX_VNC_RESOLUTION": "1920x1080x16",
"TARGET_VNC_RESOLUTION": "1280x768",
"TARGET_VNC_PORT": "5901",
"TARGET_NOVNC_PORT": "6080",
"VNC_PASSWORD": "vscode",
"INSTALL_FIREFOX": "true"
}
},
"overrideCommand": false,
"runArgs": ["--init"],
"settings": {
// zsh is also available
"terminal.integrated.shell.linux": "/bin/bash",
"resmon.show.battery": false,
"resmon.show.cpufreq": false,
"remote.extensionKind": {
"ms-vscode.js-debug-nightly": "workspace",
"msjsdiag.debugger-for-chrome": "workspace"
},
"debug.chrome.useV3": true
},
// noVNC, VNC ports
"forwardPorts": [6080, 5901],
"extensions": [
"dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig",
"msjsdiag.debugger-for-chrome",
"mutantdino.resourcemonitor",
"GitHub.vscode-pull-request-github"
],
"remoteUser": "node"
}


@ -0,0 +1,9 @@
[app] (name=code-oss-dev)
[Position] (CENTER) {0 0}
[Maximized] {yes}
[Dimensions] {100% 100%}
[end]
[transient] (role=GtkFileChooserDialog)
[Position] (CENTER) {0 0}
[Dimensions] {70% 70%}
[end]


@ -0,0 +1,9 @@
session.menuFile: ~/.fluxbox/menu
session.keyFile: ~/.fluxbox/keys
session.styleFile: /usr/share/fluxbox/styles//Squared_for_Debian
session.configVersion: 13
session.screen0.workspaces: 1
session.screen0.workspacewarping: false
session.screen0.toolbar.widthPercent: 100
session.screen0.strftimeFormat: %d %b, %a %02k:%M:%S
session.screen0.toolbar.tools: prevworkspace, workspacename, nextworkspace, clock, prevwindow, nextwindow, iconbar, systemtray


@ -0,0 +1,16 @@
[begin] ( Code - OSS Development Container )
[exec] (File Manager) { nautilus ~ } <>
[exec] (Terminal) {/usr/bin/gnome-terminal --working-directory=~ } <>
[exec] (Start Code - OSS) { x-terminal-emulator -T "Code - OSS Build" -e bash /workspaces/vscode*/scripts/code.sh } <>
[submenu] (System >) {}
[exec] (Set Resolution) { x-terminal-emulator -T "Set Resolution" -e bash /usr/local/bin/set-resolution } <>
[exec] (Passwords and Keys) { seahorse } <>
[exec] (Top) { x-terminal-emulator -T "Top" -e /usr/bin/top } <>
[exec] (Editres) {editres} <>
[exec] (Xfontsel) {xfontsel} <>
[exec] (Xkill) {xkill} <>
[exec] (Xrefresh) {xrefresh} <>
[end]
[config] (Configuration >)
[workspaces] (Workspaces >)
[end]


@ -519,7 +519,9 @@
"**/vs/workbench/services/**/common/**",
"**/vs/workbench/api/**/common/**",
"vscode-textmate",
"vscode-oniguruma"
"vscode-oniguruma",
"iconv-lite-umd",
"semver-umd"
]
},
{


@ -1,82 +1,181 @@
{
"$schema": "https://raw.githubusercontent.com/microsoft/vscode-github-triage-actions/master/classifier/apply/apply-labels/classifier-config.schema.json",
"$schema": "https://raw.githubusercontent.com/microsoft/vscode-github-triage-actions/master/classifier-deep/apply/apply-labels/deep-classifier-config.schema.json",
"assignees": {
"JacksonKearl": {
"assign": true
},
"bpasero": {
"assign": true
},
"Tyriar": {
"assign": true
},
"misolori": {
"assign": true
},
"joaomoreno": {
"assign": true
}
"JacksonKearl": {"accuracy": 0.5}
},
"labels": {
"search-editor": {
"applyLabel": true,
"assign": [
"JacksonKearl"
]
},
"snippets": {
"applyLabel": false,
"assign": [
"jrieken"
]
},
"integrated-terminal": {
"applyLabel": true,
"assign": [
"Tyriar"
]
},
"workbench-editors": {
"applyLabel": false,
"assign": [
"bpasero"
]
},
"workbench-history": {
"applyLabel": false,
"assign": [
"bpasero"
]
},
"workbench-notifications": {
"applyLabel": false,
"assign": [
"bpasero"
]
},
"workbench-tabs": {
"applyLabel": false,
"assign": [
"bpasero"
]
},
"icons-product": {
"applyLabel": true,
"assign": [
"misolori"
]
},
"ux": {
"applyLabel": true,
"assign": [
"misolori"
]
},
"git": {
"applyLabel": false,
"assign": [
"joaomoreno"
]
"L10N": {"assign": []},
"VIM": {"assign": []},
"api": {"assign": ["jrieken"]},
"api-finalization": {"assign": []},
"api-proposal": {"assign": ["jrieken"]},
"authentication": {"assign": ["RMacfarlane"]},
"breadcrumbs": {"assign": ["jrieken"]},
"callhierarchy": {"assign": ["jrieken"]},
"code-lens": {"assign": ["jrieken"]},
"color-palette": {"assign": []},
"comments": {"assign": ["rebornix"]},
"config": {"assign": ["sandy081"]},
"context-keys": {"assign": []},
"css-less-scss": {"assign": ["aeschli"]},
"custom-editors": {"assign": ["mjbvz"]},
"debug": {"assign": ["weinand"]},
"debug-console": {"assign": ["weinand"]},
"dialogs": {"assign": ["sbatten"]},
"diff-editor": {"assign": []},
"dropdown": {"assign": []},
"editor": {"assign": ["rebornix"]},
"editor-autoclosing": {"assign": []},
"editor-autoindent": {"assign": ["rebornix"]},
"editor-bracket-matching": {"assign": []},
"editor-clipboard": {"assign": ["jrieken"]},
"editor-code-actions": {"assign": []},
"editor-color-picker": {"assign": ["rebornix"]},
"editor-columnselect": {"assign": ["alexdima"]},
"editor-commands": {"assign": ["jrieken"]},
"editor-comments": {"assign": []},
"editor-contrib": {"assign": []},
"editor-core": {"assign": []},
"editor-drag-and-drop": {"assign": ["rebornix"]},
"editor-error-widget": {"assign": ["sandy081"]},
"editor-find": {"assign": ["rebornix"]},
"editor-folding": {"assign": ["aeschli"]},
"editor-hover": {"assign": []},
"editor-indent-guides": {"assign": []},
"editor-input": {"assign": ["alexdima"]},
"editor-input-IME": {"assign": ["rebornix"]},
"editor-minimap": {"assign": []},
"editor-multicursor": {"assign": ["alexdima"]},
"editor-parameter-hints": {"assign": []},
"editor-render-whitespace": {"assign": []},
"editor-rendering": {"assign": ["alexdima"]},
"editor-scrollbar": {"assign": []},
"editor-symbols": {"assign": ["jrieken"]},
"editor-synced-region": {"assign": ["aeschli"]},
"editor-textbuffer": {"assign": ["rebornix"]},
"editor-theming": {"assign": []},
"editor-wordnav": {"assign": ["alexdima"]},
"editor-wrapping": {"assign": ["alexdima"]},
"emmet": {"assign": []},
"error-list": {"assign": ["sandy081"]},
"explorer-custom": {"assign": ["sandy081"]},
"extension-host": {"assign": []},
"extensions": {"assign": ["sandy081"]},
"extensions-development": {"assign": []},
"file-decorations": {"assign": ["jrieken"]},
"file-encoding": {"assign": ["bpasero"]},
"file-explorer": {"assign": ["isidorn"]},
"file-glob": {"assign": []},
"file-guess-encoding": {"assign": ["bpasero"]},
"file-io": {"assign": ["bpasero"]},
"file-watcher": {"assign": ["bpasero"]},
"font-rendering": {"assign": []},
"formatting": {"assign": []},
"git": {"assign": ["joaomoreno"]},
"gpu": {"assign": ["deepak1556"]},
"grammar": {"assign": ["mjbvz"]},
"grid-view": {"assign": ["joaomoreno"]},
"html": {"assign": ["aeschli"]},
"i18n": {"assign": []},
"icon-brand": {"assign": []},
"icons-product": {"assign": ["misolori"]},
"install-update": {"assign": []},
"integrated-terminal": {"assign": ["Tyriar"]},
"integrated-terminal-conpty": {"assign": ["Tyriar"]},
"integrated-terminal-links": {"assign": ["Tyriar"]},
"integration-test": {"assign": []},
"intellisense-config": {"assign": []},
"ipc": {"assign": ["joaomoreno"]},
"issue-bot": {"assign": ["chrmarti"]},
"issue-reporter": {"assign": ["RMacfarlane"]},
"javascript": {"assign": ["mjbvz"]},
"json": {"assign": ["aeschli"]},
"keybindings": {"assign": []},
"keybindings-editor": {"assign": ["sandy081"]},
"keyboard-layout": {"assign": ["alexdima"]},
"languages-basic": {"assign": ["aeschli"]},
"languages-diagnostics": {"assign": ["jrieken"]},
"layout": {"assign": ["sbatten"]},
"lcd-text-rendering": {"assign": []},
"list": {"assign": ["joaomoreno"]},
"log": {"assign": []},
"markdown": {"assign": ["mjbvz"]},
"marketplace": {"assign": []},
"menus": {"assign": ["sbatten"]},
"merge-conflict": {"assign": ["chrmarti"]},
"notebook": {"assign": ["rebornix"]},
"outline": {"assign": ["jrieken"]},
"output": {"assign": []},
"perf": {"assign": []},
"perf-bloat": {"assign": []},
"perf-startup": {"assign": []},
"php": {"assign": ["roblourens"]},
"portable-mode": {"assign": ["joaomoreno"]},
"proxy": {"assign": []},
"quick-pick": {"assign": ["chrmarti"]},
"references-viewlet": {"assign": ["jrieken"]},
"release-notes": {"assign": []},
"remote": {"assign": []},
"remote-explorer": {"assign": ["alexr00"]},
"rename": {"assign": ["jrieken"]},
"scm": {"assign": ["joaomoreno"]},
"screencast-mode": {"assign": ["joaomoreno"]},
"search": {"assign": ["roblourens"]},
"search-editor": {"assign": ["JacksonKearl"]},
"search-replace": {"assign": ["sandy081"]},
"semantic-tokens": {"assign": ["aeschli"]},
"settings-editor": {"assign": ["roblourens"]},
"settings-sync": {"assign": ["sandy081"]},
"simple-file-dialog": {"assign": ["alexr00"]},
"smart-select": {"assign": ["jrieken"]},
"smoke-test": {"assign": []},
"snap": {"assign": ["joaomoreno"]},
"snippets": {"assign": ["jrieken"]},
"splitview": {"assign": ["joaomoreno"]},
"suggest": {"assign": ["jrieken"]},
"tasks": {"assign": ["alexr00"]},
"telemetry": {"assign": []},
"themes": {"assign": ["aeschli"]},
"timeline": {"assign": ["eamodio"]},
"timeline-git": {"assign": ["eamodio"]},
"titlebar": {"assign": ["sbatten"]},
"tokenization": {"assign": []},
"tree": {"assign": ["joaomoreno"]},
"typescript": {"assign": ["mjbvz"]},
"undo-redo": {"assign": []},
"unit-test": {"assign": []},
"uri": {"assign": ["jrieken"]},
"ux": {"assign": ["misolori"]},
"variable-resolving": {"assign": []},
"vscode-build": {"assign": []},
"web": {"assign": ["bpasero"]},
"webview": {"assign": ["mjbvz"]},
"workbench-cli": {"assign": []},
"workbench-diagnostics": {"assign": ["RMacfarlane"]},
"workbench-dnd": {"assign": ["bpasero"]},
"workbench-editor-grid": {"assign": ["sbatten"]},
"workbench-editors": {"assign": ["bpasero"]},
"workbench-electron": {"assign": ["deepak1556"]},
"workbench-feedback": {"assign": ["bpasero"]},
"workbench-history": {"assign": ["bpasero"]},
"workbench-hot-exit": {"assign": ["Tyriar"]},
"workbench-launch": {"assign": []},
"workbench-link": {"assign": []},
"workbench-multiroot": {"assign": ["bpasero"]},
"workbench-notifications": {"assign": ["bpasero"]},
"workbench-os-integration": {"assign": []},
"workbench-rapid-render": {"assign": ["jrieken"]},
"workbench-run-as-admin": {"assign": []},
"workbench-state": {"assign": ["bpasero"]},
"workbench-status": {"assign": ["bpasero"]},
"workbench-tabs": {"assign": ["bpasero"]},
"workbench-touchbar": {"assign": ["bpasero"]},
"workbench-views": {"assign": ["sbatten"]},
"workbench-welcome": {"assign": ["chrmarti"]},
"workbench-window": {"assign": ["bpasero"]},
"workbench-zen": {"assign": ["isidorn"]},
"workspace-edit": {"assign": ["jrieken"]},
"workspace-symbols": {"assign": []},
"zoom": {"assign": ["alexdima"] }
}
}
}

.github/commands.json

@ -221,6 +221,19 @@
"addLabel": "*caused-by-extension",
"comment": "It looks like this is caused by the C++ extension. Please file it with the repository [here](https://github.com/Microsoft/vscode-cpptools). Make sure to check their issue reporting template and provide them relevant information such as the extension version you're using. See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines for more information.\n\nHappy Coding!"
},
{
"type": "comment",
"name": "extCpp",
"allowUsers": [
"cleidigh",
"usernamehw",
"gjsjohnmurray",
"IllusionMH"
],
"action": "close",
"addLabel": "*caused-by-extension",
"comment": "It looks like this is caused by the C++ extension. Please file it with the repository [here](https://github.com/Microsoft/vscode-cpptools). Make sure to check their issue reporting template and provide them relevant information such as the extension version you're using. See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines for more information.\n\nHappy Coding!"
},
{
"type": "comment",
"name": "extTS",
@ -271,7 +284,7 @@
],
"action": "close",
"addLabel": "*caused-by-extension",
"comment": "It looks like this is caused by the Go extension. Please file it with the repository [here](https://github.com/microsoft/vscode-go). Make sure to check their [contributing guidelines](https://github.com/microsoft/vscode-go/blob/master/CONTRIBUTING.md) and provide relevant information such as the extension version you're using. See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines for more information.\n\nHappy Coding!"
"comment": "It looks like this is caused by the Go extension. Please file it with the repository [here](https://github.com/golang/vscode-go). Make sure to check their issue reporting template and provide them relevant information such as the extension version you're using. See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines for more information.\n\nHappy Coding!"
},
{
"type": "comment",


@ -17,7 +17,7 @@ jobs:
uses: actions/checkout@v2
with:
repository: 'microsoft/vscode-github-triage-actions'
ref: v27
ref: v31
path: ./actions
- name: Install Actions
if: github.event_name != 'issues' || contains(github.event.issue.labels.*.name, 'author-verification-requested')


@ -1,40 +0,0 @@
name: "Classifier: Trainer"
on:
schedule:
- cron: 0 0 12 * *
jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v2
with:
repository: 'microsoft/vscode-github-triage-actions'
ref: master
lfs: true
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Install Additional Dependencies
# Pulls in a bunch of other packages that aren't needed for the rest of the actions
run: npm install @azure/storage-blob@12
- name: "Run Classifier: Scraper"
uses: ./actions/classifier/train/fetch-issues
with:
token: ${{secrets.ISSUE_SCRAPER_TOKEN}} # My personal token, so as to not risk going over quota on main token
- name: Set up Python 3.7
uses: actions/setup-python@v1
with:
python-version: 3.7
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install --upgrade numpy scipy scikit-learn joblib nltk
- name: "Run Classifier: Generator"
run: python ./actions/classifier/train/generate-models/generate.py category
- name: "Run Classifier: Upload"
uses: ./actions/classifier/train/upload-models
with:
blobContainerName: classifier-models
blobStorageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}


@ -13,7 +13,7 @@ jobs:
with:
repository: 'microsoft/vscode-github-triage-actions'
path: ./actions
ref: v27
ref: v31
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Run Commands


@ -0,0 +1,23 @@
name: "Deep Classifier: Monitor"
on:
issues:
types: [unassigned]
jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v2
with:
repository: 'microsoft/vscode-github-triage-actions'
ref: v31
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions
- name: "Run Classifier: Monitor"
uses: ./actions/classifier-deep/monitor
with:
botName: vscode-triage-bot
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}


@ -1,7 +1,9 @@
name: "Classifier: Apply"
name: "Deep Classifier: Runner"
on:
schedule:
- cron: 0,30 * * * *
- cron: 0/30 * * * *
repository_dispatch:
types: [trigger-deep-classifier-runner]
jobs:
main:
@ -11,7 +13,7 @@ jobs:
uses: actions/checkout@v2
with:
repository: 'microsoft/vscode-github-triage-actions'
ref: v22 # dont bump, we use an old classifier implementation. TODO: dont.
ref: v31
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions
@ -19,13 +21,16 @@ jobs:
# Pulls in a bunch of other packages that aren't needed for the rest of the actions
run: npm install @azure/storage-blob@12
- name: "Run Classifier: Scraper"
uses: ./actions/classifier/apply/fetch-issues
uses: ./actions/classifier-deep/apply/fetch-sources
with:
# slightly overlapping to protect against issues slipping through the cracks if a run is delayed
from: 45
from: 40
until: 5
blobContainerName: classifier-models
configPath: classifier
blobContainerName: vscode-issue-classifier
blobStorageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
- name: Set up Python 3.7
uses: actions/setup-python@v1
with:
@ -33,11 +38,13 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install --upgrade numpy scipy scikit-learn joblib nltk
pip install --upgrade numpy scipy scikit-learn joblib nltk simpletransformers torch torchvision
- name: "Run Classifier: Generator"
run: python ./actions/classifier/apply/generate-labels/main.py
run: python ./actions/classifier-deep/apply/generate-labels/main.py
- name: "Run Classifier: Labeler"
uses: ./actions/classifier/apply/apply-labels
uses: ./actions/classifier-deep/apply/apply-labels
with:
config-path: classifier
configPath: classifier
allowLabels: "needs more info|new release"
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}


@ -0,0 +1,27 @@
name: "Deep Classifier: Scraper"
on:
repository_dispatch:
types: [trigger-deep-classifier-scraper]
jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v2
with:
repository: 'microsoft/vscode-github-triage-actions'
ref: v31
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Install Additional Dependencies
# Pulls in a bunch of other packages that aren't needed for the rest of the actions
run: npm install @azure/storage-blob@12
- name: "Run Classifier: Scraper"
uses: ./actions/classifier-deep/train/fetch-issues
with:
blobContainerName: vscode-issue-classifier
blobStorageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
token: ${{secrets.ISSUE_SCRAPER_TOKEN}}
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}


@ -13,7 +13,7 @@ jobs:
uses: actions/checkout@v2
with:
repository: 'microsoft/vscode-github-triage-actions'
ref: v27
ref: v31
path: ./actions
- name: Install Actions
if: contains(github.event.issue.labels.*.name, '*english-please')


@ -18,7 +18,7 @@ jobs:
with:
repository: 'microsoft/vscode-github-triage-actions'
path: ./actions
ref: v27
ref: v31
- name: Install Actions
if: github.event_name != 'issues' || contains(github.event.issue.labels.*.name, 'feature-request')
run: npm install --production --prefix ./actions


@ -2,6 +2,8 @@ name: Latest Release Monitor
on:
schedule:
- cron: 0/5 * * * *
repository_dispatch:
types: [trigger-latest-release-monitor]
jobs:
main:
@ -12,7 +14,7 @@ jobs:
with:
repository: 'microsoft/vscode-github-triage-actions'
path: ./actions
ref: v27
ref: v31
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Install Storage Module
@ -20,6 +22,6 @@ jobs:
- name: Run Latest Release Monitor
uses: ./actions/latest-release-monitor
with:
storageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING_NEW}}
storageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}


@ -14,7 +14,7 @@ jobs:
with:
repository: 'microsoft/vscode-github-triage-actions'
path: ./actions
ref: v27
ref: v31
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Run Locker


@ -14,7 +14,7 @@ jobs:
with:
repository: 'microsoft/vscode-github-triage-actions'
path: ./actions
ref: v27
ref: v31
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Run Needs More Info Closer


@ -11,7 +11,7 @@ jobs:
uses: actions/checkout@v2
with:
repository: 'microsoft/vscode-github-triage-actions'
ref: v27
ref: v31
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions


@ -11,7 +11,7 @@ jobs:
uses: actions/checkout@v2
with:
repository: 'microsoft/vscode-github-triage-actions'
ref: v27
ref: v31
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions
@ -48,6 +48,14 @@ jobs:
mustNotMatch: "^We have written the needed data into your clipboard because it was too large to send\\. Please paste\\.$"
comment: "It looks like you're using the VS Code Issue Reporter but did not paste the text generated into the created issue. We've closed this issue, please open a new one containing the text we placed in your clipboard.\n\nHappy Coding!"
- name: Run Clipboard Labeler (Chinese)
uses: ./actions/regex-labeler
with:
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
label: "invalid"
mustNotMatch: "^所需的数据太大,无法直接发送。我们已经将其写入剪贴板,请粘贴。$"
comment: "看起来您正在使用 VS Code 问题报告程序,但是没有将生成的文本粘贴到创建的问题中。我们将关闭这个问题,请使用剪贴板中的内容创建一个新的问题。\n\n祝您使用愉快"
# source of truth in ./english-please.yml
- name: Run English Please
uses: ./actions/english-please


@ -13,7 +13,7 @@ jobs:
uses: actions/checkout@v2
with:
repository: 'microsoft/vscode-github-triage-actions'
ref: v27
ref: v31
path: ./actions
- name: Checkout Repo
if: github.event_name != 'issues'

.github/workflows/rich-navigation.yml

@ -0,0 +1,23 @@
name: "Rich Navigation Indexing"
on:
pull_request:
push:
branches:
- master
jobs:
richnav:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Use Node.js
uses: actions/setup-node@v1
- name: Install dependencies
run: yarn --frozen-lockfile
env:
CHILD_CONCURRENCY: 1
- uses: microsoft/RichCodeNavIndexer@master
with:
languages: typescript
repo-token: ${{ secrets.GITHUB_TOKEN }}
continue-on-error: true


@ -14,7 +14,7 @@ jobs:
with:
repository: 'microsoft/vscode-github-triage-actions'
path: ./actions
ref: v27
ref: v31
- name: Install Actions
if: contains(github.event.issue.labels.*.name, 'testplan-item') || contains(github.event.issue.labels.*.name, 'invalid-testplan-item')
run: npm install --production --prefix ./actions

.vscode/launch.json

@ -26,6 +26,7 @@
"type": "pwa-chrome",
"request": "attach",
"name": "Attach to Shared Process",
"timeout": 30000,
"port": 9222,
"urlFilter": "*sharedProcess.html*",
"presentation": {
@ -57,6 +58,7 @@
"type": "node",
"request": "attach",
"name": "Attach to Main Process",
"timeout": 30000,
"port": 5875,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
@ -198,6 +200,7 @@
"env": {
"VSCODE_EXTHOST_WILL_SEND_SOCKET": null
},
"cleanUp": "wholeBrowser",
"breakOnLoad": false,
"urlFilter": "*workbench.html*",
"runtimeArgs": [


@ -2,12 +2,14 @@
{
"kind": 1,
"language": "markdown",
"value": "##### `Config`: defines the inbox query"
"value": "##### `Config`: defines the inbox query",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "$inbox=repo:microsoft/vscode is:open no:assignee -label:feature-request -label:testplan-item -label:plan-item "
"value": "$inbox=repo:microsoft/vscode is:open no:assignee -label:feature-request -label:testplan-item -label:plan-item ",
"editable": true
},
{
"kind": 1,
@ -18,7 +20,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$inbox -label:\"needs more info\"",
"value": "$inbox -label:\"needs more info\" -label:emmet",
"editable": true
},
{
@ -31,6 +33,6 @@
"kind": 2,
"language": "github-issues",
"value": "$inbox",
"editable": true
"editable": false
}
]


@ -20,7 +20,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$repos $milestone assignee:@me is:open\n",
"value": "$repos $milestone assignee:@me is:open",
"editable": false
},
{


@ -0,0 +1,55 @@
[
{
"kind": 1,
"language": "markdown",
"value": "### Bug Verification Queries\n\nBefore shipping we want to verify _all_ bugs. That means when a bug is fixed we check that the fix actually works. It's always best to start with bugs that you have filed and the proceed with bugs that have been filed from users outside the development team. ",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "#### Config: update list of `repos` and the `milestone`",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"June 2020\"",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "### Bugs You Filed",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate author:@me",
"editable": false
},
{
"kind": 1,
"language": "markdown",
"value": "### Bugs From Outside",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate -author:@me -assignee:@me label:bug -label:verified -author:@me -author:aeschli -author:alexdima -author:alexr00 -author:bpasero -author:chrisdias -author:chrmarti -author:connor4312 -author:dbaeumer -author:deepak1556 -author:eamodio -author:egamma -author:gregvanl -author:isidorn -author:JacksonKearl -author:joaomoreno -author:jrieken -author:lramos15 -author:lszomoru -author:misolori -author:mjbvz -author:rebornix -author:RMacfarlane -author:roblourens -author:sana-ajani -author:sandy081 -author:sbatten -author:Tyriar -author:weinand",
"editable": false
},
{
"kind": 1,
"language": "markdown",
"value": "### All"
},
{
"kind": 2,
"language": "github-issues",
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate",
"editable": false
}
]


@ -26,6 +26,7 @@
"test/automation/out/**": true,
"test/integration/browser/out/**": true,
"src/vs/base/test/node/uri.test.data.txt": true,
"src/vs/workbench/test/browser/api/extHostDocumentData.test.perf-data.ts": true,
"src/vs/server": false
},
"lcov.path": [
@ -72,7 +73,7 @@
},
"gulp.autoDetect": "off",
"files.insertFinalNewline": true,
"[typescript]": {
"[typescript]": {
"editor.defaultFormatter": "vscode.typescript-language-features"
},
"typescript.tsc.autoDetect": "off"

.vscode/tasks.json

@ -26,8 +26,8 @@
"message": 3
},
"background": {
"beginsPattern": "Starting compilation",
"endsPattern": "Finished compilation"
"beginsPattern": "\\[watch-client\\].*Starting compilation",
"endsPattern": "\\[watch-client\\].*Finished compilation"
}
}
},
@ -41,6 +41,43 @@
},
"problemMatcher": "$tsc"
},
{
"type": "npm",
"script": "watch-webd",
"label": "Build Web Extensions",
"group": "build",
"isBackground": true,
"presentation": {
"reveal": "never"
},
"problemMatcher": {
"owner": "typescript",
"applyTo": "closedDocuments",
"fileLocation": [
"absolute"
],
"pattern": {
"regexp": "Error: ([^(]+)\\((\\d+|\\d+,\\d+|\\d+,\\d+,\\d+,\\d+)\\): (.*)$",
"file": 1,
"location": 2,
"message": 3
},
"background": {
"beginsPattern": "Starting compilation",
"endsPattern": "Finished compilation"
}
}
},
{
"type": "npm",
"script": "kill-watch-webd",
"label": "Kill Build Web Extensions",
"group": "build",
"presentation": {
"reveal": "never",
},
"problemMatcher": "$tsc"
},
{
"label": "Run tests",
"type": "shell",


@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron"
target "7.3.1"
target "7.3.2"
runtime "electron"


@ -54,6 +54,10 @@ Many of the core components and extensions to VS Code live in their own reposito
VS Code includes a set of built-in extensions located in the [extensions](extensions) folder, including grammars and snippets for many languages. Extensions that provide rich language support (code completion, Go to Definition) for a language have the suffix `language-features`. For example, the `json` extension provides coloring for `JSON` and the `json-language-features` provides rich language support for `JSON`.
## Development Container
This repository includes a Visual Studio Code Remote - Containers / Codespaces development container. You can open it using the **Remote-Containers: Open Repository in Container...** from the [Remote - Containers](https://aka.ms/vscode-remote/download/containers) extension or by referencing the repository Git URL when creating a cloud-based environment using the [Visual Studio Codespaces](https://aka.ms/vscs-ext-vscode) extension. See the [development container README](.devcontainer/README.md) for more details on its use.
## Code of Conduct
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.

build/.webignore

@ -0,0 +1,27 @@
# cleanup rules for web node modules, .gitignore style
**/*.txt
**/*.json
**/*.md
**/*.d.ts
**/*.js.map
**/LICENSE
**/CONTRIBUTORS
jschardet/index.js
jschardet/src/**
jschardet/dist/jschardet.js
vscode-textmate/webpack.config.js
xterm/src/**
xterm-addon-search/src/**
xterm-addon-search/out/**
xterm-addon-search/fixtures/**
xterm-addon-unicode11/src/**
xterm-addon-unicode11/out/**
xterm-addon-webgl/src/**
xterm-addon-webgl/out/**


@ -10,10 +10,10 @@ git clone --depth 1 https://github.com/Microsoft/vscode-node-debug2.git
git clone --depth 1 https://github.com/Microsoft/vscode-node-debug.git
git clone --depth 1 https://github.com/Microsoft/vscode-html-languageservice.git
git clone --depth 1 https://github.com/Microsoft/vscode-json-languageservice.git
$BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints
$BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o .
node $BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints
node $BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o .
mkdir -p $BUILD_SOURCESDIRECTORY/.build/telemetry
mv declarations-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-core.json
mv config-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-extensions.json
cd ..
rm -rf extraction
rm -rf extraction


@ -31,10 +31,10 @@ steps:
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
git checkout origin/electron-8.0.x
git checkout origin/electron-x.y.z
git merge origin/master
# Push master branch into exploration branch
git push origin HEAD:electron-8.0.x
git push origin HEAD:electron-x.y.z
displayName: Sync & Merge Exploration


@ -218,7 +218,7 @@ steps:
restoreSolution: 'build\azure-pipelines\win32\ESRPClient\packages.config'
feedsToUse: config
nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
externalFeedCredentials: 3fc0b7f7-da09-4ae7-a9c8-d69824b1819b
externalFeedCredentials: 'ESRP Nuget'
restoreDirectory: packages
- task: ESRPImportCertTask@1


@ -10,7 +10,7 @@ const path = require('path');
let window = null;
app.once('ready', () => {
window = new BrowserWindow({ width: 800, height: 600, webPreferences: { nodeIntegration: true, webviewTag: true, enableWebSQL: false } });
window = new BrowserWindow({ width: 800, height: 600, webPreferences: { nodeIntegration: true, webviewTag: true, enableWebSQL: false, nativeWindowOpen: true } });
window.setMenuBarVisibility(false);
window.loadURL(url.format({ pathname: path.join(__dirname, 'index.html'), protocol: 'file:', slashes: true }));
// window.webContents.openDevTools();


@ -8,9 +8,11 @@ require('events').EventEmitter.defaultMaxListeners = 100;
const gulp = require('gulp');
const path = require('path');
const nodeUtil = require('util');
const tsb = require('gulp-tsb');
const es = require('event-stream');
const filter = require('gulp-filter');
const webpack = require('webpack');
const util = require('./lib/util');
const task = require('./lib/task');
const watcher = require('./lib/watch');
@ -21,6 +23,8 @@ const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const plumber = require('gulp-plumber');
const fancyLog = require('fancy-log');
const ansiColors = require('ansi-colors');
const ext = require('./lib/extensions');
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
@ -167,3 +171,78 @@ const compileExtensionsBuildTask = task.define('compile-extensions-build', task.
gulp.task(compileExtensionsBuildTask);
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
const compileWebExtensionsTask = task.define('compile-web', () => buildWebExtensions(false));
gulp.task(compileWebExtensionsTask);
exports.compileWebExtensionsTask = compileWebExtensionsTask;
const watchWebExtensionsTask = task.define('watch-web', () => buildWebExtensions(true));
gulp.task(watchWebExtensionsTask);
exports.watchWebExtensionsTask = watchWebExtensionsTask;
async function buildWebExtensions(isWatch) {
const webpackConfigLocations = await nodeUtil.promisify(glob)(
path.join(extensionsPath, '**', 'extension-browser.webpack.config.js'),
{ ignore: ['**/node_modules'] }
);
const webpackConfigs = [];
for (const webpackConfigPath of webpackConfigLocations) {
const configOrFnOrArray = require(webpackConfigPath);
function addConfig(configOrFn) {
if (typeof configOrFn === 'function') {
webpackConfigs.push(configOrFn({}, {}));
} else {
webpackConfigs.push(configOrFn);
}
}
addConfig(configOrFnOrArray);
}
function reporter(fullStats) {
if (Array.isArray(fullStats.children)) {
for (const stats of fullStats.children) {
const outputPath = stats.outputPath;
if (outputPath) {
const relativePath = path.relative(extensionsPath, outputPath).replace(/\\/g, '/');
const match = relativePath.match(/[^\/]+(\/server|\/client)?/);
fancyLog(`Finished ${ansiColors.green('packaging web extension')} ${ansiColors.cyan(match[0])} with ${stats.errors.length} errors.`);
}
if (Array.isArray(stats.errors)) {
stats.errors.forEach(error => {
fancyLog.error(error);
});
}
if (Array.isArray(stats.warnings)) {
stats.warnings.forEach(warning => {
fancyLog.warn(warning);
});
}
}
}
}
return new Promise((resolve, reject) => {
if (isWatch) {
webpack(webpackConfigs).watch({}, (err, stats) => {
if (err) {
reject();
} else {
reporter(stats.toJson());
}
});
} else {
webpack(webpackConfigs).run((err, stats) => {
if (err) {
fancyLog.error(err);
reject();
} else {
reporter(stats.toJson());
resolve();
}
});
}
});
}


@ -47,6 +47,7 @@ const nodeModules = ['electron', 'original-fs']
const vscodeEntryPoints = _.flatten([
buildfile.entrypoint('vs/workbench/workbench.desktop.main'),
buildfile.base,
buildfile.workerExtensionHost,
buildfile.workbenchDesktop,
buildfile.code
]);
@ -58,10 +59,12 @@ const vscodeResources = [
'out-build/bootstrap.js',
'out-build/bootstrap-fork.js',
'out-build/bootstrap-amd.js',
'out-build/bootstrap-node.js',
'out-build/bootstrap-window.js',
'out-build/paths.js',
'out-build/vs/**/*.{svg,png,html}',
'!out-build/vs/code/browser/**/*.html',
'!out-build/vs/editor/standalone/**/*.svg',
'out-build/vs/base/common/performance.js',
'out-build/vs/base/node/languagePacks.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
@ -72,6 +75,7 @@ const vscodeResources = [
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
'out-build/vs/workbench/contrib/webview/browser/pre/*.js',
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
'out-build/vs/workbench/services/extensions/worker/extensionHostWorkerMain.js',
'out-build/vs/**/markdown.css',
'out-build/vs/workbench/contrib/tasks/**/*.json',
'out-build/vs/platform/files/**/*.exe',


@ -13,6 +13,10 @@ module.exports = new class ApiLiteralOrTypes {
create(context) {
return {
['TSTypeAnnotation TSUnionType TSLiteralType']: (node) => {
var _a;
if (((_a = node.literal) === null || _a === void 0 ? void 0 : _a.type) === 'TSNullKeyword') {
return;
}
context.report({
node: node,
messageId: 'useEnum'


@ -15,6 +15,9 @@ export = new class ApiLiteralOrTypes implements eslint.Rule.RuleModule {
create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {
return {
['TSTypeAnnotation TSUnionType TSLiteralType']: (node: any) => {
if (node.literal?.type === 'TSNullKeyword') {
return;
}
context.report({
node: node,
messageId: 'useEnum'


@ -4,7 +4,7 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.packageMarketplaceExtensionsStream = exports.packageLocalWebExtensionsStream = exports.packageLocalExtensionsStream = exports.fromMarketplace = void 0;
exports.translatePackageJSON = exports.scanBuiltinExtensions = exports.packageMarketplaceWebExtensionsStream = exports.packageMarketplaceExtensionsStream = exports.packageLocalWebExtensionsStream = exports.packageLocalExtensionsStream = exports.fromMarketplace = void 0;
const es = require("event-stream");
const fs = require("fs");
const glob = require("glob");
@ -245,3 +245,75 @@ function packageMarketplaceExtensionsStream() {
.pipe(util2.setExecutableBit(['**/*.sh']));
}
exports.packageMarketplaceExtensionsStream = packageMarketplaceExtensionsStream;
function packageMarketplaceWebExtensionsStream(builtInExtensions) {
const extensions = builtInExtensions
.map(extension => {
const input = fromMarketplace(extension.name, extension.version, extension.metadata)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
return updateExtensionPackageJSON(input, (data) => {
if (data.main) {
data.browser = data.main;
}
data.extensionKind = ['web'];
return data;
});
});
return es.merge(extensions);
}
exports.packageMarketplaceWebExtensionsStream = packageMarketplaceWebExtensionsStream;
function scanBuiltinExtensions(extensionsRoot, forWeb) {
const scannedExtensions = [];
const extensionsFolders = fs.readdirSync(extensionsRoot);
for (const extensionFolder of extensionsFolders) {
const packageJSONPath = path.join(extensionsRoot, extensionFolder, 'package.json');
if (!fs.existsSync(packageJSONPath)) {
continue;
}
let packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8'));
const extensionKind = packageJSON['extensionKind'] || [];
if (forWeb && extensionKind.indexOf('web') === -1) {
continue;
}
const children = fs.readdirSync(path.join(extensionsRoot, extensionFolder));
const packageNLS = children.filter(child => child === 'package.nls.json')[0];
const readme = children.filter(child => /^readme(\.txt|\.md|)$/i.test(child))[0];
const changelog = children.filter(child => /^changelog(\.txt|\.md|)$/i.test(child))[0];
if (packageNLS) {
// temporary
packageJSON = translatePackageJSON(packageJSON, path.join(extensionsRoot, extensionFolder, packageNLS));
}
scannedExtensions.push({
extensionPath: extensionFolder,
packageJSON,
packageNLSPath: packageNLS ? path.join(extensionFolder, packageNLS) : undefined,
readmePath: readme ? path.join(extensionFolder, readme) : undefined,
changelogPath: changelog ? path.join(extensionFolder, changelog) : undefined,
});
}
return scannedExtensions;
}
exports.scanBuiltinExtensions = scanBuiltinExtensions;
function translatePackageJSON(packageJSON, packageNLSPath) {
const CharCode_PC = '%'.charCodeAt(0);
const packageNls = JSON.parse(fs.readFileSync(packageNLSPath).toString());
const translate = (obj) => {
for (let key in obj) {
const val = obj[key];
if (Array.isArray(val)) {
val.forEach(translate);
}
else if (val && typeof val === 'object') {
translate(val);
}
else if (typeof val === 'string' && val.charCodeAt(0) === CharCode_PC && val.charCodeAt(val.length - 1) === CharCode_PC) {
const translated = packageNls[val.substr(1, val.length - 2)];
if (translated) {
obj[key] = translated;
}
}
}
};
translate(packageJSON);
return packageJSON;
}
exports.translatePackageJSON = translatePackageJSON;


@ -78,7 +78,7 @@ function fromLocal(extensionPath: string, forWeb: boolean): Stream {
});
}
return minimizeLanguageJSON(input)
return minimizeLanguageJSON(input);
}
@ -291,3 +291,82 @@ export function packageMarketplaceExtensionsStream(): NodeJS.ReadWriteStream {
return es.merge(extensions)
.pipe(util2.setExecutableBit(['**/*.sh']));
}
export function packageMarketplaceWebExtensionsStream(builtInExtensions: IBuiltInExtension[]): NodeJS.ReadWriteStream {
const extensions = builtInExtensions
.map(extension => {
const input = fromMarketplace(extension.name, extension.version, extension.metadata)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
return updateExtensionPackageJSON(input, (data: any) => {
if (data.main) {
data.browser = data.main;
}
data.extensionKind = ['web'];
return data;
});
});
return es.merge(extensions);
}
export interface IScannedBuiltinExtension {
extensionPath: string,
packageJSON: any,
packageNLSPath?: string,
readmePath?: string,
changelogPath?: string,
}
export function scanBuiltinExtensions(extensionsRoot: string, forWeb: boolean): IScannedBuiltinExtension[] {
const scannedExtensions: IScannedBuiltinExtension[] = [];
const extensionsFolders = fs.readdirSync(extensionsRoot);
for (const extensionFolder of extensionsFolders) {
const packageJSONPath = path.join(extensionsRoot, extensionFolder, 'package.json');
if (!fs.existsSync(packageJSONPath)) {
continue;
}
let packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8'));
const extensionKind: string[] = packageJSON['extensionKind'] || [];
if (forWeb && extensionKind.indexOf('web') === -1) {
continue;
}
const children = fs.readdirSync(path.join(extensionsRoot, extensionFolder));
const packageNLS = children.filter(child => child === 'package.nls.json')[0];
const readme = children.filter(child => /^readme(\.txt|\.md|)$/i.test(child))[0];
const changelog = children.filter(child => /^changelog(\.txt|\.md|)$/i.test(child))[0];
if (packageNLS) {
// temporary
packageJSON = translatePackageJSON(packageJSON, path.join(extensionsRoot, extensionFolder, packageNLS))
}
scannedExtensions.push({
extensionPath: extensionFolder,
packageJSON,
packageNLSPath: packageNLS ? path.join(extensionFolder, packageNLS) : undefined,
readmePath: readme ? path.join(extensionFolder, readme) : undefined,
changelogPath: changelog ? path.join(extensionFolder, changelog) : undefined,
});
}
return scannedExtensions;
}
export function translatePackageJSON(packageJSON: string, packageNLSPath: string) {
const CharCode_PC = '%'.charCodeAt(0);
const packageNls = JSON.parse(fs.readFileSync(packageNLSPath).toString());
const translate = (obj: any) => {
for (let key in obj) {
const val = obj[key];
if (Array.isArray(val)) {
val.forEach(translate);
} else if (val && typeof val === 'object') {
translate(val);
} else if (typeof val === 'string' && val.charCodeAt(0) === CharCode_PC && val.charCodeAt(val.length - 1) === CharCode_PC) {
const translated = packageNls[val.substr(1, val.length - 2)];
if (translated) {
obj[key] = translated;
}
}
}
};
translate(packageJSON);
return packageJSON;
}


@ -16,7 +16,7 @@ const https = require("https");
const gulp = require("gulp");
const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const iconv = require("iconv-lite");
const iconv = require("iconv-lite-umd");
const NUMBER_OF_CONCURRENT_DOWNLOADS = 4;
function log(message, ...rest) {
fancyLog(ansiColors.green('[i18n]'), message, ...rest);
@ -101,161 +101,158 @@ class TextModel {
return this._lines;
}
}
let XLF = /** @class */ (() => {
class XLF {
constructor(project) {
this.project = project;
this.buffer = [];
this.files = Object.create(null);
this.numberOfMessages = 0;
class XLF {
constructor(project) {
this.project = project;
this.buffer = [];
this.files = Object.create(null);
this.numberOfMessages = 0;
}
toString() {
this.appendHeader();
for (let file in this.files) {
this.appendNewLine(`<file original="${file}" source-language="en" datatype="plaintext"><body>`, 2);
for (let item of this.files[file]) {
this.addStringItem(file, item);
}
this.appendNewLine('</body></file>', 2);
}
toString() {
this.appendHeader();
for (let file in this.files) {
this.appendNewLine(`<file original="${file}" source-language="en" datatype="plaintext"><body>`, 2);
for (let item of this.files[file]) {
this.addStringItem(file, item);
this.appendFooter();
return this.buffer.join('\r\n');
}
addFile(original, keys, messages) {
if (keys.length === 0) {
console.log('No keys in ' + original);
return;
}
if (keys.length !== messages.length) {
throw new Error(`Unmatching keys(${keys.length}) and messages(${messages.length}).`);
}
this.numberOfMessages += keys.length;
this.files[original] = [];
let existingKeys = new Set();
for (let i = 0; i < keys.length; i++) {
let key = keys[i];
let realKey;
let comment;
if (Is.string(key)) {
realKey = key;
comment = undefined;
}
else if (LocalizeInfo.is(key)) {
realKey = key.key;
if (key.comment && key.comment.length > 0) {
comment = key.comment.map(comment => encodeEntities(comment)).join('\r\n');
}
this.appendNewLine('</body></file>', 2);
}
this.appendFooter();
return this.buffer.join('\r\n');
}
addFile(original, keys, messages) {
if (keys.length === 0) {
console.log('No keys in ' + original);
return;
if (!realKey || existingKeys.has(realKey)) {
continue;
}
if (keys.length !== messages.length) {
throw new Error(`Unmatching keys(${keys.length}) and messages(${messages.length}).`);
}
this.numberOfMessages += keys.length;
this.files[original] = [];
let existingKeys = new Set();
for (let i = 0; i < keys.length; i++) {
let key = keys[i];
let realKey;
let comment;
if (Is.string(key)) {
realKey = key;
comment = undefined;
}
else if (LocalizeInfo.is(key)) {
realKey = key.key;
if (key.comment && key.comment.length > 0) {
comment = key.comment.map(comment => encodeEntities(comment)).join('\r\n');
}
}
if (!realKey || existingKeys.has(realKey)) {
continue;
}
existingKeys.add(realKey);
let message = encodeEntities(messages[i]);
this.files[original].push({ id: realKey, message: message, comment: comment });
}
}
addStringItem(file, item) {
if (!item.id || item.message === undefined || item.message === null) {
throw new Error(`No item ID or value specified: ${JSON.stringify(item)}. File: ${file}`);
}
if (item.message.length === 0) {
log(`Item with id ${item.id} in file ${file} has an empty message.`);
}
this.appendNewLine(`<trans-unit id="${item.id}">`, 4);
this.appendNewLine(`<source xml:lang="en">${item.message}</source>`, 6);
if (item.comment) {
this.appendNewLine(`<note>${item.comment}</note>`, 6);
}
this.appendNewLine('</trans-unit>', 4);
}
appendHeader() {
this.appendNewLine('<?xml version="1.0" encoding="utf-8"?>', 0);
this.appendNewLine('<xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2">', 0);
}
appendFooter() {
this.appendNewLine('</xliff>', 0);
}
appendNewLine(content, indent) {
let line = new Line(indent);
line.append(content);
this.buffer.push(line.toString());
existingKeys.add(realKey);
let message = encodeEntities(messages[i]);
this.files[original].push({ id: realKey, message: message, comment: comment });
}
}
XLF.parsePseudo = function (xlfString) {
return new Promise((resolve) => {
let parser = new xml2js.Parser();
let files = [];
parser.parseString(xlfString, function (_err, result) {
const fileNodes = result['xliff']['file'];
fileNodes.forEach(file => {
const originalFilePath = file.$.original;
const messages = {};
const transUnits = file.body[0]['trans-unit'];
if (transUnits) {
transUnits.forEach((unit) => {
const key = unit.$.id;
const val = pseudify(unit.source[0]['_'].toString());
if (key && val) {
messages[key] = decodeEntities(val);
}
});
files.push({ messages: messages, originalFilePath: originalFilePath, language: 'ps' });
}
});
resolve(files);
});
});
};
XLF.parse = function (xlfString) {
return new Promise((resolve, reject) => {
let parser = new xml2js.Parser();
let files = [];
parser.parseString(xlfString, function (err, result) {
if (err) {
reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`));
}
const fileNodes = result['xliff']['file'];
if (!fileNodes) {
reject(new Error(`XLF parsing error: XLIFF file does not contain "xliff" or "file" node(s) required for parsing.`));
}
fileNodes.forEach((file) => {
const originalFilePath = file.$.original;
if (!originalFilePath) {
reject(new Error(`XLF parsing error: XLIFF file node does not contain original attribute to determine the original location of the resource file.`));
}
let language = file.$['target-language'];
if (!language) {
reject(new Error(`XLF parsing error: XLIFF file node does not contain target-language attribute to determine translated language.`));
}
const messages = {};
const transUnits = file.body[0]['trans-unit'];
if (transUnits) {
transUnits.forEach((unit) => {
const key = unit.$.id;
if (!unit.target) {
return; // No translation available
}
let val = unit.target[0];
if (typeof val !== 'string') {
val = val._;
}
if (key && val) {
messages[key] = decodeEntities(val);
}
else {
reject(new Error(`XLF parsing error: XLIFF file ${originalFilePath} does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`));
}
});
files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
}
});
resolve(files);
});
});
};
return XLF;
})();
addStringItem(file, item) {
if (!item.id || item.message === undefined || item.message === null) {
throw new Error(`No item ID or value specified: ${JSON.stringify(item)}. File: ${file}`);
}
if (item.message.length === 0) {
log(`Item with id ${item.id} in file ${file} has an empty message.`);
}
this.appendNewLine(`<trans-unit id="${item.id}">`, 4);
this.appendNewLine(`<source xml:lang="en">${item.message}</source>`, 6);
if (item.comment) {
this.appendNewLine(`<note>${item.comment}</note>`, 6);
}
this.appendNewLine('</trans-unit>', 4);
}
appendHeader() {
this.appendNewLine('<?xml version="1.0" encoding="utf-8"?>', 0);
this.appendNewLine('<xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2">', 0);
}
appendFooter() {
this.appendNewLine('</xliff>', 0);
}
appendNewLine(content, indent) {
let line = new Line(indent);
line.append(content);
this.buffer.push(line.toString());
}
}
exports.XLF = XLF;
XLF.parsePseudo = function (xlfString) {
return new Promise((resolve) => {
let parser = new xml2js.Parser();
let files = [];
parser.parseString(xlfString, function (_err, result) {
const fileNodes = result['xliff']['file'];
fileNodes.forEach(file => {
const originalFilePath = file.$.original;
const messages = {};
const transUnits = file.body[0]['trans-unit'];
if (transUnits) {
transUnits.forEach((unit) => {
const key = unit.$.id;
const val = pseudify(unit.source[0]['_'].toString());
if (key && val) {
messages[key] = decodeEntities(val);
}
});
files.push({ messages: messages, originalFilePath: originalFilePath, language: 'ps' });
}
});
resolve(files);
});
});
};
XLF.parse = function (xlfString) {
return new Promise((resolve, reject) => {
let parser = new xml2js.Parser();
let files = [];
parser.parseString(xlfString, function (err, result) {
if (err) {
reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`));
}
const fileNodes = result['xliff']['file'];
if (!fileNodes) {
reject(new Error(`XLF parsing error: XLIFF file does not contain "xliff" or "file" node(s) required for parsing.`));
}
fileNodes.forEach((file) => {
const originalFilePath = file.$.original;
if (!originalFilePath) {
reject(new Error(`XLF parsing error: XLIFF file node does not contain original attribute to determine the original location of the resource file.`));
}
let language = file.$['target-language'];
if (!language) {
reject(new Error(`XLF parsing error: XLIFF file node does not contain target-language attribute to determine translated language.`));
}
const messages = {};
const transUnits = file.body[0]['trans-unit'];
if (transUnits) {
transUnits.forEach((unit) => {
const key = unit.$.id;
if (!unit.target) {
return; // No translation available
}
let val = unit.target[0];
if (typeof val !== 'string') {
val = val._;
}
if (key && val) {
messages[key] = decodeEntities(val);
}
else {
reject(new Error(`XLF parsing error: XLIFF file ${originalFilePath} does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`));
}
});
files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
}
});
resolve(files);
});
});
};
class Limiter {
constructor(maxDegreeOfParalellism) {
this.maxDegreeOfParalellism = maxDegreeOfParalellism;
@ -1178,9 +1175,10 @@ function createIslFile(originalFilePath, messages, language, innoSetup) {
});
const basename = path.basename(originalFilePath);
const filePath = `${basename}.${language.id}.isl`;
const encoded = iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage);
return new File({
path: filePath,
contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage)
contents: Buffer.from(encoded),
});
}
function encodeEntities(value) {

View file

@ -218,6 +218,10 @@
"name": "vs/workbench/contrib/userDataSync",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/views",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/actions",
"project": "vscode-workbench"

View file

@ -15,7 +15,7 @@ import * as https from 'https';
import * as gulp from 'gulp';
import * as fancyLog from 'fancy-log';
import * as ansiColors from 'ansi-colors';
import * as iconv from 'iconv-lite';
import * as iconv from 'iconv-lite-umd';
const NUMBER_OF_CONCURRENT_DOWNLOADS = 4;
@ -1339,10 +1339,11 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
const basename = path.basename(originalFilePath);
const filePath = `${basename}.${language.id}.isl`;
const encoded = iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage);
return new File({
path: filePath,
contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage)
contents: Buffer.from(encoded),
});
}
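
A short sketch of the encoding change above, assuming iconv-lite-umd's encode() returns a plain Uint8Array rather than a Node Buffer (which would be why the result is now wrapped in Buffer.from before being handed to the Vinyl File); the code page and ISL lines are made up for illustration.

import * as iconv from 'iconv-lite-umd';

const content = ['[LangOptions]', 'LanguageID=$0407'];        // illustrative ISL lines
const encoded = iconv.encode(content.join('\r\n'), 'cp1252'); // Uint8Array (assumed), code page hypothetical
const contents = Buffer.from(encoded);                        // what new File({ contents }) expects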

View file

@ -73,3 +73,5 @@ yarnInstall('test/automation'); // node modules required for smoketest
yarnInstall('test/smoke'); // node modules required for smoketest
yarnInstall('test/integration/browser'); // node modules required for integration
yarnInstallBuildDependencies(); // node modules for watching, specific to host node version, not electron
cp.execSync('git config pull.rebase true');

View file

@ -38,15 +38,15 @@
"gulp-bom": "^1.0.0",
"gulp-sourcemaps": "^1.11.0",
"gulp-uglify": "^3.0.0",
"iconv-lite": "0.4.23",
"iconv-lite-umd": "0.6.5",
"mime": "^1.3.4",
"minimatch": "3.0.4",
"minimist": "^1.2.3",
"request": "^2.85.0",
"terser": "4.3.8",
"typescript": "^3.9.3",
"typescript": "^4.0.0-dev.20200629",
"vsce": "1.48.0",
"vscode-telemetry-extractor": "^1.5.4",
"vscode-telemetry-extractor": "^1.6.0",
"xml2js": "^0.4.17"
},
"scripts": {

View file

@ -1132,7 +1132,7 @@ begin
end;
end;
// http://stackoverflow.com/a/23838239/261019
// https://stackoverflow.com/a/23838239/261019
procedure Explode(var Dest: TArrayOfString; Text: String; Separator: String);
var
i, p: Integer;

View file

@ -394,6 +394,11 @@ acorn@4.X:
resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787"
integrity sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c=
agent-base@5:
version "5.1.1"
resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-5.1.1.tgz#e8fb3f242959db44d63be665db7a8e739537a32c"
integrity sha512-TMeqbNl2fMW0nMjTEPOwe3J/PRFP4vqeoNuQMG0HlMrtm5QxKqdvAkZ1pRBQ/ulIyDD5Yq0nJ7YbdD8ey0TO3g==
ajv@^4.9.1:
version "4.11.8"
resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.11.8.tgz#82ffb02b29e662ae53bdc20af15947706739c536"
@ -845,7 +850,7 @@ debug@2.X, debug@^2.6.8:
dependencies:
ms "2.0.0"
debug@^4.1.1:
debug@4, debug@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791"
integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==
@ -1415,12 +1420,18 @@ http-signature@~1.2.0:
jsprim "^1.2.2"
sshpk "^1.7.0"
iconv-lite@0.4.23:
version "0.4.23"
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.23.tgz#297871f63be507adcfbfca715d0cd0eed84e9a63"
integrity sha512-neyTUVFtahjf0mB3dZT77u+8O0QB89jFdnBkd5P1JgYPbPaia3gXXOVL2fq8VyU2gMMD7SaN7QukTB/pmXYvDA==
https-proxy-agent@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-4.0.0.tgz#702b71fb5520a132a66de1f67541d9e62154d82b"
integrity sha512-zoDhWrkR3of1l9QAL8/scJZyLu8j/gBkcwcaQOZh7Gyh/+uJQzGVETdgT30akuwkpL8HTRfssqI3BZuV18teDg==
dependencies:
safer-buffer ">= 2.1.2 < 3"
agent-base "5"
debug "4"
iconv-lite-umd@0.6.5:
version "0.6.5"
resolved "https://registry.yarnpkg.com/iconv-lite-umd/-/iconv-lite-umd-0.6.5.tgz#6a1f621a3b4d125f72feff813a9839e1ebd6c722"
integrity sha512-WDegH4al+e3n3jTOStRvm+jzDA3JMUQGgzdAsMxAgcgB0Oi72HjfdsoX08ieKsy3rKexXVjWZr41aOIUaCZnMg==
ignore@^5.1.1:
version "5.1.2"
@ -2053,6 +2064,11 @@ process-nextick-args@~2.0.0:
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.0.tgz#a37d732f4271b4ab1ad070d35508e8290788ffaa"
integrity sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==
proxy-from-env@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2"
integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==
pump@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
@ -2228,11 +2244,6 @@ safe-buffer@~5.1.0, safe-buffer@~5.1.1:
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
"safer-buffer@>= 2.1.2 < 3":
version "2.1.2"
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
sax@0.5.2:
version "0.5.2"
resolved "https://registry.yarnpkg.com/sax/-/sax-0.5.2.tgz#735ffaa39a1cff8ffb9598f0223abdb03a9fb2ea"
@ -2519,10 +2530,10 @@ typescript@^3.0.1:
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977"
integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g==
typescript@^3.9.3:
version "3.9.3"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.9.3.tgz#d3ac8883a97c26139e42df5e93eeece33d610b8a"
integrity sha512-D/wqnB2xzNFIcoBG9FG8cXRDjiqSTbG2wd8DMZeQyJlP1vfTkIxH4GKveWaEBYySKIg+USu+E+EDIR47SqnaMQ==
typescript@^4.0.0-dev.20200629:
version "4.0.0-dev.20200629"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.0.0-dev.20200629.tgz#4631667ebffe3a340beee885a4bebe3a73b6f18e"
integrity sha512-c4DUu7KvTcx4x7V8sBWexYNkCfioiH1huOJL6WFAA8Oot0Gr/+PlKKDBS9fYjsadEv1JI1qboJKobwLQn0kQXw==
typical@^4.0.0:
version "4.0.0"
@ -2654,19 +2665,22 @@ vsce@1.48.0:
yauzl "^2.3.1"
yazl "^2.2.2"
vscode-ripgrep@^1.5.6:
version "1.5.7"
resolved "https://registry.yarnpkg.com/vscode-ripgrep/-/vscode-ripgrep-1.5.7.tgz#acb6b548af488a4bca5d0f1bb5faf761343289ce"
integrity sha512-/Vsz/+k8kTvui0q3O74pif9FK0nKopgFTiGNVvxicZANxtSA8J8gUE9GQ/4dpi7D/2yI/YVORszwVskFbz46hQ==
vscode-ripgrep@^1.6.2:
version "1.6.2"
resolved "https://registry.yarnpkg.com/vscode-ripgrep/-/vscode-ripgrep-1.6.2.tgz#fb912c7465699f10ce0218a6676cc632c77369b4"
integrity sha512-jkZEWnQFcE+QuQFfxQXWcWtDafTmgkp3DjMKawDkajZwgnDlGKpFp15ybKrZNVTi1SLEF/12BzxYSZVVZ2XrkA==
dependencies:
https-proxy-agent "^4.0.0"
proxy-from-env "^1.1.0"
vscode-telemetry-extractor@^1.5.4:
version "1.5.4"
resolved "https://registry.yarnpkg.com/vscode-telemetry-extractor/-/vscode-telemetry-extractor-1.5.4.tgz#bcb0d17667fa1b77715e3a3bf372ade18f846782"
integrity sha512-MN9LNPo0Rc6cy3sIWTAG97PTWkEKdRnP0VeYoS8vjKSNtG9CAsrUxHgFfYoHm2vNK/ijd0a4NzETyVGO2kT6hw==
vscode-telemetry-extractor@^1.6.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/vscode-telemetry-extractor/-/vscode-telemetry-extractor-1.6.0.tgz#e9d9c1d24863cce8d3d715f0287de3b31eb90c56"
integrity sha512-zSxvkbyAMa1lTRGIHfGg7gW2e9Sey+2zGYD19uNWCsVEfoXAr2NB6uzb0sNHtbZ2SSqxSePmFXzBAavsudT5fw==
dependencies:
command-line-args "^5.1.1"
ts-morph "^3.1.3"
vscode-ripgrep "^1.5.6"
vscode-ripgrep "^1.6.2"
vso-node-api@6.1.2-preview:
version "6.1.2-preview"

View file

@ -200,6 +200,135 @@
},
{
"name": "big-integer",
"prependLicenseText": ["Copyright released to public domain"]
"prependLicenseText": [
"Copyright released to public domain"
]
},
{
// Reason: The license at https://github.com/justmoon/node-extend/blob/main/LICENSE
// cannot be found by the OSS tool automatically.
"name": "extend",
"fullLicenseText": [
"The MIT License (MIT)",
"",
"Copyright (c) 2014 Stefan Thomas",
"",
"Permission is hereby granted, free of charge, to any person obtaining",
"a copy of this software and associated documentation files (the",
"\"Software\"), to deal in the Software without restriction, including",
"without limitation the rights to use, copy, modify, merge, publish,",
"distribute, sublicense, and/or sell copies of the Software, and to",
"permit persons to whom the Software is furnished to do so, subject to",
"the following conditions:",
"",
"The above copyright notice and this permission notice shall be",
"included in all copies or substantial portions of the Software.",
"",
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,",
"EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF",
"MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND",
"NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE",
"LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION",
"OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION",
"WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE."
]
},
{
// Reason: The license at https://github.com/retep998/winapi-rs/blob/0.3/LICENSE-MIT
// cannot be found by the OSS tool automatically.
"name": "retep998/winapi-rs",
"fullLicenseText": [
"Copyright (c) 2015-2018 The winapi-rs Developers",
"",
"Permission is hereby granted, free of charge, to any person obtaining a copy",
"of this software and associated documentation files (the \"Software\"), to deal",
"in the Software without restriction, including without limitation the rights",
"to use, copy, modify, merge, publish, distribute, sublicense, and/or sell",
"copies of the Software, and to permit persons to whom the Software is",
"furnished to do so, subject to the following conditions:",
"",
"The above copyright notice and this permission notice shall be included in all",
"copies or substantial portions of the Software.",
"",
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR",
"IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,",
"FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE",
"AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER",
"LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,",
"OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE",
"SOFTWARE."
]
},
{
// Reason: The license at https://github.com/digitaldesignlabs/es6-promisify/blob/main/LICENSE
// cannot be found by the OSS tool automatically.
"name": "es6-promisify",
"fullLicenseText": [
"Copyright (c) 2014 Mike Hall / Digital Design Labs",
"",
"Permission is hereby granted, free of charge, to any person obtaining a copy",
"of this software and associated documentation files (the \"Software\"), to deal",
"in the Software without restriction, including without limitation the rights",
"to use, copy, modify, merge, publish, distribute, sublicense, and/or sell",
"copies of the Software, and to permit persons to whom the Software is",
"furnished to do so, subject to the following conditions:",
"",
"The above copyright notice and this permission notice shall be included in all",
"copies or substantial portions of the Software.",
"",
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR",
"IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,",
"FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE",
"AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER",
"LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,",
"OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE",
"SOFTWARE."
]
},
{
// Reason: The license at https://github.com/zkat/json-parse-better-errors/blob/latest/LICENSE.md
// cannot be found by the OSS tool automatically.
"name": "json-parse-better-errors",
"fullLicenseText": [
"Copyright 2017 Kat Marchán",
"",
"Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the",
"\"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute,",
"sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following",
"conditions:",
"",
"The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.",
"",
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE",
"WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS",
"OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR",
"OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE."
]
},
{
// Reason: The license at https://github.com/time-rs/time/blob/main/LICENSE-MIT
// cannot be found by the OSS tool automatically.
"name": "time-rs/time",
"fullLicenseText": [
"Copyright (c) 2019 Jacob Pratt",
"",
"Permission is hereby granted, free of charge, to any person obtaining a copy",
"of this software and associated documentation files (the \"Software\"), to deal",
"in the Software without restriction, including without limitation the rights",
"to use, copy, modify, merge, publish, distribute, sublicense, and/or sell",
"copies of the Software, and to permit persons to whom the Software is",
"furnished to do so, subject to the following conditions:",
"",
"The above copyright notice and this permission notice shall be included in all",
"copies or substantial portions of the Software.",
"",
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR",
"IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,",
"FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE",
"AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER",
"LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,",
"OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE",
"SOFTWARE."
]
}
]

View file

@ -60,12 +60,12 @@
"git": {
"name": "electron",
"repositoryUrl": "https://github.com/electron/electron",
"commitHash": "bc8fc0d406d32e4c02f3ec9f161deaacbe4f5989"
"commitHash": "5f93e889020d279d5a9cd1ecab080ab467312447"
}
},
"isOnlyProductionDependency": true,
"license": "MIT",
"version": "7.3.1"
"version": "7.3.2"
},
{
"component": {
@ -77,6 +77,40 @@
}
},
"isOnlyProductionDependency": true,
"licenseDetail": [
"Inno Setup License",
"==================",
"",
"Except where otherwise noted, all of the documentation and software included in the Inno Setup",
"package is copyrighted by Jordan Russell.",
"",
"Copyright (C) 1997-2020 Jordan Russell. All rights reserved.",
"Portions Copyright (C) 2000-2020 Martijn Laan. All rights reserved.",
"",
"This software is provided \"as-is,\" without any express or implied warranty. In no event shall the",
"author be held liable for any damages arising from the use of this software.",
"",
"Permission is granted to anyone to use this software for any purpose, including commercial",
"applications, and to alter and redistribute it, provided that the following conditions are met:",
"",
"1. All redistributions of source code files must retain all copyright notices that are currently in",
" place, and this list of conditions without modification.",
"",
"2. All redistributions in binary form must retain all occurrences of the above copyright notice and",
" web site addresses that are currently in place (for example, in the About boxes).",
"",
"3. The origin of this software must not be misrepresented; you must not claim that you wrote the",
" original software. If you use this software to distribute a product, an acknowledgment in the",
" product documentation would be appreciated but is not required.",
"",
"4. Modified versions in source or binary form must be plainly marked as such, and must not be",
" misrepresented as being the original software.",
"",
"",
"Jordan Russell",
"jr-2010 AT jrsoftware.org",
"https://jrsoftware.org/"
],
"version": "5.5.6"
},
{
@ -499,7 +533,7 @@
"git": {
"name": "ripgrep",
"repositoryUrl": "https://github.com/BurntSushi/ripgrep",
"commitHash": "8a7db1a918e969b85cd933d8ed9fa5285b281ba4"
"commitHash": "973de50c9ef451da2cfcdfa86f2b2711d8d6ff48"
}
},
"isOnlyProductionDependency": true,

View file

@ -7,27 +7,15 @@
'use strict';
const withDefaults = require('../shared.webpack.config');
const path = require('path');
const withBrowserDefaults = require('../shared.webpack.config').browser;
const clientConfig = withDefaults({
target: 'webworker',
module.exports = withBrowserDefaults({
context: __dirname,
entry: {
extension: './src/configurationEditingMain.ts'
},
output: {
filename: 'configurationEditingMain.js'
},
performance: {
hints: false
},
resolve: {
alias: {
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js')
}
}
});
clientConfig.module.rules[0].use.shift(); // remove nls loader
module.exports = clientConfig;

View file

@ -13,7 +13,7 @@
"onLanguage:jsonc"
],
"main": "./out/configurationEditingMain",
"browser": "./dist/configurationEditingMain",
"browser": "./dist/browser/configurationEditingMain",
"scripts": {
"compile": "gulp compile-extension:configuration-editing",
"watch": "gulp watch-extension:configuration-editing"
@ -117,6 +117,10 @@
"fileMatch": "/.devcontainer.json",
"url": "./schemas/devContainer.schema.json"
},
{
"fileMatch": "%APP_SETTINGS_HOME%/globalStorage/ms-vscode-remote.remote-containers/nameConfigs/*.json",
"url": "./schemas/attachContainer.schema.json"
},
{
"fileMatch": "%APP_SETTINGS_HOME%/globalStorage/ms-vscode-remote.remote-containers/imageConfigs/*.json",
"url": "./schemas/attachContainer.schema.json"

View file

@ -42,8 +42,8 @@
"description": "An array of extensions that should be installed into the container.",
"items": {
"type": "string",
"pattern": "^([a-z0-9A-Z][a-z0-9\\-A-Z]*)\\.([a-z0-9A-Z][a-z0-9\\-A-Z]*)$",
"errorMessage": "Expected format '${publisher}.${name}'. Example: 'vscode.csharp'."
"pattern": "^([a-z0-9A-Z][a-z0-9\\-A-Z]*)\\.([a-z0-9A-Z][a-z0-9\\-A-Z]*)(@(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?)?$",
"errorMessage": "Expected format: '${publisher}.${name}' or '${publisher}.${name}@${version}'. Example: 'ms-dotnettools.csharp'."
}
},
"postAttachCommand": {

View file

@ -17,8 +17,8 @@
"description": "An array of extensions that should be installed into the container.",
"items": {
"type": "string",
"pattern": "^([a-z0-9A-Z][a-z0-9\\-A-Z]*)\\.([a-z0-9A-Z][a-z0-9\\-A-Z]*)$",
"errorMessage": "Expected format '${publisher}.${name}'. Example: 'vscode.csharp'."
"pattern": "^([a-z0-9A-Z][a-z0-9\\-A-Z]*)\\.([a-z0-9A-Z][a-z0-9\\-A-Z]*)(@(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?)?$",
"errorMessage": "Expected format: '${publisher}.${name}' or '${publisher}.${name}@${version}'. Example: 'ms-dotnettools.csharp'."
}
},
"settings": {

View file

@ -19,7 +19,7 @@
"git": {
"name": "textmate/c.tmbundle",
"repositoryUrl": "https://github.com/textmate/c.tmbundle",
"commitHash": "9aa365882274ca52f01722f3dbb169b9539a20ee"
"commitHash": "60daf83b9d45329524f7847a75e9298b3aae5805"
}
},
"licenseDetail": [
@ -42,4 +42,4 @@
}
],
"version": 1
}
}

File diff suppressed because one or more lines are too long

View file

@ -76,17 +76,13 @@ function getCustomDataPathsInAllWorkspaces(): string[] {
function getCustomDataPathsFromAllExtensions(): string[] {
const dataPaths: string[] = [];
for (const extension of extensions.all) {
const contributes = extension.packageJSON && extension.packageJSON.contributes;
if (contributes && contributes.css && contributes.css.customData && Array.isArray(contributes.css.customData)) {
const relativePaths: string[] = contributes.css.customData;
relativePaths.forEach(rp => {
const customData = extension.packageJSON?.contributes?.css?.customData;
if (Array.isArray(customData)) {
for (const rp of customData) {
dataPaths.push(joinPath(extension.extensionUri, rp).toString());
});
}
}
}
return dataPaths;
}

View file

@ -7,11 +7,10 @@
'use strict';
const withDefaults = require('../shared.webpack.config');
const withBrowserDefaults = require('../shared.webpack.config').browser;
const path = require('path');
const clientConfig = withDefaults({
target: 'webworker',
module.exports = withBrowserDefaults({
context: path.join(__dirname, 'client'),
entry: {
extension: './src/browser/cssClientMain.ts'
@ -19,16 +18,5 @@ const clientConfig = withDefaults({
output: {
filename: 'cssClientMain.js',
path: path.join(__dirname, 'client', 'dist', 'browser')
},
performance: {
hints: false
},
resolve: {
alias: {
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js')
}
}
});
clientConfig.module.rules[0].use.shift(); // remove nls loader
module.exports = clientConfig;

View file

@ -807,7 +807,7 @@
]
},
"dependencies": {
"vscode-languageclient": "7.0.0-next.5",
"vscode-languageclient": "7.0.0-next.5.1",
"vscode-nls": "^4.1.2"
},
"devDependencies": {

View file

@ -7,11 +7,10 @@
'use strict';
const withDefaults = require('../../shared.webpack.config');
const withBrowserDefaults = require('../../shared.webpack.config').browser;
const path = require('path');
const serverConfig = withDefaults({
target: 'webworker',
module.exports = withBrowserDefaults({
context: __dirname,
entry: {
extension: './src/browser/cssServerMain.ts',
@ -20,16 +19,5 @@ const serverConfig = withDefaults({
filename: 'cssServerMain.js',
path: path.join(__dirname, 'dist', 'browser'),
libraryTarget: 'var'
},
performance: {
hints: false
},
resolve: {
alias: {
'vscode-nls': path.resolve(__dirname, '../../../build/polyfills/vscode-nls.js')
}
}
});
serverConfig.module.rules[0].use.shift(); // remove nls loader
module.exports = serverConfig;

View file

@ -10,7 +10,7 @@
"main": "./out/node/cssServerMain",
"browser": "./dist/browser/cssServerMain",
"dependencies": {
"vscode-css-languageservice": "4.3.0-next.2",
"vscode-css-languageservice": "^4.3.0",
"vscode-languageserver": "7.0.0-next.3",
"vscode-uri": "^2.1.2"
},

View file

@ -29,7 +29,7 @@ function parseCSSData(source: string): ICSSDataProvider {
}
return newCSSDataProvider({
version: 1,
version: rawData.version || 1,
properties: rawData.properties || [],
atDirectives: rawData.atDirectives || [],
pseudoClasses: rawData.pseudoClasses || [],

View file

@ -701,10 +701,10 @@ to-regex-range@^5.0.1:
dependencies:
is-number "^7.0.0"
vscode-css-languageservice@4.3.0-next.2:
version "4.3.0-next.2"
resolved "https://registry.yarnpkg.com/vscode-css-languageservice/-/vscode-css-languageservice-4.3.0-next.2.tgz#a7a1289d8d68ddcdee55d4f18b12a455acaf5962"
integrity sha512-4h/s/N7wt6If/5EUNMtfAbwWwImH6EvveqZMf9SmQdMMMqekZkRLA68E98hGzuzI13rHEiLckwlAC+RNLq6FXg==
vscode-css-languageservice@^4.3.0:
version "4.3.0"
resolved "https://registry.yarnpkg.com/vscode-css-languageservice/-/vscode-css-languageservice-4.3.0.tgz#40c797d664ab6188cace33cfbb19b037580a9318"
integrity sha512-BkQAMz4oVHjr0oOAz5PdeE72txlLQK7NIwzmclfr+b6fj6I8POwB+VoXvrZLTbWt9hWRgfvgiQRkh5JwrjPJ5A==
dependencies:
vscode-languageserver-textdocument "^1.0.1"
vscode-languageserver-types "3.16.0-next.2"

View file

@ -640,10 +640,10 @@ vscode-jsonrpc@6.0.0-next.2:
resolved "https://registry.yarnpkg.com/vscode-jsonrpc/-/vscode-jsonrpc-6.0.0-next.2.tgz#3d73f86d812304cb91b9fb1efee40ec60b09ed7f"
integrity sha512-dKQXRYNUY6BHALQJBJlyZyv9oWlYpbJ2vVoQNNVNPLAYQ3hzNp4zy+iSo7zGx1BPXByArJQDWTKLQh8dz3dnNw==
vscode-languageclient@7.0.0-next.5:
version "7.0.0-next.5"
resolved "https://registry.yarnpkg.com/vscode-languageclient/-/vscode-languageclient-7.0.0-next.5.tgz#7ae84c598dff360bd2bc64322b74e10e5d0b9cd6"
integrity sha512-ec+fJg+JiNBIdbeKbzssSuORUaVdtLValtiYdNEUCUjpYE+Y6xXPtXwiZOlS/0OB9pC/RLCMxsj16UwWncQhYQ==
vscode-languageclient@7.0.0-next.5.1:
version "7.0.0-next.5.1"
resolved "https://registry.yarnpkg.com/vscode-languageclient/-/vscode-languageclient-7.0.0-next.5.1.tgz#ed93f14e4c2cdccedf15002c7bf8ef9cb638f36c"
integrity sha512-OONvbk3IFpubwF8/Y5uPQaq5J5CEskpeET3SfK4iGlv5OUK+44JawH/SEW5wXuEPpfdMLEMZLuGLU5v5d7N7PQ==
dependencies:
semver "^6.3.0"
vscode-languageserver-protocol "3.16.0-next.4"

View file

@ -16,6 +16,11 @@ import { languages, workspace, Disposable, TextDocument, Uri, Diagnostic, Range,
const product = JSON.parse(fs.readFileSync(path.join(env.appRoot, 'product.json'), { encoding: 'utf-8' }));
const allowedBadgeProviders: string[] = (product.extensionAllowedBadgeProviders || []).map((s: string) => s.toLowerCase());
const allowedBadgeProvidersRegex: RegExp[] = (product.extensionAllowedBadgeProvidersRegex || []).map((r: string) => new RegExp(r));
function isTrustedSVGSource(uri: Uri): boolean {
return allowedBadgeProviders.includes(uri.authority.toLowerCase()) || allowedBadgeProvidersRegex.some(r => r.test(uri.toString()));
}
const httpsRequired = localize('httpsRequired', "Images must use the HTTPS protocol.");
const svgsNotValid = localize('svgsNotValid', "SVGs are not a valid image source.");
@ -321,7 +326,7 @@ export class ExtensionLinter {
diagnostics.push(new Diagnostic(range, message, DiagnosticSeverity.Warning));
}
if (endsWith(uri.path.toLowerCase(), '.svg') && allowedBadgeProviders.indexOf(uri.authority.toLowerCase()) === -1) {
if (endsWith(uri.path.toLowerCase(), '.svg') && !isTrustedSVGSource(uri)) {
const range = new Range(document.positionAt(begin), document.positionAt(end));
diagnostics.push(new Diagnostic(range, svgsNotValid, DiagnosticSeverity.Warning));
}
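
A stand-in sketch (Node's global URL in place of vscode.Uri, with hypothetical provider lists) of the isTrustedSVGSource check introduced above: a badge URL passes either by exact authority match against the allow-list or by matching one of the configured regexes.

const allowedBadgeProviders: string[] = ['img.shields.io'];
const allowedBadgeProvidersRegex: RegExp[] = [/^https:\/\/example\.com\/badges\//];

function isTrustedSVGSource(url: string): boolean {
	const authority = new URL(url).host.toLowerCase();
	return allowedBadgeProviders.includes(authority)
		|| allowedBadgeProvidersRegex.some(r => r.test(url));
}

console.log(isTrustedSVGSource('https://img.shields.io/badge/build-passing-green.svg')); // true
console.log(isTrustedSVGSource('https://example.org/logo.svg'));                         // false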

View file

@ -432,8 +432,8 @@
},
{
"command": "git.unstageSelectedRanges",
"key": "ctrl+k ctrl+u",
"mac": "cmd+k cmd+u",
"key": "ctrl+k ctrl+n",
"mac": "cmd+k cmd+n",
"when": "isInDiffEditor"
},
{
@ -1871,15 +1871,15 @@
{
"view": "explorer",
"contents": "%view.workbench.cloneRepository%",
"when": "config.git.enabled"
"when": "config.git.enabled && git.state == initialized"
}
]
},
"dependencies": {
"byline": "^5.0.0",
"file-type": "^7.2.0",
"iconv-lite": "^0.4.24",
"jschardet": "2.1.1",
"iconv-lite-umd": "0.6.5",
"jschardet": "2.2.1",
"vscode-extension-telemetry": "0.1.1",
"vscode-nls": "^4.0.0",
"vscode-uri": "^2.0.0",

View file

@ -7,7 +7,6 @@ import { Model } from '../model';
import { GitExtension, Repository, API } from './git';
import { ApiRepository, ApiImpl } from './api1';
import { Event, EventEmitter } from 'vscode';
import { latchEvent } from '../util';
export function deprecated(_target: any, key: string, descriptor: any): void {
if (typeof descriptor.value !== 'function') {
@ -26,14 +25,20 @@ export class GitExtensionImpl implements GitExtension {
enabled: boolean = false;
private _onDidChangeEnablement = new EventEmitter<boolean>();
readonly onDidChangeEnablement: Event<boolean> = latchEvent(this._onDidChangeEnablement.event);
readonly onDidChangeEnablement: Event<boolean> = this._onDidChangeEnablement.event;
private _model: Model | undefined = undefined;
set model(model: Model | undefined) {
this._model = model;
this.enabled = !!model;
const enabled = !!model;
if (this.enabled === enabled) {
return;
}
this.enabled = enabled;
this._onDidChangeEnablement.fire(this.enabled);
}
@ -73,4 +78,4 @@ export class GitExtensionImpl implements GitExtension {
return new ApiImpl(this._model);
}
}
}

View file

@ -8,15 +8,15 @@ import * as path from 'path';
import { Repository, GitResourceGroup } from './repository';
import { Model } from './model';
import { debounce } from './decorators';
import { filterEvent, dispose, anyEvent, fireEvent } from './util';
import { filterEvent, dispose, anyEvent, fireEvent, PromiseSource } from './util';
import { GitErrorCodes, Status } from './api/git';
type Callback = { resolve: (status: boolean) => void, reject: (err: any) => void };
class GitIgnoreDecorationProvider implements DecorationProvider {
private static Decoration: Decoration = { priority: 3, color: new ThemeColor('gitDecoration.ignoredResourceForeground') };
readonly onDidChangeDecorations: Event<Uri[]>;
private queue = new Map<string, { repository: Repository; queue: Map<string, Callback>; }>();
private queue = new Map<string, { repository: Repository; queue: Map<string, PromiseSource<Decoration | undefined>>; }>();
private disposables: Disposable[] = [];
constructor(private model: Model) {
@ -29,32 +29,29 @@ class GitIgnoreDecorationProvider implements DecorationProvider {
this.disposables.push(window.registerDecorationProvider(this));
}
provideDecoration(uri: Uri): Promise<Decoration | undefined> {
async provideDecoration(uri: Uri): Promise<Decoration | undefined> {
const repository = this.model.getRepository(uri);
if (!repository) {
return Promise.resolve(undefined);
return;
}
let queueItem = this.queue.get(repository.root);
if (!queueItem) {
queueItem = { repository, queue: new Map<string, Callback>() };
queueItem = { repository, queue: new Map<string, PromiseSource<Decoration | undefined>>() };
this.queue.set(repository.root, queueItem);
}
return new Promise<boolean>((resolve, reject) => {
queueItem!.queue.set(uri.fsPath, { resolve, reject });
let promiseSource = queueItem.queue.get(uri.fsPath);
if (!promiseSource) {
promiseSource = new PromiseSource();
queueItem!.queue.set(uri.fsPath, promiseSource);
this.checkIgnoreSoon();
}).then(ignored => {
if (ignored) {
return <Decoration>{
priority: 3,
color: new ThemeColor('gitDecoration.ignoredResourceForeground')
};
}
return undefined;
});
}
return await promiseSource.promise;
}
@debounce(500)
@ -66,16 +63,16 @@ class GitIgnoreDecorationProvider implements DecorationProvider {
const paths = [...item.queue.keys()];
item.repository.checkIgnore(paths).then(ignoreSet => {
for (const [key, value] of item.queue.entries()) {
value.resolve(ignoreSet.has(key));
for (const [path, promiseSource] of item.queue.entries()) {
promiseSource.resolve(ignoreSet.has(path) ? GitIgnoreDecorationProvider.Decoration : undefined);
}
}, err => {
if (err.gitErrorCode !== GitErrorCodes.IsInSubmodule) {
console.error(err);
}
for (const [, value] of item.queue.entries()) {
value.reject(err);
for (const [, promiseSource] of item.queue.entries()) {
promiseSource.reject(err);
}
});
}

View file

@ -9,7 +9,7 @@ import * as os from 'os';
import * as cp from 'child_process';
import * as which from 'which';
import { EventEmitter } from 'events';
import iconv = require('iconv-lite');
import * as iconv from 'iconv-lite-umd';
import * as filetype from 'file-type';
import { assign, groupBy, IDisposable, toDisposable, dispose, mkdirp, readBytes, detectUnicodeEncoding, Encoding, onceEvent, splitInChunks, Limiter } from './util';
import { CancellationToken, Progress, Uri } from 'vscode';
@ -1939,6 +1939,17 @@ export class Repository {
return message.replace(/^\s*#.*$\n?/gm, '').trim();
}
async getSquashMessage(): Promise<string | undefined> {
const squashMsgPath = path.join(this.repositoryRoot, '.git', 'SQUASH_MSG');
try {
const raw = await fs.readFile(squashMsgPath, 'utf8');
return this.stripCommitMessageComments(raw);
} catch {
return undefined;
}
}
async getMergeMessage(): Promise<string | undefined> {
const mergeMsgPath = path.join(this.repositoryRoot, '.git', 'MERGE_MSG');

View file

@ -175,7 +175,7 @@ export async function activate(context: ExtensionContext): Promise<GitExtension>
return result;
}
async function checkGitVersion(info: IGit): Promise<void> {
async function checkGitv1(info: IGit): Promise<void> {
const config = workspace.getConfiguration('git');
const shouldIgnore = config.get<boolean>('ignoreLegacyWarning') === true;
@ -202,3 +202,27 @@ async function checkGitVersion(info: IGit): Promise<void> {
await config.update('ignoreLegacyWarning', true, true);
}
}
async function checkGitWindows(info: IGit): Promise<void> {
if (!/^2\.(25|26)\./.test(info.version)) {
return;
}
const update = localize('updateGit', "Update Git");
const choice = await window.showWarningMessage(
localize('git2526', "There are known issues with the installed Git {0}. Please update to Git >= 2.27 for the git features to work correctly.", info.version),
update
);
if (choice === update) {
commands.executeCommand('vscode.open', Uri.parse('https://git-scm.com/'));
}
}
async function checkGitVersion(info: IGit): Promise<void> {
await checkGitv1(info);
if (process.platform === 'win32') {
await checkGitWindows(info);
}
}
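
An illustrative check of the guard in checkGitWindows above: only Git 2.25.x and 2.26.x trigger the update prompt on Windows, everything else passes through silently.

const affected = /^2\.(25|26)\./;

console.log(affected.test('2.26.2')); // true  -> warning shown on Windows
console.log(affected.test('2.27.0')); // false -> no warning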

View file

@ -537,7 +537,7 @@ class DotGitWatcher implements IFileWatcher {
upstreamWatcher.event(this.emitter.fire, this.emitter, this.transientDisposables);
} catch (err) {
if (Log.logLevel <= LogLevel.Error) {
this.outputChannel.appendLine(`Failed to watch ref '${upstreamPath}', is most likely packed.\n${err.stack || err}`);
this.outputChannel.appendLine(`Warning: Failed to watch ref '${upstreamPath}', is most likely packed.`);
}
}
}
@ -729,10 +729,10 @@ export class Repository implements Disposable {
this.updateInputBoxPlaceholder();
this.disposables.push(this.onDidRunGitStatus(() => this.updateInputBoxPlaceholder()));
this._mergeGroup = this._sourceControl.createResourceGroup('merge', localize('merge changes', "MERGE CHANGES"));
this._indexGroup = this._sourceControl.createResourceGroup('index', localize('staged changes', "STAGED CHANGES"));
this._workingTreeGroup = this._sourceControl.createResourceGroup('workingTree', localize('changes', "CHANGES"));
this._untrackedGroup = this._sourceControl.createResourceGroup('untracked', localize('untracked changes', "UNTRACKED CHANGES"));
this._mergeGroup = this._sourceControl.createResourceGroup('merge', localize('merge changes', "Merge Changes"));
this._indexGroup = this._sourceControl.createResourceGroup('index', localize('staged changes', "Staged Changes"));
this._workingTreeGroup = this._sourceControl.createResourceGroup('workingTree', localize('changes', "Changes"));
this._untrackedGroup = this._sourceControl.createResourceGroup('untracked', localize('untracked changes', "Untracked Changes"));
const updateIndexGroupVisibility = () => {
const config = workspace.getConfiguration('git', root);
@ -865,10 +865,10 @@ export class Repository implements Disposable {
}
async getInputTemplate(): Promise<string> {
const mergeMessage = await this.repository.getMergeMessage();
const commitMessage = (await Promise.all([this.repository.getMergeMessage(), this.repository.getSquashMessage()])).find(msg => !!msg);
if (mergeMessage) {
return mergeMessage;
if (commitMessage) {
return commitMessage;
}
return await this.repository.getCommitTemplate();

View file

@ -3,7 +3,7 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { Event, Disposable } from 'vscode';
import { Event, Disposable, EventEmitter } from 'vscode';
import { dirname, sep } from 'path';
import { Readable } from 'stream';
import { promises as fs, createReadStream } from 'fs';
@ -44,18 +44,6 @@ export function filterEvent<T>(event: Event<T>, filter: (e: T) => boolean): Even
return (listener: (e: T) => any, thisArgs?: any, disposables?: Disposable[]) => event(e => filter(e) && listener.call(thisArgs, e), null, disposables);
}
export function latchEvent<T>(event: Event<T>): Event<T> {
let firstCall = true;
let cache: T;
return filterEvent(event, value => {
let shouldEmit = firstCall || value !== cache;
firstCall = false;
cache = value;
return shouldEmit;
});
}
export function anyEvent<T>(...events: Event<T>[]): Event<T> {
return (listener: (e: T) => any, thisArgs?: any, disposables?: Disposable[]) => {
const result = combinedDisposable(events.map(event => event(i => listener.call(thisArgs, i))));
@ -400,3 +388,39 @@ export class Limiter<T> {
}
}
}
type Completion<T> = { success: true, value: T } | { success: false, err: any };
export class PromiseSource<T> {
private _onDidComplete = new EventEmitter<Completion<T>>();
private _promise: Promise<T> | undefined;
get promise(): Promise<T> {
if (this._promise) {
return this._promise;
}
return eventToPromise(this._onDidComplete.event).then(completion => {
if (completion.success) {
return completion.value;
} else {
throw completion.err;
}
});
}
resolve(value: T): void {
if (!this._promise) {
this._promise = Promise.resolve(value);
this._onDidComplete.fire({ success: true, value });
}
}
reject(err: any): void {
if (!this._promise) {
this._promise = Promise.reject(err);
this._onDidComplete.fire({ success: false, err });
}
}
}
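
A small usage sketch (within the extension, names illustrative) for the PromiseSource helper above: callers may read .promise before or after resolve()/reject() is called, and only the first completion counts.

const source = new PromiseSource<number>();

source.promise.then(value => console.log('resolved with', value)); // logs 42
source.resolve(42);
source.resolve(7); // ignored: the source has already completed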

View file

@ -425,12 +425,10 @@ https-proxy-agent@^2.2.1:
agent-base "^4.3.0"
debug "^3.1.0"
iconv-lite@^0.4.24:
version "0.4.24"
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==
dependencies:
safer-buffer ">= 2.1.2 < 3"
iconv-lite-umd@0.6.5:
version "0.6.5"
resolved "https://registry.yarnpkg.com/iconv-lite-umd/-/iconv-lite-umd-0.6.5.tgz#6a1f621a3b4d125f72feff813a9839e1ebd6c722"
integrity sha512-WDegH4al+e3n3jTOStRvm+jzDA3JMUQGgzdAsMxAgcgB0Oi72HjfdsoX08ieKsy3rKexXVjWZr41aOIUaCZnMg==
inflight@^1.0.4:
version "1.0.6"
@ -470,10 +468,10 @@ jsbn@~0.1.0:
resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513"
integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM=
jschardet@2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/jschardet/-/jschardet-2.1.1.tgz#af6f8fd0b3b0f5d46a8fd9614a4fce490575c184"
integrity sha512-pA5qG9Zwm8CBpGlK/lo2GE9jPxwqRgMV7Lzc/1iaPccw6v4Rhj8Zg2BTyrdmHmxlJojnbLupLeRnaPLsq03x6Q==
jschardet@2.2.1:
version "2.2.1"
resolved "https://registry.yarnpkg.com/jschardet/-/jschardet-2.2.1.tgz#03b0264669a90c7a5c436a68c5a7d4e4cb0c9823"
integrity sha512-Ks2JNuUJoc7PGaZ7bVFtSEvOcr0rBq6Q1J5/7+zKWLT+g+4zziL63O0jg7y2jxhzIa1LVsHUbPXrbaWmz9iwDw==
json-schema-traverse@^0.4.1:
version "0.4.1"
@ -748,7 +746,7 @@ safe-buffer@^5.0.1, safe-buffer@^5.1.2:
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519"
integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==
"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0:
safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0:
version "2.1.2"
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==

View file

@ -8,11 +8,10 @@
'use strict';
const path = require('path');
const withDefaults = require('../shared.webpack.config');
const withBrowserDefaults = require('../shared.webpack.config').browser;
module.exports = withDefaults({
module.exports = withBrowserDefaults({
context: __dirname,
target: 'webworker',
node: false,
entry: {
extension: './src/extension.ts',
@ -20,30 +19,10 @@ module.exports = withDefaults({
externals: {
'keytar': 'commonjs keytar',
},
// TODO@eamodio Deal with nls properly for the browser
// Specify module here, so we can stop the vscode-nls-dev loader from overwriting nls calls
module: {
rules: [{
test: /\.ts$/,
exclude: /node_modules/,
use: [{
// configure TypeScript loader:
// * enable sources maps for end-to-end source maps
loader: 'ts-loader',
options: {
compilerOptions: {
'sourceMap': true,
}
}
}]
}]
},
resolve: {
alias: {
'node-fetch': path.resolve(__dirname, 'node_modules/node-fetch/browser.js'),
'vscode-extension-telemetry': path.resolve(__dirname, '../../build/polyfills/vscode-extension-telemetry.js'),
'vscode-nls': path.resolve(__dirname, '../../build/polyfills/vscode-nls.js'),
'uuid': path.resolve(__dirname, 'node_modules/uuid/dist/esm-browser/index.js')
},
}
}
});

View file

@ -32,7 +32,7 @@
},
"aiKey": "AIF-d9b70cd4-b9f9-4d70-929b-a071c400b217",
"main": "./out/extension.js",
"browser": "./dist/extension.js",
"browser": "./dist/browser/extension.js",
"scripts": {
"compile": "gulp compile-extension:github-authentication",
"compile-web": "npx webpack-cli --config extension-browser.webpack.config --mode none",

View file

@ -22,7 +22,7 @@ interface SessionData {
}
export class GitHubAuthenticationProvider {
private _sessions: vscode.AuthenticationSession2[] = [];
private _sessions: vscode.AuthenticationSession[] = [];
private _githubServer = new GitHubServer();
public async initialize(): Promise<void> {
@ -37,7 +37,7 @@ export class GitHubAuthenticationProvider {
private pollForChange() {
setTimeout(async () => {
let storedSessions: vscode.AuthenticationSession2[];
let storedSessions: vscode.AuthenticationSession[];
try {
storedSessions = await this.readSessions();
} catch (e) {
@ -80,12 +80,12 @@ export class GitHubAuthenticationProvider {
}, 1000 * 30);
}
private async readSessions(): Promise<vscode.AuthenticationSession2[]> {
private async readSessions(): Promise<vscode.AuthenticationSession[]> {
const storedSessions = await keychain.getToken();
if (storedSessions) {
try {
const sessionData: SessionData[] = JSON.parse(storedSessions);
const sessionPromises = sessionData.map(async (session: SessionData): Promise<vscode.AuthenticationSession2> => {
const sessionPromises = sessionData.map(async (session: SessionData): Promise<vscode.AuthenticationSession> => {
const needsUserInfo = !session.account;
let userInfo: { id: string, accountName: string };
if (needsUserInfo) {
@ -121,11 +121,11 @@ export class GitHubAuthenticationProvider {
await keychain.setToken(JSON.stringify(this._sessions));
}
get sessions(): vscode.AuthenticationSession2[] {
get sessions(): vscode.AuthenticationSession[] {
return this._sessions;
}
public async login(scopes: string): Promise<vscode.AuthenticationSession2> {
public async login(scopes: string): Promise<vscode.AuthenticationSession> {
const token = await this._githubServer.login(scopes);
const session = await this.tokenToSession(token, scopes.split(' '));
await this.setToken(session);
@ -136,12 +136,12 @@ export class GitHubAuthenticationProvider {
this._githubServer.manuallyProvideToken();
}
private async tokenToSession(token: string, scopes: string[]): Promise<vscode.AuthenticationSession2> {
private async tokenToSession(token: string, scopes: string[]): Promise<vscode.AuthenticationSession> {
const userInfo = await this._githubServer.getUserInfo(token);
return new vscode.AuthenticationSession2(uuid(), token, { displayName: userInfo.accountName, id: userInfo.id }, scopes);
return new vscode.AuthenticationSession(uuid(), token, { displayName: userInfo.accountName, id: userInfo.id }, scopes);
}
private async setToken(session: vscode.AuthenticationSession2): Promise<void> {
private async setToken(session: vscode.AuthenticationSession): Promise<void> {
const sessionIndex = this._sessions.findIndex(s => s.id === session.id);
if (sessionIndex > -1) {
this._sessions.splice(sessionIndex, 1, session);

View file

@ -7,18 +7,19 @@
'use strict';
const path = require('path');
const withDefaults = require('../shared.webpack.config');
const withBrowserDefaults = require('../shared.webpack.config').browser;
module.exports = withDefaults({
const config = withBrowserDefaults({
context: __dirname,
target: 'webworker',
node: false,
entry: {
extension: './src/extension.ts',
extension: './src/extension.ts'
},
resolve: {
alias: {
'node-fetch': path.resolve(__dirname, 'node_modules/node-fetch/browser.js'),
},
'node-fetch': path.resolve(__dirname, 'node_modules/node-fetch/browser.js')
}
}
});
module.exports = config;

View file

@ -13,11 +13,102 @@
"Other"
],
"activationEvents": [
"onFileSystem:github"
"onFileSystem:codespace",
"onFileSystem:github",
"onCommand:githubBrowser.openRepository"
],
"browser": "./dist/extension.js",
"browser": "./dist/browser/extension.js",
"main": "./out/extension.js",
"contributes": {
"commands": [
{
"command": "githubBrowser.openRepository",
"title": "Open GitHub Repository...",
"category": "GitHub Browser"
},
{
"command": "githubBrowser.commit",
"title": "Commit",
"icon": "$(check)",
"category": "GitHub Browser"
},
{
"command": "githubBrowser.discardChanges",
"title": "Discard Changes",
"icon": "$(discard)",
"category": "GitHub Browser"
},
{
"command": "githubBrowser.openChanges",
"title": "Open Changes",
"icon": "$(git-compare)",
"category": "GitHub Browser"
},
{
"command": "githubBrowser.openFile",
"title": "Open File",
"icon": "$(go-to-file)",
"category": "GitHub Browser"
}
],
"menus": {
"commandPalette": [
{
"command": "githubBrowser.openRepository",
"when": "config.githubBrowser.openRepository"
},
{
"command": "githubBrowser.commit",
"when": "false"
},
{
"command": "githubBrowser.discardChanges",
"when": "false"
},
{
"command": "githubBrowser.openChanges",
"when": "false"
},
{
"command": "githubBrowser.openFile",
"when": "false"
}
],
"scm/title": [
{
"command": "githubBrowser.commit",
"group": "navigation",
"when": "scmProvider == github"
}
],
"scm/resourceState/context": [
{
"command": "githubBrowser.openFile",
"when": "scmProvider == github && scmResourceGroup == github.changes",
"group": "inline@0"
},
{
"command": "githubBrowser.discardChanges",
"when": "scmProvider == github && scmResourceGroup == github.changes",
"group": "inline@1"
},
{
"command": "githubBrowser.openChanges",
"when": "scmProvider == github && scmResourceGroup == github.changes",
"group": "navigation@0"
},
{
"command": "githubBrowser.openFile",
"when": "scmProvider == github && scmResourceGroup == github.changes",
"group": "navigation@1"
},
{
"command": "githubBrowser.discardChanges",
"when": "scmProvider == github && scmResourceGroup == github.changes",
"group": "1_modification@0"
}
]
},
"resourceLabelFormatters": [
{
"scheme": "github",
@ -36,6 +127,24 @@
"separator": "/",
"workspaceSuffix": "GitHub"
}
},
{
"scheme": "codespace",
"authority": "HEAD",
"formatting": {
"label": "github.com${path}",
"separator": "/",
"workspaceSuffix": "GitHub"
}
},
{
"scheme": "codespace",
"authority": "*",
"formatting": {
"label": "github.com${path} (${authority})",
"separator": "/",
"workspaceSuffix": "GitHub"
}
}
]
},
@ -47,14 +156,13 @@
"vscode:prepublish": "npm run compile"
},
"dependencies": {
"@octokit/graphql": "4.5.0",
"@octokit/rest": "17.11.0",
"@octokit/graphql": "4.5.1",
"@octokit/rest": "18.0.0",
"fuzzysort": "1.1.4",
"node-fetch": "2.6.0"
"node-fetch": "2.6.0",
"vscode-nls": "4.1.2"
},
"devDependencies": {
"@types/node-fetch": "2.5.7",
"webpack": "4.43.0",
"webpack-cli": "3.3.11"
"@types/node-fetch": "2.5.7"
}
}

View file

@ -0,0 +1,380 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { commands, Event, EventEmitter, FileStat, FileType, Memento, TextDocumentShowOptions, Uri, ViewColumn } from 'vscode';
import { getRootUri, getRelativePath, isChild } from './extension';
import { sha1 } from './sha1';
const textDecoder = new TextDecoder();
interface CreateOperation<T extends string | Uri = string> {
type: 'created';
size: number;
timestamp: number;
uri: T;
hash: string;
originalHash: string;
}
interface ChangeOperation<T extends string | Uri = string> {
type: 'changed';
size: number;
timestamp: number;
uri: T;
hash: string;
originalHash: string;
}
interface DeleteOperation<T extends string | Uri = string> {
type: 'deleted';
size: undefined;
timestamp: number;
uri: T;
hash: undefined;
originalHash: undefined;
}
export type Operation = CreateOperation<Uri> | ChangeOperation<Uri> | DeleteOperation<Uri>;
type StoredOperation = CreateOperation | ChangeOperation | DeleteOperation;
const workingOperationsKeyPrefix = 'github.working.changes|';
const workingFileKeyPrefix = 'github.working|';
function fromSerialized(operations: StoredOperation): Operation {
return { ...operations, uri: Uri.parse(operations.uri) };
}
export interface ChangeStoreEvent {
type: 'created' | 'changed' | 'deleted';
rootUri: Uri;
uri: Uri;
}
function toChangeStoreEvent(operation: Operation | StoredOperation, rootUri: Uri, uri?: Uri): ChangeStoreEvent {
return {
type: operation.type,
rootUri: rootUri,
uri: uri ?? (typeof operation.uri === 'string' ? Uri.parse(operation.uri) : operation.uri),
};
}
export interface IChangeStore {
onDidChange: Event<ChangeStoreEvent>;
acceptAll(rootUri: Uri): Promise<void>;
discard(uri: Uri): Promise<void>;
discardAll(rootUri: Uri): Promise<void>;
hasChanges(rootUri: Uri): boolean;
getChanges(rootUri: Uri): Operation[];
getContent(uri: Uri): string | undefined;
openChanges(uri: Uri, original: Uri): void;
openFile(uri: Uri): void;
}
export interface IWritableChangeStore {
onDidChange: Event<ChangeStoreEvent>;
hasChanges(rootUri: Uri): boolean;
getContent(uri: Uri): string | undefined;
getStat(uri: Uri): FileStat | undefined;
updateDirectoryEntries(uri: Uri, entries: [string, FileType][]): [string, FileType][];
onFileChanged(uri: Uri, content: Uint8Array, originalContent: () => Uint8Array | Thenable<Uint8Array>): Promise<void>;
onFileCreated(uri: Uri, content: Uint8Array): Promise<void>;
onFileDeleted(uri: Uri): Promise<void>;
}
export class ChangeStore implements IChangeStore, IWritableChangeStore {
private _onDidChange = new EventEmitter<ChangeStoreEvent>();
get onDidChange(): Event<ChangeStoreEvent> {
return this._onDidChange.event;
}
constructor(private readonly memento: Memento) { }
async acceptAll(rootUri: Uri): Promise<void> {
const operations = this.getChanges(rootUri);
await this.saveWorkingOperations(rootUri, undefined);
const events: ChangeStoreEvent[] = [];
for (const operation of operations) {
await this.discardWorkingContent(operation.uri);
events.push(toChangeStoreEvent(operation, rootUri));
}
for (const e of events) {
this._onDidChange.fire(e);
}
}
async discard(uri: Uri): Promise<void> {
const rootUri = getRootUri(uri);
if (rootUri === undefined) {
return;
}
const key = uri.toString();
const operations = this.getWorkingOperations(rootUri);
const index = operations.findIndex(c => c.uri === key);
if (index === -1) {
return;
}
const [operation] = operations.splice(index, 1);
await this.saveWorkingOperations(rootUri, operations);
await this.discardWorkingContent(uri);
this._onDidChange.fire({
type: operation.type === 'created' ? 'deleted' : operation.type === 'deleted' ? 'created' : 'changed',
rootUri: rootUri,
uri: uri,
});
}
async discardAll(rootUri: Uri): Promise<void> {
const operations = this.getChanges(rootUri);
await this.saveWorkingOperations(rootUri, undefined);
const events: ChangeStoreEvent[] = [];
for (const operation of operations) {
await this.discardWorkingContent(operation.uri);
events.push(toChangeStoreEvent(operation, rootUri));
}
for (const e of events) {
this._onDidChange.fire(e);
}
}
getChanges(rootUri: Uri) {
return this.getWorkingOperations(rootUri).map(c => fromSerialized(c));
}
getContent(uri: Uri): string | undefined {
return this.memento.get(`${workingFileKeyPrefix}${uri.toString()}`);
}
getStat(uri: Uri): FileStat | undefined {
const key = uri.toString();
const operation = this.getChanges(getRootUri(uri)!).find(c => c.uri.toString() === key);
if (operation === undefined) {
return undefined;
}
return {
type: FileType.File,
size: operation.size ?? 0,
ctime: 0,
mtime: operation.timestamp
};
}
hasChanges(rootUri: Uri): boolean {
return this.getWorkingOperations(rootUri).length !== 0;
}
updateDirectoryEntries(uri: Uri, entries: [string, FileType][]): [string, FileType][] {
const rootUri = getRootUri(uri);
if (rootUri === undefined) {
return entries;
}
const folderPath = getRelativePath(rootUri, uri);
const operations = this.getChanges(rootUri);
for (const operation of operations) {
switch (operation.type) {
case 'changed':
continue;
case 'created': {
const filePath = getRelativePath(rootUri, operation.uri);
if (isChild(folderPath, filePath)) {
entries.push([filePath, FileType.File]);
}
break;
}
case 'deleted': {
const filePath = getRelativePath(rootUri, operation.uri);
if (isChild(folderPath, filePath)) {
const index = entries.findIndex(([path]) => path === filePath);
if (index !== -1) {
entries.splice(index, 1);
}
}
break;
}
}
}
return entries;
}
async onFileChanged(uri: Uri, content: Uint8Array, originalContent: () => Uint8Array | Thenable<Uint8Array>): Promise<void> {
const rootUri = getRootUri(uri);
if (rootUri === undefined) {
return;
}
const key = uri.toString();
const operations = this.getWorkingOperations(rootUri);
const hash = await sha1(content);
let operation = operations.find(c => c.uri === key);
if (operation === undefined) {
const originalHash = await sha1(await originalContent!());
if (hash === originalHash) {
return;
}
operation = {
type: 'changed',
size: content.byteLength,
timestamp: Date.now(),
uri: key,
hash: hash!,
originalHash: originalHash
} as ChangeOperation;
operations.push(operation);
await this.saveWorkingOperations(rootUri, operations);
await this.saveWorkingContent(uri, textDecoder.decode(content));
} else if (hash! === operation.originalHash) {
operations.splice(operations.indexOf(operation), 1);
await this.saveWorkingOperations(rootUri, operations);
await this.discardWorkingContent(uri);
} else if (operation.hash !== hash) {
operation.hash = hash!;
operation.timestamp = Date.now();
await this.saveWorkingOperations(rootUri, operations);
await this.saveWorkingContent(uri, textDecoder.decode(content));
}
this._onDidChange.fire(toChangeStoreEvent(operation, rootUri, uri));
}
async onFileCreated(uri: Uri, content: Uint8Array): Promise<void> {
const rootUri = getRootUri(uri);
if (rootUri === undefined) {
return;
}
const key = uri.toString();
const operations = this.getWorkingOperations(rootUri);
const hash = await sha1(content);
let operation = operations.find(c => c.uri === key);
if (operation === undefined) {
operation = {
type: 'created',
size: content.byteLength,
timestamp: Date.now(),
uri: key,
hash: hash!,
originalHash: hash!
} as CreateOperation;
operations.push(operation);
await this.saveWorkingOperations(rootUri, operations);
await this.saveWorkingContent(uri, textDecoder.decode(content));
} else {
// Shouldn't happen, but if it does just update the contents
operation.hash = hash!;
operation.timestamp = Date.now();
await this.saveWorkingOperations(rootUri, operations);
await this.saveWorkingContent(uri, textDecoder.decode(content));
}
this._onDidChange.fire(toChangeStoreEvent(operation, rootUri, uri));
}
async onFileDeleted(uri: Uri): Promise<void> {
const rootUri = getRootUri(uri);
if (rootUri === undefined) {
return;
}
const key = uri.toString();
const operations = this.getWorkingOperations(rootUri);
let operation = operations.find(c => c.uri === key);
if (operation !== undefined) {
operations.splice(operations.indexOf(operation), 1);
}
const wasCreated = operation?.type === 'created';
operation = {
type: 'deleted',
timestamp: Date.now(),
uri: key,
} as DeleteOperation;
// Only track the delete if we weren't tracking the create
if (!wasCreated) {
operations.push(operation);
}
await this.saveWorkingOperations(rootUri, operations);
await this.discardWorkingContent(uri);
this._onDidChange.fire(toChangeStoreEvent(operation, rootUri, uri));
}
async openChanges(uri: Uri, original: Uri) {
const opts: TextDocumentShowOptions = {
preserveFocus: false,
preview: true,
viewColumn: ViewColumn.Active
};
await commands.executeCommand('vscode.diff', original, uri, `${uri.fsPath} (Working Tree)`, opts);
}
async openFile(uri: Uri) {
const opts: TextDocumentShowOptions = {
preserveFocus: false,
preview: false,
viewColumn: ViewColumn.Active
};
await commands.executeCommand('vscode.open', uri, opts);
}
private getWorkingOperations(rootUri: Uri): StoredOperation[] {
return this.memento.get(`${workingOperationsKeyPrefix}${rootUri.toString()}`, []);
}
private async saveWorkingOperations(rootUri: Uri, operations: StoredOperation[] | undefined): Promise<void> {
await this.memento.update(`${workingOperationsKeyPrefix}${rootUri.toString()}`, operations);
}
private async saveWorkingContent(uri: Uri, content: string): Promise<void> {
await this.memento.update(`${workingFileKeyPrefix}${uri.toString()}`, content);
}
private async discardWorkingContent(uri: Uri): Promise<void> {
await this.memento.update(`${workingFileKeyPrefix}${uri.toString()}`, undefined);
}
}
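A minimal sketch of wiring the store up from an extension host, assuming the workspaceState memento used elsewhere in this change (the logging handler is illustrative):

import { ExtensionContext } from 'vscode';
import { ChangeStore } from './changeStore';

export function createChangeStore(context: ExtensionContext): ChangeStore {
	// Operations and working file contents are persisted in workspaceState,
	// so pending edits survive a window reload.
	const store = new ChangeStore(context.workspaceState);
	store.onDidChange(e => console.log(`${e.type}: ${e.uri.toString()}`));
	return store;
}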


@ -0,0 +1,53 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { Event, EventEmitter, Memento, Uri, workspace } from 'vscode';
export interface WorkspaceFolderContext<T> {
context: T;
name: string;
folderUri: Uri;
}
export class ContextStore<T> {
private _onDidChange = new EventEmitter<Uri>();
get onDidChange(): Event<Uri> {
return this._onDidChange.event;
}
constructor(
private readonly scheme: string,
private readonly originalScheme: string,
private readonly memento: Memento,
) { }
delete(uri: Uri) {
return this.set(uri, undefined);
}
get(uri: Uri): T | undefined {
return this.memento.get<T>(`${this.originalScheme}.context|${this.getOriginalResource(uri).toString()}`);
}
getForWorkspace(): WorkspaceFolderContext<T>[] {
const folders = workspace.workspaceFolders?.filter(f => f.uri.scheme === this.scheme || f.uri.scheme === this.originalScheme) ?? [];
return folders.map(f => ({ context: this.get(f.uri)!, name: f.name, folderUri: f.uri })).filter(c => c.context !== undefined);
}
async set(uri: Uri, context: T | undefined) {
uri = this.getOriginalResource(uri);
await this.memento.update(`${this.originalScheme}.context|${uri.toString()}`, context);
this._onDidChange.fire(uri);
}
getOriginalResource(uri: Uri): Uri {
return uri.with({ scheme: this.originalScheme });
}
getWorkspaceResource(uri: Uri): Uri {
return uri.with({ scheme: this.scheme });
}
}
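A short usage sketch, assuming the codespace/github scheme pair used elsewhere in this change; DemoContext stands in for the real GitHubApiContext:

import { ExtensionContext, Uri } from 'vscode';
import { ContextStore } from './contextStore';

interface DemoContext { sha: string; }

export async function rememberContext(context: ExtensionContext): Promise<void> {
	const store = new ContextStore<DemoContext>('codespace', 'github', context.workspaceState);
	const folder = Uri.parse('codespace://HEAD/microsoft/vscode');
	// Keys are normalized to the original (github) scheme, so either form of the uri resolves.
	await store.set(folder, { sha: '0123456789abcdef0123456789abcdef01234567' });
	console.log(store.get(folder)?.sha);
}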


@ -3,9 +3,78 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as vscode from 'vscode';
import { GitHubFS } from './githubfs';
import { commands, ExtensionContext, Uri, window, workspace } from 'vscode';
import { ChangeStore } from './changeStore';
import { ContextStore } from './contextStore';
import { VirtualFS } from './fs';
import { GitHubApiContext, GitHubApi } from './github/api';
import { GitHubFS } from './github/fs';
import { VirtualSCM } from './scm';
import { StatusBar } from './statusbar';
export function activate(context: vscode.ExtensionContext) {
context.subscriptions.push(new GitHubFS());
const repositoryRegex = /^(?:(?:https:\/\/)?github.com\/)?([^\/]+)\/([^\/]+?)(?:\/|.git|$)/i;
export async function activate(context: ExtensionContext) {
const contextStore = new ContextStore<GitHubApiContext>('codespace', GitHubFS.scheme, context.workspaceState);
const changeStore = new ChangeStore(context.workspaceState);
const githubApi = new GitHubApi(contextStore);
const gitHubFS = new GitHubFS(githubApi);
const virtualFS = new VirtualFS('codespace', contextStore, changeStore, gitHubFS);
context.subscriptions.push(
githubApi,
gitHubFS,
virtualFS,
new VirtualSCM(GitHubFS.scheme, githubApi, changeStore),
new StatusBar(contextStore, changeStore),
);
commands.registerCommand('githubBrowser.openRepository', async () => {
const value = await window.showInputBox({
placeHolder: 'e.g. https://github.com/microsoft/vscode',
prompt: 'Enter a GitHub repository url',
validateInput: value => repositoryRegex.test(value) ? undefined : 'Invalid repository url'
});
if (value) {
const match = repositoryRegex.exec(value);
if (match) {
const [, owner, repo] = match;
const uri = Uri.parse(`codespace://HEAD/${owner}/${repo}`);
openWorkspace(uri, repo, 'currentWindow');
}
}
});
}
export function getRelativePath(rootUri: Uri, uri: Uri) {
return uri.path.substr(rootUri.path.length + 1);
}
export function getRootUri(uri: Uri) {
return workspace.getWorkspaceFolder(uri)?.uri;
}
export function isChild(folderPath: string, filePath: string) {
return isDescendent(folderPath, filePath) && filePath.substr(folderPath.length + (folderPath.endsWith('/') ? 0 : 1)).split('/').length === 1;
}
export function isDescendent(folderPath: string, filePath: string) {
return folderPath.length === 0 || filePath.startsWith(folderPath.endsWith('/') ? folderPath : `${folderPath}/`);
}
const shaRegex = /^[0-9a-f]{40}$/;
export function isSha(ref: string) {
return shaRegex.test(ref);
}
function openWorkspace(uri: Uri, name: string, location: 'currentWindow' | 'newWindow' | 'addToCurrentWorkspace') {
if (location === 'addToCurrentWorkspace') {
const count = (workspace.workspaceFolders && workspace.workspaceFolders.length) || 0;
return workspace.updateWorkspaceFolders(count, 0, { uri: uri, name: name });
}
return commands.executeCommand('vscode.openFolder', uri, location === 'newWindow');
}
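The openRepository prompt accepts full URLs as well as owner/repo shorthand; a standalone check of the regex above (not part of the extension):

const repositoryRegex = /^(?:(?:https:\/\/)?github.com\/)?([^\/]+)\/([^\/]+?)(?:\/|.git|$)/i;

for (const input of ['https://github.com/microsoft/vscode', 'github.com/microsoft/vscode.git', 'microsoft/vscode']) {
	const match = repositoryRegex.exec(input);
	if (match) {
		const [, owner, repo] = match;
		console.log(`${owner}/${repo}`); // "microsoft/vscode" in all three cases
	}
}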


@ -0,0 +1,216 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import {
CancellationToken,
Disposable,
Event,
EventEmitter,
FileChangeEvent,
FileChangeType,
FileSearchOptions,
FileSearchProvider,
FileSearchQuery,
FileStat,
FileSystemError,
FileSystemProvider,
FileType,
Progress,
TextSearchOptions,
TextSearchProvider,
TextSearchQuery,
TextSearchResult,
Uri,
workspace,
} from 'vscode';
import { IWritableChangeStore } from './changeStore';
import { ContextStore } from './contextStore';
import { GitHubApiContext } from './github/api';
const emptyDisposable = { dispose: () => { /* noop */ } };
const textEncoder = new TextEncoder();
export class VirtualFS implements FileSystemProvider, FileSearchProvider, TextSearchProvider, Disposable {
private _onDidChangeFile = new EventEmitter<FileChangeEvent[]>();
get onDidChangeFile(): Event<FileChangeEvent[]> {
return this._onDidChangeFile.event;
}
private readonly disposable: Disposable;
constructor(
readonly scheme: string,
private readonly contextStore: ContextStore<GitHubApiContext>,
private readonly changeStore: IWritableChangeStore,
private readonly fs: FileSystemProvider & FileSearchProvider & TextSearchProvider
) {
// TODO@eamodio listen for workspace folder changes
for (const context of contextStore.getForWorkspace()) {
// If we have a saved context, but no longer have any changes, reset the context
// We only do this on startup/reload to keep things consistent
if (!changeStore.hasChanges(context.folderUri)) {
console.log('Clear context', context.folderUri.toString());
contextStore.delete(context.folderUri);
}
}
this.disposable = Disposable.from(
workspace.registerFileSystemProvider(scheme, this, { isCaseSensitive: true }),
workspace.registerFileSearchProvider(scheme, this),
workspace.registerTextSearchProvider(scheme, this),
changeStore.onDidChange(e => {
switch (e.type) {
case 'created':
this._onDidChangeFile.fire([{ type: FileChangeType.Created, uri: e.uri }]);
break;
case 'changed':
this._onDidChangeFile.fire([{ type: FileChangeType.Changed, uri: e.uri }]);
break;
case 'deleted':
this._onDidChangeFile.fire([{ type: FileChangeType.Deleted, uri: e.uri }]);
break;
}
}),
);
}
dispose() {
this.disposable?.dispose();
}
private getOriginalResource(uri: Uri): Uri {
return this.contextStore.getOriginalResource(uri);
}
private getWorkspaceResource(uri: Uri): Uri {
return this.contextStore.getWorkspaceResource(uri);
}
//#region FileSystemProvider
watch(): Disposable {
return emptyDisposable;
}
async stat(uri: Uri): Promise<FileStat> {
let stat = this.changeStore.getStat(uri);
if (stat !== undefined) {
return stat;
}
stat = await this.fs.stat(this.getOriginalResource(uri));
return stat;
}
async readDirectory(uri: Uri): Promise<[string, FileType][]> {
let entries = await this.fs.readDirectory(this.getOriginalResource(uri));
entries = this.changeStore.updateDirectoryEntries(uri, entries);
return entries;
}
createDirectory(_uri: Uri): void | Thenable<void> {
// TODO@eamodio only support files for now
throw FileSystemError.NoPermissions();
}
async readFile(uri: Uri): Promise<Uint8Array> {
const content = this.changeStore.getContent(uri);
if (content !== undefined) {
return textEncoder.encode(content);
}
const data = await this.fs.readFile(this.getOriginalResource(uri));
return data;
}
async writeFile(uri: Uri, content: Uint8Array, options: { create: boolean, overwrite: boolean }): Promise<void> {
let stat;
try {
stat = await this.stat(uri);
if (!options.overwrite) {
throw FileSystemError.FileExists();
}
} catch (ex) {
if (ex instanceof FileSystemError && ex.code === 'FileNotFound') {
if (!options.create) {
throw FileSystemError.FileNotFound();
}
} else {
throw ex;
}
}
if (stat === undefined) {
await this.changeStore.onFileCreated(uri, content);
} else {
await this.changeStore.onFileChanged(uri, content, () => this.fs.readFile(this.getOriginalResource(uri)));
}
}
async delete(uri: Uri, _options: { recursive: boolean }): Promise<void> {
const stat = await this.stat(uri);
if (stat.type !== FileType.File) {
throw FileSystemError.NoPermissions();
}
await this.changeStore.onFileDeleted(uri);
}
async rename(oldUri: Uri, newUri: Uri, options: { overwrite: boolean }): Promise<void> {
const stat = await this.stat(oldUri);
// TODO@eamodio only support files for now
if (stat.type !== FileType.File) {
throw FileSystemError.NoPermissions();
}
const content = await this.readFile(oldUri);
await this.writeFile(newUri, content, { create: true, overwrite: options.overwrite });
await this.delete(oldUri, { recursive: false });
}
async copy(source: Uri, destination: Uri, options: { overwrite: boolean }): Promise<void> {
const stat = await this.stat(source);
// TODO@eamodio only support files for now
if (stat.type !== FileType.File) {
throw FileSystemError.NoPermissions();
}
const content = await this.readFile(source);
await this.writeFile(destination, content, { create: true, overwrite: options.overwrite });
}
//#endregion
//#region FileSearchProvider
provideFileSearchResults(
query: FileSearchQuery,
options: FileSearchOptions,
token: CancellationToken,
) {
return this.fs.provideFileSearchResults(query, { ...options, folder: this.getOriginalResource(options.folder) }, token);
}
//#endregion
//#region TextSearchProvider
provideTextSearchResults(
query: TextSearchQuery,
options: TextSearchOptions,
progress: Progress<TextSearchResult>,
token: CancellationToken,
) {
return this.fs.provideTextSearchResults(
query,
{ ...options, folder: this.getOriginalResource(options.folder) },
{ report: (result: TextSearchResult) => progress.report({ ...result, uri: this.getWorkspaceResource(result.uri) }) },
token
);
}
//#endregion
}


@ -0,0 +1,87 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const emptyStr = '';
function defaultResolver(...args: any[]): string {
if (args.length === 1) {
const arg0 = args[0];
if (arg0 === undefined || arg0 === null) {
return emptyStr;
}
if (typeof arg0 === 'string') {
return arg0;
}
if (typeof arg0 === 'number' || typeof arg0 === 'boolean') {
return String(arg0);
}
return JSON.stringify(arg0);
}
return JSON.stringify(args);
}
function isPromise<T>(obj: T | Promise<T>): obj is Promise<T> {
return typeof (obj as Promise<T>)?.then === 'function';
}
export function gate<T extends (...arg: any) => any>(resolver?: (...args: Parameters<T>) => string) {
return (_target: any, key: string, descriptor: PropertyDescriptor) => {
let fn: Function | undefined;
if (typeof descriptor.value === 'function') {
fn = descriptor.value;
} else if (typeof descriptor.get === 'function') {
fn = descriptor.get;
}
if (fn === undefined || fn === null) {
throw new Error('Not supported');
}
const gateKey = `$gate$${key}`;
descriptor.value = function (this: any, ...args: any[]) {
const prop =
args.length === 0 ? gateKey : `${gateKey}$${(resolver ?? defaultResolver)(...(args as Parameters<T>))}`;
if (!Object.prototype.hasOwnProperty.call(this, prop)) {
Object.defineProperty(this, prop, {
configurable: false,
enumerable: false,
writable: true,
value: undefined,
});
}
let promise = this[prop];
if (promise === undefined) {
let result;
try {
result = fn!.apply(this, args);
if (result === undefined || result === null || !isPromise(result)) {
return result;
}
this[prop] = promise = result
.then((r: any) => {
this[prop] = undefined;
return r;
})
.catch(ex => {
this[prop] = undefined;
throw ex;
});
} catch (ex) {
this[prop] = undefined;
throw ex;
}
}
return promise;
};
};
}
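A sketch of how the decorator is meant to be used (the class, method, and import path are hypothetical): concurrent calls with the same arguments share a single in-flight promise, which is cleared once it settles. This relies on the experimentalDecorators flag enabled in the tsconfig change later in this diff.

import { gate } from './gate';

class TreeCache {
	@gate()
	async load(sha: string): Promise<string[]> {
		// Imagine an expensive network request here.
		return [`tree for ${sha}`];
	}
}

const cache = new TreeCache();
// Both calls await the same underlying promise because the resolver key ('abc123') matches.
void Promise.all([cache.load('abc123'), cache.load('abc123')]);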


@ -0,0 +1,504 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { authentication, AuthenticationSession, Disposable, Event, EventEmitter, Range, Uri } from 'vscode';
import { graphql } from '@octokit/graphql';
import { Octokit } from '@octokit/rest';
import { ContextStore } from '../contextStore';
import { fromGitHubUri } from './fs';
import { isSha } from '../extension';
import { Iterables } from '../iterables';
export interface GitHubApiContext {
requestRef: string;
branch: string;
sha: string | undefined;
timestamp: number;
}
interface CreateCommitOperation {
type: 'created';
path: string;
content: string
}
interface ChangeCommitOperation {
type: 'changed';
path: string;
content: string
}
interface DeleteCommitOperation {
type: 'deleted';
path: string;
content: undefined
}
export type CommitOperation = CreateCommitOperation | ChangeCommitOperation | DeleteCommitOperation;
type ArrayElement<T extends Array<unknown>> = T extends (infer U)[] ? U : never;
type GitCreateTreeParamsTree = ArrayElement<NonNullable<Parameters<Octokit['git']['createTree']>[0]>['tree']>;
function getGitHubRootUri(uri: Uri) {
const rootIndex = uri.path.indexOf('/', uri.path.indexOf('/', 1) + 1);
return uri.with({
path: uri.path.substring(0, rootIndex === -1 ? undefined : rootIndex),
query: ''
});
}
export class GitHubApi implements Disposable {
private _onDidChangeContext = new EventEmitter<Uri>();
get onDidChangeContext(): Event<Uri> {
return this._onDidChangeContext.event;
}
private readonly disposable: Disposable;
constructor(private readonly context: ContextStore<GitHubApiContext>) {
this.disposable = Disposable.from(
context.onDidChange(e => this._onDidChangeContext.fire(e))
);
}
dispose() {
this.disposable.dispose();
}
private _session: AuthenticationSession | undefined;
async ensureAuthenticated() {
if (this._session === undefined) {
const providers = await authentication.getProviderIds();
if (!providers.includes('github')) {
await new Promise(resolve => {
authentication.onDidChangeAuthenticationProviders(e => {
if (e.added.includes('github')) {
resolve();
}
});
});
}
this._session = await authentication.getSession('github', ['repo'], { createIfNone: true });
}
return this._session;
}
private _graphql: typeof graphql | undefined;
private async graphql() {
if (this._graphql === undefined) {
const session = await this.ensureAuthenticated();
this._graphql = graphql.defaults({
headers: {
Authorization: `Bearer ${session.accessToken}`,
}
});
}
return this._graphql;
}
private _octokit: typeof Octokit | undefined;
private async octokit(options?: ConstructorParameters<typeof Octokit>[0]) {
if (this._octokit === undefined) {
const session = await this.ensureAuthenticated();
this._octokit = Octokit.defaults({ auth: `token ${session.accessToken}` });
}
return new this._octokit(options);
}
async commit(rootUri: Uri, message: string, operations: CommitOperation[]): Promise<string | undefined> {
const { owner, repo } = fromGitHubUri(rootUri);
try {
const context = await this.getContext(rootUri);
if (context.sha === undefined) {
throw new Error(`Cannot commit to Uri(${rootUri.toString(true)}); Invalid context sha`);
}
const hasDeletes = operations.some(op => op.type === 'deleted');
const github = await this.octokit();
const treeResp = await github.git.getTree({
owner: owner,
repo: repo,
tree_sha: context.sha,
recursive: hasDeletes ? 'true' : undefined,
});
// 0100000000000000 (040000): Directory
// 1000000110100100 (100644): Regular non-executable file
// 1000000110110100 (100664): Regular non-executable group-writeable file
// 1000000111101101 (100755): Regular executable file
// 1010000000000000 (120000): Symbolic link
// 1110000000000000 (160000): Gitlink
let updatedTree: GitCreateTreeParamsTree[];
if (hasDeletes) {
updatedTree = treeResp.data.tree as GitCreateTreeParamsTree[];
for (const operation of operations) {
switch (operation.type) {
case 'created':
updatedTree.push({ path: operation.path, mode: '100644', type: 'blob', content: operation.content });
break;
case 'changed': {
const index = updatedTree.findIndex(item => item.path === operation.path);
if (index !== -1) {
const { path, mode, type } = updatedTree[index];
updatedTree.splice(index, 1, { path: path, mode: mode, type: type, content: operation.content });
}
break;
}
case 'deleted': {
const index = updatedTree.findIndex(item => item.path === operation.path);
if (index !== -1) {
updatedTree.splice(index, 1);
}
break;
}
}
}
} else {
updatedTree = [];
for (const operation of operations) {
switch (operation.type) {
case 'created':
updatedTree.push({ path: operation.path, mode: '100644', type: 'blob', content: operation.content });
break;
case 'changed':
const item = treeResp.data.tree.find(item => item.path === operation.path) as GitCreateTreeParamsTree;
if (item !== undefined) {
const { path, mode, type } = item;
updatedTree.push({ path: path, mode: mode, type: type, content: operation.content });
}
break;
}
}
}
const updatedTreeResp = await github.git.createTree({
owner: owner,
repo: repo,
base_tree: hasDeletes ? undefined : treeResp.data.sha,
tree: updatedTree
});
const resp = await github.git.createCommit({
owner: owner,
repo: repo,
message: message,
tree: updatedTreeResp.data.sha,
parents: [context.sha]
});
this.updateContext(rootUri, { ...context, sha: resp.data.sha, timestamp: Date.now() });
// TODO@eamodio need to send a file change for any open files
await github.git.updateRef({
owner: owner,
repo: repo,
ref: `heads/${context.branch}`,
sha: resp.data.sha
});
return resp.data.sha;
} catch (ex) {
console.log(ex);
throw ex;
}
}
async defaultBranchQuery(uri: Uri) {
const { owner, repo } = fromGitHubUri(uri);
try {
const query = `query defaultBranch($owner: String!, $repo: String!) {
repository(owner: $owner, name: $repo) {
defaultBranchRef {
name
}
}
}`;
const rsp = await this.gqlQuery<{
repository: { defaultBranchRef: { name: string; target: { oid: string } } | null | undefined };
}>(query, {
owner: owner,
repo: repo,
});
return rsp?.repository?.defaultBranchRef?.name ?? undefined;
} catch (ex) {
return undefined;
}
}
async filesQuery(uri: Uri) {
const { owner, repo, ref } = fromGitHubUri(uri);
try {
const context = await this.getContext(uri);
const resp = await (await this.octokit()).git.getTree({
owner: owner,
repo: repo,
recursive: '1',
tree_sha: context?.sha ?? ref,
});
return Iterables.filterMap(resp.data.tree, p => p.type === 'blob' ? p.path : undefined);
} catch (ex) {
return [];
}
}
async fsQuery<T>(uri: Uri, innerQuery: string): Promise<T | undefined> {
const { owner, repo, path, ref } = fromGitHubUri(uri);
try {
const context = await this.getContext(uri);
const query = `query fs($owner: String!, $repo: String!, $path: String) {
repository(owner: $owner, name: $repo) {
object(expression: $path) {
${innerQuery}
}
}
}`;
const rsp = await this.gqlQuery<{
repository: { object: T | null | undefined };
}>(query, {
owner: owner,
repo: repo,
path: `${context.sha ?? ref}:${path}`,
});
return rsp?.repository?.object ?? undefined;
} catch (ex) {
return undefined;
}
}
async latestCommitQuery(uri: Uri) {
const { owner, repo, ref } = fromGitHubUri(uri);
try {
if (ref === 'HEAD') {
const query = `query latest($owner: String!, $repo: String!) {
repository(owner: $owner, name: $repo) {
defaultBranchRef {
target {
oid
}
}
}
}`;
const rsp = await this.gqlQuery<{
repository: { defaultBranchRef: { name: string; target: { oid: string } } | null | undefined };
}>(query, {
owner: owner,
repo: repo,
});
return rsp?.repository?.defaultBranchRef?.target.oid ?? undefined;
}
const query = `query latest($owner: String!, $repo: String!, $ref: String!) {
repository(owner: $owner, name: $repo) {
ref(qualifiedName: $ref) {
target {
oid
}
}
}
}`;
const rsp = await this.gqlQuery<{
repository: { ref: { target: { oid: string } } | null | undefined };
}>(query, {
owner: owner,
repo: repo,
ref: ref ?? 'HEAD',
});
return rsp?.repository?.ref?.target.oid ?? undefined;
} catch (ex) {
return undefined;
}
}
async searchQuery(
query: string,
uri: Uri,
options: { maxResults?: number; context?: { before?: number; after?: number } },
): Promise<SearchQueryResults> {
const { owner, repo, ref } = fromGitHubUri(uri);
// If we have a specific ref, don't try to search, because GitHub search only works against the default branch
if (ref !== 'HEAD') {
return { matches: [], limitHit: true };
}
try {
const resp = await (await this.octokit({
request: {
headers: {
accept: 'application/vnd.github.v3.text-match+json',
},
}
})).search.code({
q: `${query} repo:${owner}/${repo}`,
});
// Since GitHub doesn't return ANY line numbers, just fake it at the top of the file 😢
const range = new Range(0, 0, 0, 0);
const matches: SearchQueryMatch[] = [];
let counter = 0;
let match: SearchQueryMatch;
for (const item of resp.data.items) {
for (const m of (item as typeof item & { text_matches: GitHubSearchTextMatch[] }).text_matches) {
counter++;
if (options.maxResults !== undefined && counter > options.maxResults) {
return { matches: matches, limitHit: true };
}
match = {
path: item.path,
ranges: [],
preview: m.fragment,
matches: [],
};
for (const lm of m.matches) {
let line = 0;
let startChar = 0;
let endChar = 0;
for (let i = 0; i < lm.indices[1]; i++) {
if (i === lm.indices[0]) {
startChar = endChar;
}
if (m.fragment[i] === '\n') {
line++;
endChar = 0;
} else {
endChar++;
}
}
match.ranges.push(range);
match.matches.push(new Range(line, startChar, line, endChar));
}
matches.push(match);
}
}
return { matches: matches, limitHit: false };
} catch (ex) {
return { matches: [], limitHit: true };
}
}
private async gqlQuery<T>(query: string, variables: { [key: string]: string | number }): Promise<T | undefined> {
return (await this.graphql())<T>(query, variables);
}
private readonly pendingContextRequests = new Map<string, Promise<GitHubApiContext>>();
async getContext(uri: Uri): Promise<GitHubApiContext> {
const rootUri = getGitHubRootUri(uri);
let pending = this.pendingContextRequests.get(rootUri.toString());
if (pending === undefined) {
pending = this.getContextCore(rootUri);
this.pendingContextRequests.set(rootUri.toString(), pending);
}
try {
return await pending;
} finally {
this.pendingContextRequests.delete(rootUri.toString());
}
}
private readonly rootUriToContextMap = new Map<string, GitHubApiContext>();
private async getContextCore(rootUri: Uri): Promise<GitHubApiContext> {
const key = rootUri.toString();
let context = this.rootUriToContextMap.get(key);
// Check if we have a cached context
if (context?.sha !== undefined) {
return context;
}
// Check if we have a saved context
context = this.context.get(rootUri);
if (context?.sha !== undefined) {
this.rootUriToContextMap.set(key, context);
return context;
}
const { ref } = fromGitHubUri(rootUri);
// If the requested ref looks like a sha, then use it
if (isSha(ref)) {
context = { requestRef: ref, branch: ref, sha: ref, timestamp: Date.now() };
} else {
let branch;
if (ref === 'HEAD') {
branch = await this.defaultBranchQuery(rootUri);
if (branch === undefined) {
throw new Error(`Cannot get context for Uri(${rootUri.toString(true)}); unable to get default branch`);
}
} else {
branch = ref;
}
// Query for the latest sha for the given ref
const sha = await this.latestCommitQuery(rootUri);
context = { requestRef: ref, branch: branch, sha: sha, timestamp: Date.now() };
}
this.updateContext(rootUri, context);
return context;
}
private updateContext(rootUri: Uri, context: GitHubApiContext) {
this.rootUriToContextMap.set(rootUri.toString(), context);
this.context.set(rootUri, context);
}
}
interface GitHubSearchTextMatch {
object_url: string;
object_type: string;
property: string;
fragment: string;
matches: {
text: string;
indices: number[];
}[];
}
interface SearchQueryMatch {
path: string;
ranges: Range[];
preview: string;
matches: Range[];
}
interface SearchQueryResults {
matches: SearchQueryMatch[];
limitHit: boolean;
}
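From a caller's perspective, a sketch of committing a single changed file with made-up values (import path assumed); GitHubApi.commit performs the getTree/createTree/createCommit/updateRef sequence shown above:

import { Uri } from 'vscode';
import { CommitOperation, GitHubApi } from './api';

async function commitReadme(api: GitHubApi, rootUri: Uri): Promise<void> {
	// rootUri is the original github-scheme root, e.g. github://HEAD/microsoft/vscode (hypothetical).
	const operations: CommitOperation[] = [
		{ type: 'changed', path: 'README.md', content: '# Hello from the GitHub Browser' }
	];
	const sha = await api.commit(rootUri, 'Update README', operations);
	console.log(sha === undefined ? 'commit failed' : `new commit ${sha}`);
}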


@ -5,8 +5,6 @@
'use strict';
import {
authentication,
AuthenticationSession2,
CancellationToken,
Disposable,
Event,
@ -20,7 +18,6 @@ import {
FileSystemProvider,
FileType,
Progress,
Range,
TextSearchComplete,
TextSearchOptions,
TextSearchProvider,
@ -29,11 +26,11 @@ import {
Uri,
workspace,
} from 'vscode';
import { Octokit } from '@octokit/rest';
import { graphql } from '@octokit/graphql/';
import * as fuzzySort from 'fuzzysort';
import fetch from 'node-fetch';
import { Iterables } from './iterables';
import { GitHubApi } from './api';
import { Iterables } from '../iterables';
import { getRootUri } from '../extension';
const emptyDisposable = { dispose: () => { /* noop */ } };
const replaceBackslashRegex = /(\/|\\)/g;
@ -53,16 +50,17 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
}
private readonly disposable: Disposable;
private fsCache = new Map<string, any>();
private fsCache = new Map<string, Map<string, any>>();
constructor() {
constructor(private readonly github: GitHubApi) {
this.disposable = Disposable.from(
workspace.registerFileSystemProvider(GitHubFS.scheme, this, {
isCaseSensitive: true,
isReadonly: true,
isReadonly: true
}),
workspace.registerFileSearchProvider(GitHubFS.scheme, this),
workspace.registerTextSearchProvider(GitHubFS.scheme, this),
github.onDidChangeContext(e => this.fsCache.delete(e.toString()))
);
}
@ -70,22 +68,18 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
this.disposable?.dispose();
}
private _github: Promise<GitHubApi | undefined> | undefined;
get github(): Promise<GitHubApi | undefined> {
if (this._github === undefined) {
this._github = this.getGitHubApi();
private getCache(uri: Uri) {
const rootUri = getRootUri(uri);
if (rootUri === undefined) {
return undefined;
}
return this._github;
}
private async getGitHubApi(): Promise<GitHubApi | undefined> {
try {
const session = await authentication.getSession('github', ['repo'], { createIfNone: true });
return new GitHubApi(session);
} catch (ex) {
this._github = undefined;
throw ex;
let cache = this.fsCache.get(rootUri.toString());
if (cache === undefined) {
cache = new Map<string, any>();
this.fsCache.set(rootUri.toString(), cache);
}
return cache;
}
//#region FileSystemProvider
@ -96,7 +90,8 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
async stat(uri: Uri): Promise<FileStat> {
if (uri.path === '' || uri.path.lastIndexOf('/') === 0) {
return { type: FileType.Directory, size: 0, ctime: 0, mtime: 0 };
const context = await this.github.getContext(uri);
return { type: FileType.Directory, size: 0, ctime: 0, mtime: context?.timestamp };
}
const data = await this.fsQuery<{
@ -108,14 +103,20 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
...on Blob {
byteSize
}`,
this.fsCache,
this.getCache(uri),
);
if (data === undefined) {
throw FileSystemError.FileNotFound();
}
const context = await this.github.getContext(uri);
return {
type: typenameToFileType(data?.__typename),
size: data?.byteSize ?? 0,
type: typenameToFileType(data.__typename),
size: data.byteSize ?? 0,
ctime: 0,
mtime: 0,
mtime: context?.timestamp,
};
}
@ -130,7 +131,7 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
type
}
}`,
this.fsCache,
this.getCache(uri),
);
return (data?.entries ?? []).map<[string, FileType]>(e => [
@ -139,8 +140,8 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
]);
}
createDirectory(): void | Thenable<void> {
throw FileSystemError.NoPermissions;
createDirectory(_uri: Uri): void | Thenable<void> {
throw FileSystemError.NoPermissions();
}
async readFile(uri: Uri): Promise<Uint8Array> {
@ -172,20 +173,20 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
return textEncoder.encode(data?.text ?? '');
}
writeFile(): void | Thenable<void> {
throw FileSystemError.NoPermissions;
async writeFile(_uri: Uri, _content: Uint8Array, _options: { create: boolean, overwrite: boolean }): Promise<void> {
throw FileSystemError.NoPermissions();
}
delete(): void | Thenable<void> {
throw FileSystemError.NoPermissions;
delete(_uri: Uri, _options: { recursive: boolean }): void | Thenable<void> {
throw FileSystemError.NoPermissions();
}
rename(): void | Thenable<void> {
throw FileSystemError.NoPermissions;
rename(_oldUri: Uri, _newUri: Uri, _options: { overwrite: boolean }): void | Thenable<void> {
throw FileSystemError.NoPermissions();
}
copy?(): void | Thenable<void> {
throw FileSystemError.NoPermissions;
copy(_source: Uri, _destination: Uri, _options: { overwrite: boolean }): void | Thenable<void> {
throw FileSystemError.NoPermissions();
}
//#endregion
@ -201,8 +202,10 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
): Promise<Uri[]> {
let searchable = this.fileSearchCache.get(options.folder.toString(true));
if (searchable === undefined) {
const matches = await (await this.github)?.filesQuery(options.folder);
if (matches === undefined || token.isCancellationRequested) { return []; }
const matches = await this.github.filesQuery(options.folder);
if (matches === undefined || token.isCancellationRequested) {
return [];
}
searchable = [...Iterables.map(matches, m => (fuzzySort as Fuzzysort).prepareSlow(m))];
this.fileSearchCache.set(options.folder.toString(true), searchable);
@ -233,13 +236,12 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
query: TextSearchQuery,
options: TextSearchOptions,
progress: Progress<TextSearchResult>,
token: CancellationToken,
_token: CancellationToken,
): Promise<TextSearchComplete> {
const results = await (await this.github)?.searchQuery(
const results = await this.github.searchQuery(
query.pattern,
options.folder,
{ maxResults: options.maxResults, context: { before: options.beforeContext, after: options.afterContext } },
token,
);
if (results === undefined) { return { limitHit: true }; }
@ -266,9 +268,11 @@ export class GitHubFS implements FileSystemProvider, FileSearchProvider, TextSea
const key = `${uri.toString()}:${getHashCode(query)}`;
let data = cache?.get(key);
if (data !== undefined) { return data as T; }
if (data !== undefined) {
return data as T;
}
data = await (await this.github)?.fsQuery<T>(uri, query);
data = await this.github.fsQuery<T>(uri, query);
cache?.set(key, data);
return data;
}
@ -295,15 +299,19 @@ function typenameToFileType(typename: string | undefined | null) {
}
}
type RepoInfo = { owner: string; repo: string; path: string | undefined; ref?: string };
function fromGitHubUri(uri: Uri): RepoInfo {
type RepoInfo = { owner: string; repo: string; path: string | undefined; ref: string };
export function fromGitHubUri(uri: Uri): RepoInfo {
const [, owner, repo, ...rest] = uri.path.split('/');
let ref;
if (uri.authority) {
ref = uri.authority;
// The casing of HEAD is important for the GitHub api to work
if (/HEAD/i.test(ref)) {
ref = 'HEAD';
}
}
return { owner: owner, repo: repo, path: rest.join('/'), ref: ref };
return { owner: owner, repo: repo, path: rest.join('/'), ref: ref ?? 'HEAD' };
}
function getHashCode(s: string): number {
@ -322,175 +330,3 @@ function getHashCode(s: string): number {
}
return hash;
}
interface SearchQueryMatch {
path: string;
ranges: Range[];
preview: string;
matches: Range[];
}
interface SearchQueryResults {
matches: SearchQueryMatch[];
limitHit: boolean;
}
class GitHubApi {
constructor(private readonly session: AuthenticationSession2) { }
private _graphql: typeof graphql | undefined;
private get graphql() {
if (this._graphql === undefined) {
this._graphql = graphql.defaults({
headers: {
Authorization: `Bearer ${this.token}`,
}
});
}
return this._graphql;
}
get token() {
return this.session.accessToken;
}
async filesQuery(uri: Uri) {
const { owner, repo, ref } = fromGitHubUri(uri);
try {
const resp = await new Octokit({
auth: `token ${this.token}`,
}).git.getTree({
owner: owner,
repo: repo,
recursive: '1',
tree_sha: ref ?? 'HEAD',
});
return Iterables.filterMap(resp.data.tree, p => p.type === 'blob' ? p.path : undefined);
} catch (ex) {
return [];
}
}
async searchQuery(
query: string,
uri: Uri,
options: { maxResults?: number; context?: { before?: number; after?: number } },
_token: CancellationToken,
): Promise<SearchQueryResults> {
const { owner, repo, ref } = fromGitHubUri(uri);
// If we have a specific ref, don't try to search, because GitHub search only works against the default branch
if (ref === undefined) {
return { matches: [], limitHit: true };
}
try {
const resp = await new Octokit({
auth: `token ${this.token}`,
request: {
headers: {
accept: 'application/vnd.github.v3.text-match+json',
},
}
}).search.code({
q: `${query} repo:${owner}/${repo}`,
});
// Since GitHub doesn't return ANY line numbers just fake it at the top of the file 😢
const range = new Range(0, 0, 0, 0);
const matches: SearchQueryMatch[] = [];
console.log(resp.data.items.length, resp.data.items);
let counter = 0;
let match: SearchQueryMatch;
for (const item of resp.data.items) {
for (const m of (item as typeof item & { text_matches: GitHubSearchTextMatch[] }).text_matches) {
counter++;
if (options.maxResults !== undefined && counter > options.maxResults) {
return { matches: matches, limitHit: true };
}
match = {
path: item.path,
ranges: [],
preview: m.fragment,
matches: [],
};
for (const lm of m.matches) {
let line = 0;
let shartChar = 0;
let endChar = 0;
for (let i = 0; i < lm.indices[1]; i++) {
if (i === lm.indices[0]) {
shartChar = endChar;
}
if (m.fragment[i] === '\n') {
line++;
endChar = 0;
} else {
endChar++;
}
}
match.ranges.push(range);
match.matches.push(new Range(line, shartChar, line, endChar));
}
matches.push(match);
}
}
return { matches: matches, limitHit: false };
} catch (ex) {
return { matches: [], limitHit: true };
}
}
async fsQuery<T>(uri: Uri, innerQuery: string): Promise<T | undefined> {
try {
const query = `query fs($owner: String!, $repo: String!, $path: String) {
repository(owner: $owner, name: $repo) {
object(expression: $path) {
${innerQuery}
}
}
}`;
const { owner, repo, path, ref } = fromGitHubUri(uri);
const variables = {
owner: owner,
repo: repo,
path: `${ref ?? 'HEAD'}:${path}`,
};
const rsp = await this.query<{
repository: { object: T | null | undefined };
}>(query, variables);
return rsp?.repository?.object ?? undefined;
} catch (ex) {
return undefined;
}
}
query<T>(query: string, variables: { [key: string]: string | number }): Promise<T | undefined> {
return this.graphql(query, variables) as Promise<T | undefined>;
}
}
interface GitHubSearchTextMatch {
object_url: string;
object_type: string;
property: string;
fragment: string;
matches: GitHubSearchMatch[];
}
interface GitHubSearchMatch {
text: string;
indices: number[];
}


@ -0,0 +1,177 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { CancellationToken, commands, Disposable, scm, SourceControl, SourceControlResourceGroup, SourceControlResourceState, Uri, window, workspace } from 'vscode';
import * as nls from 'vscode-nls';
import { IChangeStore } from './changeStore';
import { GitHubApi, CommitOperation } from './github/api';
import { getRelativePath } from './extension';
const localize = nls.loadMessageBundle();
interface ScmProvider {
sourceControl: SourceControl,
groups: SourceControlResourceGroup[]
}
export class VirtualSCM implements Disposable {
private readonly providers: ScmProvider[] = [];
private disposable: Disposable;
constructor(
private readonly originalScheme: string,
private readonly github: GitHubApi,
private readonly changeStore: IChangeStore,
) {
this.registerCommands();
// TODO@eamodio listen for workspace folder changes
for (const folder of workspace.workspaceFolders ?? []) {
this.createScmProvider(folder.uri, folder.name);
for (const operation of changeStore.getChanges(folder.uri)) {
this.update(folder.uri, operation.uri);
}
}
this.disposable = Disposable.from(
changeStore.onDidChange(e => this.update(e.rootUri, e.uri)),
);
}
dispose() {
this.disposable.dispose();
}
private registerCommands() {
commands.registerCommand('githubBrowser.commit', (sourceControl: SourceControl | undefined) => {
// TODO@eamodio remove this hack once I figure out why the args are missing
if (sourceControl === undefined && this.providers.length === 1) {
sourceControl = this.providers[0].sourceControl;
}
if (sourceControl === undefined) {
return;
}
this.commitChanges(sourceControl);
});
commands.registerCommand('githubBrowser.discardChanges', (resourceState: SourceControlResourceState) =>
this.discardChanges(resourceState.resourceUri)
);
commands.registerCommand('githubBrowser.openChanges', (resourceState: SourceControlResourceState) =>
this.openChanges(resourceState.resourceUri)
);
commands.registerCommand('githubBrowser.openFile', (resourceState: SourceControlResourceState) =>
this.openFile(resourceState.resourceUri)
);
}
async commitChanges(sourceControl: SourceControl): Promise<void> {
const operations = this.changeStore
.getChanges(sourceControl.rootUri!)
.map<CommitOperation>(operation => {
const path = getRelativePath(sourceControl.rootUri!, operation.uri);
switch (operation.type) {
case 'created':
return { type: operation.type, path: path, content: this.changeStore.getContent(operation.uri)! };
case 'changed':
return { type: operation.type, path: path, content: this.changeStore.getContent(operation.uri)! };
case 'deleted':
return { type: operation.type, path: path };
}
});
if (!operations.length) {
window.showInformationMessage(localize('no changes', "There are no changes to commit."));
return;
}
const message = sourceControl.inputBox.value;
if (message) {
const sha = await this.github.commit(this.getOriginalResource(sourceControl.rootUri!), message, operations);
if (sha !== undefined) {
this.changeStore.acceptAll(sourceControl.rootUri!);
sourceControl.inputBox.value = '';
}
}
}
discardChanges(uri: Uri): Promise<void> {
return this.changeStore.discard(uri);
}
openChanges(uri: Uri) {
return this.changeStore.openChanges(uri, this.getOriginalResource(uri));
}
openFile(uri: Uri) {
return this.changeStore.openFile(uri);
}
private update(rootUri: Uri, uri: Uri) {
const folder = workspace.getWorkspaceFolder(uri);
if (folder === undefined) {
return;
}
const provider = this.createScmProvider(rootUri, folder.name);
const group = this.createChangesGroup(provider);
group.resourceStates = this.changeStore.getChanges(rootUri).map<SourceControlResourceState>(op => {
const rs: SourceControlResourceState = {
decorations: {
strikeThrough: op.type === 'deleted'
},
resourceUri: op.uri,
command: {
command: 'githubBrowser.openChanges',
title: 'Open Changes',
}
};
rs.command!.arguments = [rs];
return rs;
});
}
private createScmProvider(rootUri: Uri, name: string) {
let provider = this.providers.find(sc => sc.sourceControl.rootUri?.toString() === rootUri.toString());
if (provider === undefined) {
const sourceControl = scm.createSourceControl('github', name, rootUri);
sourceControl.quickDiffProvider = { provideOriginalResource: uri => this.getOriginalResource(uri) };
sourceControl.acceptInputCommand = {
command: 'githubBrowser.commit',
title: 'Commit',
arguments: [sourceControl]
};
sourceControl.inputBox.placeholder = `Message (Ctrl+Enter to commit '${name}')`;
// sourceControl.inputBox.validateInput = value => value ? undefined : 'Invalid commit message';
provider = { sourceControl: sourceControl, groups: [] };
this.createChangesGroup(provider);
this.providers.push(provider);
}
return provider;
}
private createChangesGroup(provider: ScmProvider) {
let group = provider.groups.find(g => g.id === 'github.changes');
if (group === undefined) {
group = provider.sourceControl.createResourceGroup('github.changes', 'Changes');
provider.groups.push(group);
}
return group;
}
private getOriginalResource(uri: Uri, _token?: CancellationToken): Uri {
return uri.with({ scheme: this.originalScheme });
}
}


@ -0,0 +1,29 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const textDecoder = new TextDecoder();
const textEncoder = new TextEncoder();
declare let WEBWORKER: boolean;
export async function sha1(s: string | Uint8Array): Promise<string> {
while (true) {
try {
if (WEBWORKER) {
const hash = await globalThis.crypto.subtle.digest({ name: 'sha-1' }, typeof s === 'string' ? textEncoder.encode(s) : s);
// Use encodeURIComponent to avoid issues with btoa and Latin-1 characters
return globalThis.btoa(encodeURIComponent(textDecoder.decode(hash)));
} else {
return (await import('crypto')).createHash('sha1').update(s).digest('base64');
}
} catch (ex) {
if (ex instanceof ReferenceError) {
(global as any).WEBWORKER = false;
}
}
}
}
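The change store hashes file contents with this helper to detect no-op edits; a minimal call-site sketch (assumed, not from this diff):

import { sha1 } from './sha1';

export async function contentsMatch(a: Uint8Array, b: Uint8Array): Promise<boolean> {
	// WEBWORKER is expected to be injected by the bundler in the browser build; in Node the
	// ReferenceError path above flips it off and the 'crypto' fallback runs instead.
	return (await sha1(a)) === (await sha1(b));
}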


@ -0,0 +1,99 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { Disposable, StatusBarAlignment, StatusBarItem, Uri, window, workspace } from 'vscode';
import { ChangeStoreEvent, IChangeStore } from './changeStore';
import { GitHubApiContext } from './github/api';
import { isSha } from './extension';
import { ContextStore, WorkspaceFolderContext } from './contextStore';
export class StatusBar implements Disposable {
private readonly disposable: Disposable;
private readonly items = new Map<string, StatusBarItem>();
constructor(
private readonly contextStore: ContextStore<GitHubApiContext>,
private readonly changeStore: IChangeStore
) {
this.disposable = Disposable.from(
contextStore.onDidChange(this.onContextsChanged, this),
changeStore.onDidChange(this.onChanged, this)
);
for (const context of this.contextStore.getForWorkspace()) {
this.createOrUpdateStatusBarItem(context);
}
}
dispose() {
this.disposable?.dispose();
this.items.forEach(i => i.dispose());
}
private createOrUpdateStatusBarItem(wc: WorkspaceFolderContext<GitHubApiContext>) {
let item = this.items.get(wc.folderUri.toString());
if (item === undefined) {
item = window.createStatusBarItem({
id: `githubBrowser.branch:${wc.folderUri.toString()}`,
name: `GitHub Browser: ${wc.name}`,
alignment: StatusBarAlignment.Left,
priority: 1000
});
}
if (isSha(wc.context.branch)) {
item.text = `$(git-commit) ${wc.context.branch.substr(0, 8)}`;
item.tooltip = `${wc.name} \u2022 ${wc.context.branch.substr(0, 8)}`;
} else {
item.text = `$(git-branch) ${wc.context.branch}`;
item.tooltip = `${wc.name} \u2022 ${wc.context.branch}${wc.context.sha ? ` @ ${wc.context.sha?.substr(0, 8)}` : ''}`;
}
const hasChanges = this.changeStore.hasChanges(wc.folderUri);
if (hasChanges) {
item.text += '*';
}
item.show();
this.items.set(wc.folderUri.toString(), item);
}
private onContextsChanged(uri: Uri) {
const folder = workspace.getWorkspaceFolder(this.contextStore.getWorkspaceResource(uri));
if (folder === undefined) {
return;
}
const context = this.contextStore.get(uri);
if (context === undefined) {
return;
}
this.createOrUpdateStatusBarItem({
context: context,
name: folder.name,
folderUri: folder.uri,
});
}
private onChanged(e: ChangeStoreEvent) {
const item = this.items.get(e.rootUri.toString());
if (item !== undefined) {
const hasChanges = this.changeStore.hasChanges(e.rootUri);
if (hasChanges) {
if (!item.text.endsWith('*')) {
item.text += '*';
}
} else {
if (item.text.endsWith('*')) {
item.text = item.text.substr(0, item.text.length - 1);
}
}
}
}
}


@ -1,11 +1,12 @@
{
"extends": "../shared.tsconfig.json",
"compilerOptions": {
"outDir": "./out",
// "experimentalDecorators": true,
// "typeRoots": [
// "./node_modules/@types"
// ]
"experimentalDecorators": true,
"lib": [
"es2018",
"dom"
],
"outDir": "./out"
},
"include": [
"src/**/*"

File diff suppressed because it is too large


@ -27,21 +27,15 @@ function getAgent(url: string | undefined = process.env.HTTPS_PROXY): Agent {
const scopes = ['repo', 'workflow'];
export async function getSession(): Promise<AuthenticationSession> {
const authenticationSessions = await authentication.getSessions('github', scopes);
if (authenticationSessions.length) {
return await authenticationSessions[0];
} else {
return await authentication.login('github', scopes);
}
return await authentication.getSession('github', scopes, { createIfNone: true });
}
let _octokit: Promise<Octokit> | undefined;
export function getOctokit(): Promise<Octokit> {
if (!_octokit) {
_octokit = getSession().then(async session => {
const token = await session.getAccessToken();
_octokit = getSession().then(session => {
const token = session.accessToken;
const agent = getAgent();
return new Octokit({


@ -6,9 +6,10 @@
import * as vscode from 'vscode';
import { API as GitAPI } from './typings/git';
import { publishRepository } from './publish';
import { combinedDisposable } from './util';
export function registerCommands(gitAPI: GitAPI): vscode.Disposable[] {
const disposables = [];
export function registerCommands(gitAPI: GitAPI): vscode.Disposable {
const disposables: vscode.Disposable[] = [];
disposables.push(vscode.commands.registerCommand('github.publish', async () => {
try {
@ -18,5 +19,5 @@ export function registerCommands(gitAPI: GitAPI): vscode.Disposable[] {
}
}));
return disposables;
return combinedDisposable(disposables);
}


@ -17,7 +17,7 @@ class GitHubCredentialProvider implements CredentialsProvider {
}
const session = await getSession();
return { username: session.account.id, password: await session.getAccessToken() };
return { username: session.account.id, password: session.accessToken };
}
}

View file

@@ -3,17 +3,41 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as vscode from 'vscode';
import { Disposable, ExtensionContext, extensions } from 'vscode';
import { GithubRemoteSourceProvider } from './remoteSourceProvider';
import { GitExtension } from './typings/git';
import { registerCommands } from './commands';
import { GithubCredentialProviderManager } from './credentialProvider';
import { dispose, combinedDisposable } from './util';
export async function activate(context: vscode.ExtensionContext) {
const gitExtension = vscode.extensions.getExtension<GitExtension>('vscode.git')!.exports;
const gitAPI = gitExtension.getAPI(1);
export function activate(context: ExtensionContext): void {
const disposables = new Set<Disposable>();
context.subscriptions.push(combinedDisposable(disposables));
context.subscriptions.push(...registerCommands(gitAPI));
context.subscriptions.push(gitAPI.registerRemoteSourceProvider(new GithubRemoteSourceProvider(gitAPI)));
context.subscriptions.push(new GithubCredentialProviderManager(gitAPI));
const init = () => {
try {
const gitAPI = gitExtension.getAPI(1);
disposables.add(registerCommands(gitAPI));
disposables.add(gitAPI.registerRemoteSourceProvider(new GithubRemoteSourceProvider(gitAPI)));
disposables.add(new GithubCredentialProviderManager(gitAPI));
} catch (err) {
console.error('Could not initialize GitHub extension');
console.warn(err);
}
};
const onDidChangeGitExtensionEnablement = (enabled: boolean) => {
if (!enabled) {
dispose(disposables);
disposables.clear();
} else {
init();
}
};
const gitExtension = extensions.getExtension<GitExtension>('vscode.git')!.exports;
context.subscriptions.push(gitExtension.onDidChangeEnablement(onDidChangeGitExtensionEnablement));
onDidChangeGitExtensionEnablement(gitExtension.enabled);
}
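
Activation now defers getAPI(1) until the built-in git extension reports itself enabled, tears everything down when it is disabled, and invokes onDidChangeGitExtensionEnablement(gitExtension.enabled) once at the end so the flow also covers the case where git is already enabled at activation time. The dispose helper imported from ./util is not shown in this hunk; it is assumed to simply dispose each tracked item, for example:

	import { Disposable } from 'vscode';

	// Assumed shape of the dispose helper: dispose each item; the caller clears the set afterwards.
	function dispose(disposables: Iterable<Disposable>): void {
		for (const d of disposables) {
			d.dispose();
		}
	}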

View file

@@ -5,10 +5,10 @@
import * as vscode from 'vscode';
import * as nls from 'vscode-nls';
import * as path from 'path';
import { promises as fs } from 'fs';
import { API as GitAPI, Repository } from './typings/git';
import { getOctokit } from './auth';
import { TextEncoder } from 'util';
import { basename } from 'path';
const localize = nls.loadMessageBundle();
@@ -28,10 +28,12 @@ export async function publishRepository(gitAPI: GitAPI, repository?: Repository)
return;
}
let folder: vscode.WorkspaceFolder;
let folder: vscode.Uri;
if (vscode.workspace.workspaceFolders.length === 1) {
folder = vscode.workspace.workspaceFolders[0];
if (repository) {
folder = repository.rootUri;
} else if (vscode.workspace.workspaceFolders.length === 1) {
folder = vscode.workspace.workspaceFolders[0].uri;
} else {
const picks = vscode.workspace.workspaceFolders.map(folder => ({ label: folder.name, folder }));
const placeHolder = localize('pick folder', "Pick a folder to publish to GitHub");
@@ -41,14 +43,14 @@ export async function publishRepository(gitAPI: GitAPI, repository?: Repository)
return;
}
folder = pick.folder;
folder = pick.folder.uri;
}
let quickpick = vscode.window.createQuickPick<vscode.QuickPickItem & { repo?: string, auth?: 'https' | 'ssh' }>();
quickpick.ignoreFocusOut = true;
quickpick.placeholder = 'Repository Name';
quickpick.value = folder.name;
quickpick.value = basename(folder.fsPath);
quickpick.show();
quickpick.busy = true;
@@ -97,37 +99,49 @@ export async function publishRepository(gitAPI: GitAPI, repository?: Repository)
return;
}
quickpick = vscode.window.createQuickPick();
quickpick.placeholder = localize('ignore', "Select which files should be included in the repository.");
quickpick.canSelectMany = true;
quickpick.show();
if (!repository) {
const gitignore = vscode.Uri.joinPath(folder, '.gitignore');
let shouldGenerateGitignore = false;
try {
quickpick.busy = true;
const repositoryPath = folder.uri.fsPath;
const currentPath = path.join(repositoryPath);
const children = await fs.readdir(currentPath);
quickpick.items = children.map(name => ({ label: name }));
quickpick.selectedItems = quickpick.items;
quickpick.busy = false;
const result = await Promise.race([
new Promise<readonly vscode.QuickPickItem[]>(c => quickpick.onDidAccept(() => c(quickpick.selectedItems))),
new Promise<undefined>(c => quickpick.onDidHide(() => c(undefined)))
]);
if (!result) {
return;
try {
await vscode.workspace.fs.stat(gitignore);
} catch (err) {
shouldGenerateGitignore = true;
}
const ignored = new Set(children);
result.forEach(c => ignored.delete(c.label));
if (shouldGenerateGitignore) {
quickpick = vscode.window.createQuickPick();
quickpick.placeholder = localize('ignore', "Select which files should be included in the repository.");
quickpick.canSelectMany = true;
quickpick.show();
const raw = [...ignored].map(i => `/${i}`).join('\n');
await fs.writeFile(path.join(repositoryPath, '.gitignore'), raw, 'utf8');
} finally {
quickpick.dispose();
try {
quickpick.busy = true;
const children = (await vscode.workspace.fs.readDirectory(folder)).map(([name]) => name);
quickpick.items = children.map(name => ({ label: name }));
quickpick.selectedItems = quickpick.items;
quickpick.busy = false;
const result = await Promise.race([
new Promise<readonly vscode.QuickPickItem[]>(c => quickpick.onDidAccept(() => c(quickpick.selectedItems))),
new Promise<undefined>(c => quickpick.onDidHide(() => c(undefined)))
]);
if (!result) {
return;
}
const ignored = new Set(children);
result.forEach(c => ignored.delete(c.label));
const raw = [...ignored].map(i => `/${i}`).join('\n');
const encoder = new TextEncoder();
await vscode.workspace.fs.writeFile(gitignore, encoder.encode(raw));
} finally {
quickpick.dispose();
}
}
}
const githubRepository = await vscode.window.withProgress({ location: vscode.ProgressLocation.Notification, cancellable: false, title: 'Publish to GitHub' }, async progress => {
@@ -143,7 +157,7 @@ export async function publishRepository(gitAPI: GitAPI, repository?: Repository)
progress.report({ message: 'Creating first commit', increment: 25 });
if (!repository) {
repository = await gitAPI.init(folder.uri) || undefined;
repository = await gitAPI.init(folder) || undefined;
if (!repository) {
return;

Some files were not shown because too many files have changed in this diff