Merge pull request #161640 from microsoft/build-integrated-cli

Update build process to include integrated CLI
This commit is contained in:
Connor Peet 2022-10-12 08:59:37 -07:00 committed by GitHub
commit 6353f80f3d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
85 changed files with 6959 additions and 6045 deletions

View file

@ -91,7 +91,6 @@
"[rust]": {
"editor.defaultFormatter": "rust-lang.rust-analyzer",
"editor.formatOnSave": true,
"editor.insertSpaces": true
},
"typescript.tsc.autoDetect": "off",
"testing.autoRun.mode": "rerun",

View file

@ -0,0 +1,50 @@
# Template: compile the VS Code CLI ("code-tunnel") for a single Rust target
# and publish the resulting binary as a pipeline artifact.
#
# Windows and macOS targets are packaged as .zip (the darwin zips feed the
# later sign/notarize templates, which operate on "*.zip"); every other
# target ships as .tar.gz.
parameters:
- name: VSCODE_CLI_TARGET # Rust target triple, e.g. x86_64-apple-darwin
  type: string
- name: VSCODE_CLI_ARTIFACT # pipeline artifact name for the built binary
  type: string
- name: VSCODE_CLI_ENV # extra env vars for the cargo step (e.g. cross-compiler overrides)
  type: object
  default: {}

steps:
- script: cargo build --release --target ${{ parameters.VSCODE_CLI_TARGET }} --bin=code-tunnel
  displayName: Compile ${{ parameters.VSCODE_CLI_TARGET }}
  workingDirectory: $(Build.SourcesDirectory)/cli
  env:
    # Values baked into the binary at build time. VSCODE_CLI_VERSION and the
    # *_LICENSE_* variables are emitted earlier by
    # build/azure-pipelines/cli/prepare.js; the AI key/endpoint presumably
    # come from pipeline variables or Key Vault — confirm with the caller.
    VSCODE_CLI_VERSION: $(VSCODE_CLI_VERSION)
    VSCODE_CLI_REMOTE_LICENSE_TEXT: $(VSCODE_CLI_REMOTE_LICENSE_TEXT)
    VSCODE_CLI_REMOTE_LICENSE_PROMPT: $(VSCODE_CLI_REMOTE_LICENSE_PROMPT)
    VSCODE_CLI_ASSET_NAME: ${{ parameters.VSCODE_CLI_ARTIFACT }}
    VSCODE_CLI_AI_KEY: $(VSCODE_CLI_AI_KEY)
    VSCODE_CLI_AI_ENDPOINT: $(VSCODE_CLI_AI_ENDPOINT)
    # Splice any caller-provided environment entries into the compile step.
    ${{ each pair in parameters.VSCODE_CLI_ENV }}:
      ${{ pair.key }}: ${{ pair.value }}

# Windows/macOS: zip the single binary and publish it.
- ${{ if or(contains(parameters.VSCODE_CLI_TARGET, '-windows-'), contains(parameters.VSCODE_CLI_TARGET, '-darwin')) }}:
  - task: ArchiveFiles@2
    inputs:
      # Windows builds produce code-tunnel.exe; darwin builds produce code-tunnel.
      ${{ if contains(parameters.VSCODE_CLI_TARGET, '-windows-') }}:
        rootFolderOrFile: $(Build.SourcesDirectory)/cli/target/${{ parameters.VSCODE_CLI_TARGET }}/release/code-tunnel.exe
      ${{ if contains(parameters.VSCODE_CLI_TARGET, '-darwin') }}:
        rootFolderOrFile: $(Build.SourcesDirectory)/cli/target/${{ parameters.VSCODE_CLI_TARGET }}/release/code-tunnel
      includeRootFolder: false
      archiveType: zip
      archiveFile: $(Build.ArtifactStagingDirectory)/${{ parameters.VSCODE_CLI_ARTIFACT }}.zip
  - publish: $(Build.ArtifactStagingDirectory)/${{ parameters.VSCODE_CLI_ARTIFACT }}.zip
    artifact: ${{ parameters.VSCODE_CLI_ARTIFACT }}
    displayName: Publish ${{ parameters.VSCODE_CLI_ARTIFACT }} artifact
# All other targets (Linux): gzip-compressed tarball.
- ${{ else }}:
  - task: ArchiveFiles@2
    inputs:
      rootFolderOrFile: $(Build.SourcesDirectory)/cli/target/${{ parameters.VSCODE_CLI_TARGET }}/release/code-tunnel
      includeRootFolder: false
      archiveType: tar
      tarCompression: gz
      archiveFile: $(Build.ArtifactStagingDirectory)/${{ parameters.VSCODE_CLI_ARTIFACT }}.tar.gz
  - publish: $(Build.ArtifactStagingDirectory)/${{ parameters.VSCODE_CLI_ARTIFACT }}.tar.gz
    artifact: ${{ parameters.VSCODE_CLI_ARTIFACT }}
    displayName: Publish ${{ parameters.VSCODE_CLI_ARTIFACT }} artifact

View file

@ -0,0 +1,40 @@
# Template: codesign and notarize the darwin CLI zips produced by
# cli-compile-and-publish.yml, then re-publish each one under its signed
# (non-"unsigned_"-prefixed) artifact name.
parameters:
- name: VSCODE_CLI_ARTIFACTS # names of the unsigned_* darwin CLI artifacts
  type: object
  default: []

steps:
- task: UseDotNet@2
  inputs:
    # NOTE(review): .NET Core 2.x is end-of-life and the win32 sign template
    # uses 3.x — confirm which runtime the ESRP client actually requires.
    version: 2.x
- task: EsrpClientTool@1
  displayName: Download ESRPClient

# Pull every unsigned artifact into its own folder under pkg/.
- ${{ each target in parameters.VSCODE_CLI_ARTIFACTS }}:
  - task: DownloadPipelineArtifact@2
    displayName: Download ${{ target }}
    inputs:
      artifact: ${{ target }}
      path: $(Build.ArtifactStagingDirectory)/pkg/${{ target }}

# Sign, then notarize, all downloaded zips in one batch each; "*.zip" is a
# glob forwarded to the sign script to select the files under pkg/.
- script: |
    set -e
    node build/azure-pipelines/common/sign "$(esrpclient.toolpath)/$(esrpclient.toolname)" darwin-sign $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(Build.ArtifactStagingDirectory)/pkg "*.zip"
  displayName: Codesign
- script: |
    set -e
    node build/azure-pipelines/common/sign "$(esrpclient.toolpath)/$(esrpclient.toolname)" darwin-notarize $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(Build.ArtifactStagingDirectory)/pkg "*.zip"
  displayName: Notarize

# Strip the "unsigned_" prefix from each zip's name and publish the signed
# zip under the stripped artifact name (ASSET_ID is set per iteration at
# runtime via the ##vso logging command).
- ${{ each target in parameters.VSCODE_CLI_ARTIFACTS }}:
  - script: |
      set -e
      ASSET_ID=$(echo "${{ target }}" | sed "s/unsigned_//")
      mv $(Build.ArtifactStagingDirectory)/pkg/${{ target }}/${{ target }}.zip $(Build.ArtifactStagingDirectory)/pkg/${{ target }}/$ASSET_ID.zip
      echo "##vso[task.setvariable variable=ASSET_ID]$ASSET_ID"
    displayName: Set asset id variable
  - publish: $(Build.ArtifactStagingDirectory)/pkg/${{ target }}/$(ASSET_ID).zip
    artifact: $(ASSET_ID)

View file

@ -0,0 +1,65 @@
# Template: ESRP-codesign the Windows CLI executables and re-publish them
# under their signed (non-"unsigned_"-prefixed) artifact names.
parameters:
- name: VSCODE_CLI_ARTIFACTS # names of the unsigned_* win32 CLI artifacts
  type: object
  default: []

steps:
- task: AzureKeyVault@1
  displayName: "Azure Key Vault: Get Secrets"
  inputs:
    azureSubscription: "vscode-builds-subscription"
    KeyVaultName: vscode
    SecretsFilter: "ESRP-PKI,esrp-aad-username,esrp-aad-password"
- task: UseDotNet@2
  displayName: "Use .NET"
  inputs:
    version: 3.x
- task: EsrpClientTool@1
  displayName: "Use ESRP client"

# Download each unsigned zip and extract its exe into sign/<artifact>/.
- ${{ each target in parameters.VSCODE_CLI_ARTIFACTS }}:
  - task: DownloadPipelineArtifact@2
    displayName: Download artifacts
    inputs:
      artifact: ${{ target }}
      path: $(Build.ArtifactStagingDirectory)/pkg/${{ target }}
  - task: ExtractFiles@1
    inputs:
      archiveFilePatterns: $(Build.ArtifactStagingDirectory)/pkg/${{ target }}/*.zip
      destinationFolder: $(Build.ArtifactStagingDirectory)/sign/${{ target }}

# Locate the esrpcli.dll that EsrpClientTool dropped into the agent's task
# cache and export its path for the signing step below.
- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    $EsrpClientTool = (gci -directory -filter EsrpClientTool_* $(Agent.RootDirectory)\_tasks | Select-Object -last 1).FullName
    $EsrpCliZip = (gci -recurse -filter esrpcli.*.zip $EsrpClientTool | Select-Object -last 1).FullName
    mkdir -p $(Agent.TempDirectory)\esrpcli
    Expand-Archive -Path $EsrpCliZip -DestinationPath $(Agent.TempDirectory)\esrpcli
    $EsrpCliDllPath = (gci -recurse -filter esrpcli.dll $(Agent.TempDirectory)\esrpcli | Select-Object -last 1).FullName
    echo "##vso[task.setvariable variable=EsrpCliDllPath]$EsrpCliDllPath"
  displayName: Find ESRP CLI

# Sign all extracted executables in one batch ("*.exe" glob under sign/).
- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { node build\azure-pipelines\common\sign $env:EsrpCliDllPath windows $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(Build.ArtifactStagingDirectory)/sign "*.exe" }
  displayName: "Code sign"

# Re-zip each signed exe and publish it under the stripped artifact name
# (ASSET_ID is set per iteration at runtime via the ##vso logging command).
- ${{ each target in parameters.VSCODE_CLI_ARTIFACTS }}:
  - powershell: |
      $ASSET_ID = "${{ target }}".replace("unsigned_", "");
      echo "##vso[task.setvariable variable=ASSET_ID]$ASSET_ID"
    displayName: Set asset id variable
  - task: ArchiveFiles@2
    inputs:
      rootFolderOrFile: $(Build.ArtifactStagingDirectory)/sign/${{ target }}/code-tunnel.exe
      includeRootFolder: false
      archiveType: zip
      archiveFile: $(Build.ArtifactStagingDirectory)/$(ASSET_ID).zip
  - publish: $(Build.ArtifactStagingDirectory)/$(ASSET_ID).zip
    artifact: $(ASSET_ID)

View file

@ -0,0 +1,37 @@
# Template: install Rust (via rustup) on POSIX build agents.
parameters:
- name: channel # rustup toolchain channel (stable/beta/nightly or a pinned version)
  type: string
  default: stable
- name: targets # extra compilation targets to add via `rustup target add`
  default: []
  type: object

# Todo: use 1ES pipeline once extension is installed in ADO
steps:
# Install rustup non-interactively, then prepend ~/.cargo/bin to PATH so
# later steps in this job can find cargo/rustc/rustup.
- script: |
    set -e
    curl https://sh.rustup.rs -sSf | sh -s -- -y --profile minimal --default-toolchain $RUSTUP_TOOLCHAIN
    echo "##vso[task.setvariable variable=PATH;]$PATH:$HOME/.cargo/bin"
  env:
    RUSTUP_TOOLCHAIN: ${{ parameters.channel }}
  displayName: "Install Rust"
# Ensure the requested channel is default and up to date, even if rustup was
# already present on the agent.
- script: |
    set -e
    rustup default $RUSTUP_TOOLCHAIN
    rustup update $RUSTUP_TOOLCHAIN
  env:
    RUSTUP_TOOLCHAIN: ${{ parameters.channel }}
  displayName: "Set Rust version"
- ${{ each target in parameters.targets }}:
  - script: rustup target add ${{ target }}
    displayName: "Adding Rust target '${{ target }}'"
# Sanity-check the installed toolchain versions in the build log.
- script: |
    set -e
    rustc --version
    cargo --version
    rustup --version
  displayName: "Check Rust versions"

View file

@ -0,0 +1,38 @@
# Template: install Rust (via rustup) on Windows build agents.
parameters:
- name: channel # rustup toolchain channel (stable/beta/nightly or a pinned version)
  type: string
  default: stable
- name: targets # extra compilation targets to add via `rustup target add`
  default: []
  type: object

# Todo: use 1ES pipeline once extension is installed in ADO
steps:
# Download and run rustup-init, then prepend %USERPROFILE%\.cargo\bin to PATH
# so later steps in this job can find cargo/rustc/rustup.
- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    # curl.exe explicitly: in Windows PowerShell plain `curl` is an alias for
    # Invoke-WebRequest, which does not understand curl's flags.
    exec { curl.exe -sSf -o rustup-init.exe https://win.rustup.rs }
    # `.\` is required to run an executable from the working directory, and
    # environment variables are read as $env:NAME (not %NAME%) in PowerShell.
    exec { .\rustup-init.exe -y --profile minimal --default-toolchain $env:RUSTUP_TOOLCHAIN --default-host x86_64-pc-windows-msvc }
    echo "##vso[task.prependpath]$env:USERPROFILE\.cargo\bin"
  env:
    RUSTUP_TOOLCHAIN: ${{ parameters.channel }}
  displayName: "Install Rust"
# Ensure the requested channel is default and up to date, even if rustup was
# already present on the agent.
- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    # $env: prefix is required — a bare $RUSTUP_TOOLCHAIN is an undefined
    # PowerShell variable and would expand to an empty string.
    exec { rustup default $env:RUSTUP_TOOLCHAIN }
    exec { rustup update $env:RUSTUP_TOOLCHAIN }
  env:
    RUSTUP_TOOLCHAIN: ${{ parameters.channel }}
  displayName: "Set Rust version"
- ${{ each target in parameters.targets }}:
  - script: rustup target add ${{ target }}
    displayName: "Adding Rust target '${{ target }}'"
# Sanity-check the installed toolchain versions in the build log.
- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    exec { rustc --version }
    exec { cargo --version }
    exec { rustup --version }
  displayName: "Check Rust versions"

View file

@ -0,0 +1,32 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 *  Copyright (c) Microsoft Corporation. All rights reserved.
 *  Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
// NOTE: compiled output of prepare.ts in this directory — edit the
// TypeScript source and recompile rather than changing this file.
Object.defineProperty(exports, "__esModule", { value: true });
const getVersion_1 = require("../../lib/getVersion");
const fs = require("fs");
const path = require("path");
const packageJson = require("../../../package.json");
// Repository root: three directory levels above this script's directory.
const root = path.dirname(path.dirname(path.dirname(__dirname)));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = (0, getVersion_1.getVersion)(root);
/**
 * Sets build environment variables for the CLI for current contextual info.
 *
 * Emits Azure DevOps `##vso[task.setvariable]` logging commands so later
 * pipeline steps see these values as variables; entries with falsy values
 * (e.g. fields missing from product.json) are skipped.
 */
const setLauncherEnvironmentVars = () => {
    const vars = new Map([
        ['VSCODE_CLI_REMOTE_LICENSE_TEXT', product.serverLicense],
        ['VSCODE_CLI_REMOTE_LICENSE_PROMPT', product.serverLicensePrompt],
        ['VSCODE_CLI_VERSION', packageJson.version],
        ['VSCODE_CLI_COMMIT', commit],
    ]);
    for (const [key, value] of vars) {
        if (value) {
            console.log(`##vso[task.setvariable variable=${key}]${value}`);
        }
    }
};
// Run only when invoked directly (node prepare.js), not when required.
if (require.main === module) {
    setLauncherEnvironmentVars();
}

View file

@ -0,0 +1,35 @@
/*---------------------------------------------------------------------------------------------
 *  Copyright (c) Microsoft Corporation. All rights reserved.
 *  Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
import { getVersion } from '../../lib/getVersion';
import * as fs from 'fs';
import * as path from 'path';
import * as packageJson from '../../../package.json';
// Repository root: three directory levels above this script's directory.
const root = path.dirname(path.dirname(path.dirname(__dirname)));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = getVersion(root);
/**
 * Sets build environment variables for the CLI for current contextual info.
 *
 * Emits Azure DevOps `##vso[task.setvariable]` logging commands so later
 * pipeline steps see these values as variables; entries with falsy values
 * (e.g. fields missing from product.json) are skipped.
 */
const setLauncherEnvironmentVars = () => {
	const vars = new Map([
		['VSCODE_CLI_REMOTE_LICENSE_TEXT', product.serverLicense],
		['VSCODE_CLI_REMOTE_LICENSE_PROMPT', product.serverLicensePrompt],
		['VSCODE_CLI_VERSION', packageJson.version],
		['VSCODE_CLI_COMMIT', commit],
	]);
	for (const [key, value] of vars) {
		if (value) {
			console.log(`##vso[task.setvariable variable=${key}]${value}`);
		}
	}
};
// Run only when executed directly (compiled to CommonJS), not when imported.
if (require.main === module) {
	setLauncherEnvironmentVars();
}

View file

@ -0,0 +1,21 @@
# Template: lint (clippy) and unit-test the Rust CLI sources in cli/.
parameters:
- name: VSCODE_CLI_TARGETS # NOTE(review): declared but currently unused
  default: []
  type: object
- name: VSCODE_CLI_RUST_CHANNEL # rustup toolchain channel to test with
  type: string
  default: stable

steps:
# NOTE(review): this references ./install-rust.yml, but the files added
# alongside this template are install-rust-posix.yml / install-rust-win32.yml
# — confirm install-rust.yml exists or point this at the posix installer.
- template: ./install-rust.yml
  parameters:
    targets: []
    channel: ${{ parameters.VSCODE_CLI_RUST_CHANNEL }}
# Fail the build on any clippy warning (-D warnings promotes them to errors).
- script: rustup component add clippy && cargo clippy -- -D warnings
  workingDirectory: cli
  displayName: Clippy lint
- script: cargo test
  workingDirectory: cli
  displayName: Run unit tests

View file

@ -0,0 +1 @@
vcpkg_installed

View file

@ -0,0 +1,9 @@
{
"name": "vscode-cli",
"dependencies": [
{
"name": "openssl"
}
],
"builtin-baseline": "23cc58477e200bb54c293ad76f3ce438dbabc86c"
}

View file

@ -43,6 +43,8 @@ function getPlatform(product, os, arch, type) {
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
return arch === 'ia32' ? 'server-win32-web' : `server-win32-${arch}-web`;
case 'cli':
return `cli-win32-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
@ -52,6 +54,8 @@ function getPlatform(product, os, arch, type) {
return `server-alpine-${arch}`;
case 'web':
return `server-alpine-${arch}-web`;
case 'cli':
return `cli-alpine-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
@ -74,6 +78,8 @@ function getPlatform(product, os, arch, type) {
return `linux-deb-${arch}`;
case 'rpm-package':
return `linux-rpm-${arch}`;
case 'cli':
return `cli-linux-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
@ -94,6 +100,8 @@ function getPlatform(product, os, arch, type) {
return 'server-darwin-web';
}
return `server-darwin-${arch}-web`;
case 'cli':
return `cli-darwin-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}

View file

@ -56,6 +56,8 @@ function getPlatform(product: string, os: string, arch: string, type: string): s
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
return arch === 'ia32' ? 'server-win32-web' : `server-win32-${arch}-web`;
case 'cli':
return `cli-win32-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
@ -65,6 +67,8 @@ function getPlatform(product: string, os: string, arch: string, type: string): s
return `server-alpine-${arch}`;
case 'web':
return `server-alpine-${arch}-web`;
case 'cli':
return `cli-alpine-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
@ -87,6 +91,8 @@ function getPlatform(product: string, os: string, arch: string, type: string): s
return `linux-deb-${arch}`;
case 'rpm-package':
return `linux-rpm-${arch}`;
case 'cli':
return `cli-linux-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
@ -107,6 +113,8 @@ function getPlatform(product: string, os: string, arch: string, type: string): s
return 'server-darwin-web';
}
return `server-darwin-${arch}-web`;
case 'cli':
return `cli-darwin-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}

View file

@ -0,0 +1,46 @@
# Template: compile the VS Code CLI for macOS (x64 and/or arm64) and publish
# the unsigned binaries for the later darwin sign/notarize job.
parameters:
- name: VSCODE_QUALITY
  type: string
- name: VSCODE_BUILD_MACOS # build the x64 CLI
  type: boolean
- name: VSCODE_BUILD_MACOS_ARM64 # build the arm64 CLI
  type: boolean
- name: channel # rustup toolchain channel
  type: string
  default: stable

steps:
- task: NodeTool@0
  inputs:
    versionSpec: "16.x"
- template: ../mixin-distro-posix.yml
  parameters:
    # Use the declared parameter, not the pipeline variable, so the template
    # behaves the same regardless of the caller's variable setup.
    VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }}
# Exports VSCODE_CLI_VERSION / license variables consumed by the compile step.
- script: |
    set -e
    node build/azure-pipelines/cli/prepare.js
  displayName: Prepare CLI build
  env:
    GITHUB_TOKEN: "$(github-distro-mixin-password)"
- template: ../cli/install-rust-posix.yml
  parameters:
    # Forward the requested toolchain channel (previously dropped, silently
    # always installing the installer template's default).
    channel: ${{ parameters.channel }}
    targets:
      - ${{ if eq(parameters.VSCODE_BUILD_MACOS, true) }}:
        - x86_64-apple-darwin
      - ${{ if eq(parameters.VSCODE_BUILD_MACOS_ARM64, true) }}:
        - aarch64-apple-darwin
- ${{ if eq(parameters.VSCODE_BUILD_MACOS, true) }}:
  - template: ../cli/cli-compile-and-publish.yml
    parameters:
      VSCODE_CLI_TARGET: x86_64-apple-darwin
      VSCODE_CLI_ARTIFACT: unsigned_vscode_cli_darwin_x64_cli
- ${{ if eq(parameters.VSCODE_BUILD_MACOS_ARM64, true) }}:
  - template: ../cli/cli-compile-and-publish.yml
    parameters:
      VSCODE_CLI_TARGET: aarch64-apple-darwin
      VSCODE_CLI_ARTIFACT: unsigned_vscode_cli_darwin_arm64_cli

View file

@ -0,0 +1,45 @@
# Template: job steps that sign + notarize the unsigned darwin CLI artifacts
# produced by the CompileCLI stage.
parameters:
- name: VSCODE_BUILD_MACOS # include the x64 artifact
  type: boolean
- name: VSCODE_BUILD_MACOS_ARM64 # include the arm64 artifact
  type: boolean

steps:
- task: NodeTool@0
  inputs:
    versionSpec: "16.x"
- task: AzureKeyVault@1
  displayName: "Azure Key Vault: Get Secrets"
  inputs:
    azureSubscription: "vscode-builds-subscription"
    KeyVaultName: vscode
    SecretsFilter: "github-distro-mixin-password,ESRP-PKI,esrp-aad-username,esrp-aad-password"
- script: |
    set -e
    npx https://aka.ms/enablesecurefeed standAlone
  timeoutInMinutes: 5
  retryCountOnTaskFailure: 3
  condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
  displayName: Switch to Terrapin packages
# Install the build/ folder's node dependencies; the loop retries because the
# Terrapin feed can fail transiently.
- script: |
    set -e
    for i in {1..3}; do # try 3 times, for Terrapin
      yarn --cwd build --frozen-lockfile --check-files && break
      if [ $i -eq 3 ]; then
        echo "Yarn failed too many times" >&2
        exit 1
      fi
      echo "Yarn failed $i, trying again..."
    done
  displayName: Install build dependencies
- template: ../cli/cli-darwin-sign.yml
  parameters:
    # Only artifacts for architectures enabled by the caller are included.
    VSCODE_CLI_ARTIFACTS:
      - ${{ if eq(parameters.VSCODE_BUILD_MACOS, true) }}:
        - unsigned_vscode_cli_darwin_x64_cli
      - ${{ if eq(parameters.VSCODE_BUILD_MACOS_ARM64, true) }}:
        - unsigned_vscode_cli_darwin_arm64_cli

View file

@ -9,6 +9,8 @@ parameters:
type: boolean
- name: VSCODE_RUN_SMOKE_TESTS
type: boolean
- name: VSCODE_BUILD_TUNNEL_CLI
type: boolean
steps:
- ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}:
@ -173,6 +175,40 @@ steps:
yarn gulp "transpile-client-swc" "transpile-extensions"
displayName: Transpile
- script: |
set -e
APP_ROOT="$(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH)"
APP_NAME="`ls $APP_ROOT | head -n 1`"
echo "##vso[task.setvariable variable=APP_PATH]$APP_ROOT/$APP_NAME"
displayName: Find application path
- ${{ if eq(parameters.VSCODE_BUILD_TUNNEL_CLI, true) }}:
- task: DownloadPipelineArtifact@2
inputs:
artifact: unsigned_vscode_cli_darwin_arm64_cli
patterns: '**'
path: $(Build.ArtifactStagingDirectory)/cli
displayName: Download VS Code CLI
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'arm64'))
- task: DownloadPipelineArtifact@2
inputs:
artifact: unsigned_vscode_cli_darwin_x64_cli
patterns: '**'
path: $(Build.ArtifactStagingDirectory)/cli
displayName: Download VS Code CLI
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
- script: |
set -e
ARCHIVE_NAME=$(ls "$(Build.ArtifactStagingDirectory)/cli" | head -n 1)
unzip "$(Build.ArtifactStagingDirectory)/cli/$ARCHIVE_NAME" -d "$(APP_PATH)/Contents/Resources/app/bin"
chmod +x "$(APP_PATH)/Contents/Resources/app/bin/code-tunnel"
if [ "$(VSCODE_QUALITY)" != "stable" ]; then
mv "$(APP_PATH)/Contents/Resources/app/bin/code-tunnel" "$(APP_PATH)/Contents/Resources/app/bin/code-tunnel-$(VSCODE_QUALITY)"
fi
displayName: Make CLI executable
- ${{ if or(eq(parameters.VSCODE_RUN_UNIT_TESTS, true), eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, true), eq(parameters.VSCODE_RUN_SMOKE_TESTS, true)) }}:
- template: product-build-darwin-test.yml
parameters:

View file

@ -0,0 +1,84 @@
# Template: compile the VS Code CLI for Linux — glibc and musl (Alpine),
# x64 and arm64 — and publish each binary as a pipeline artifact.
parameters:
- name: VSCODE_BUILD_LINUX_ALPINE # x64 musl build
  type: boolean
  default: false
- name: VSCODE_BUILD_LINUX # x64 glibc build
  type: boolean
  default: false
- name: VSCODE_BUILD_LINUX_ALPINE_ARM64 # arm64 musl build
  type: boolean
  default: false
- name: VSCODE_BUILD_LINUX_ARM64 # arm64 glibc build
  type: boolean
  default: false
- name: VSCODE_QUALITY
  type: string
- name: channel # rustup toolchain channel
  type: string
  default: stable

steps:
# inspired by: https://github.com/emk/rust-musl-builder/blob/main/Dockerfile
# musl-tools provides musl-gcc but no musl-g++, so alias it to the system g++.
- bash: |
    set -e
    sudo apt-get update
    sudo apt-get install -yq build-essential cmake curl file git graphviz musl-dev musl-tools linux-libc-dev pkgconf unzip xutils-dev
    sudo ln -s "/usr/bin/g++" "/usr/bin/musl-g++" || echo "link exists"
  displayName: Install build dependencies
- task: NodeTool@0
  inputs:
    versionSpec: "16.x"
- template: ../mixin-distro-posix.yml
  parameters:
    # Use the declared parameter, not the pipeline variable, so the template
    # behaves the same regardless of the caller's variable setup.
    VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }}
# Exports VSCODE_CLI_VERSION / license variables consumed by the compile steps.
- script: |
    set -e
    node build/azure-pipelines/cli/prepare.js
  displayName: Prepare CLI build
  env:
    GITHUB_TOKEN: "$(github-distro-mixin-password)"
- template: ../cli/install-rust-posix.yml
  parameters:
    # Forward the requested toolchain channel (previously dropped, silently
    # always installing the installer template's default).
    channel: ${{ parameters.channel }}
    targets:
      - ${{ if eq(parameters.VSCODE_BUILD_LINUX_ALPINE_ARM64, true) }}:
        - aarch64-unknown-linux-musl
      - ${{ if eq(parameters.VSCODE_BUILD_LINUX_ALPINE, true) }}:
        - x86_64-unknown-linux-musl
      - ${{ if eq(parameters.VSCODE_BUILD_LINUX_ARM64, true) }}:
        - aarch64-unknown-linux-gnu
      - ${{ if eq(parameters.VSCODE_BUILD_LINUX, true) }}:
        - x86_64-unknown-linux-gnu
- ${{ if eq(parameters.VSCODE_BUILD_LINUX_ALPINE_ARM64, true) }}:
  - template: ../cli/cli-compile-and-publish.yml
    parameters:
      VSCODE_CLI_TARGET: aarch64-unknown-linux-musl
      VSCODE_CLI_ARTIFACT: vscode_cli_alpine_arm64_cli
      VSCODE_CLI_ENV:
        # CC_/CXX_<target> are read by the cc crate to pick the cross compiler.
        CXX_aarch64-unknown-linux-musl: musl-g++
        CC_aarch64-unknown-linux-musl: musl-gcc
- ${{ if eq(parameters.VSCODE_BUILD_LINUX_ALPINE, true) }}:
  - template: ../cli/cli-compile-and-publish.yml
    parameters:
      VSCODE_CLI_TARGET: x86_64-unknown-linux-musl
      VSCODE_CLI_ARTIFACT: vscode_cli_alpine_x64_cli
      VSCODE_CLI_ENV:
        # Fixed copy-paste bug: these variables must name the x86_64 target
        # (they previously named aarch64, so the musl compilers were never
        # picked up for this build).
        CXX_x86_64-unknown-linux-musl: musl-g++
        CC_x86_64-unknown-linux-musl: musl-gcc
- ${{ if eq(parameters.VSCODE_BUILD_LINUX_ARM64, true) }}:
  - template: ../cli/cli-compile-and-publish.yml
    parameters:
      VSCODE_CLI_TARGET: aarch64-unknown-linux-gnu
      VSCODE_CLI_ARTIFACT: vscode_cli_linux_arm64_cli
- ${{ if eq(parameters.VSCODE_BUILD_LINUX, true) }}:
  - template: ../cli/cli-compile-and-publish.yml
    parameters:
      VSCODE_CLI_TARGET: x86_64-unknown-linux-gnu
      VSCODE_CLI_ARTIFACT: vscode_cli_linux_x64_cli

View file

@ -9,6 +9,8 @@ parameters:
type: boolean
- name: VSCODE_RUN_SMOKE_TESTS
type: boolean
- name: VSCODE_BUILD_TUNNEL_CLI
type: boolean
steps:
- ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}:
@ -252,6 +254,31 @@ steps:
yarn gulp "transpile-client-swc" "transpile-extensions"
displayName: Transpile
- ${{ if eq(parameters.VSCODE_BUILD_TUNNEL_CLI, true) }}:
- task: DownloadPipelineArtifact@2
inputs:
artifact: vscode_cli_linux_arm64_cli
patterns: '**'
path: $(Build.ArtifactStagingDirectory)/cli
displayName: Download VS Code CLI
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'arm64'))
- task: DownloadPipelineArtifact@2
inputs:
artifact: vscode_cli_linux_x64_cli
patterns: '**'
path: $(Build.ArtifactStagingDirectory)/cli
displayName: Download VS Code CLI
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
- script: |
set -e
tar -xzvf $(Build.ArtifactStagingDirectory)/cli/*.tar.gz -C $(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)/bin
if [ "$(VSCODE_QUALITY)" != "stable" ]; then
mv "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)/bin/code-tunnel" "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)/bin/code-tunnel-$(VSCODE_QUALITY)"
fi
displayName: Make CLI executable
- ${{ if or(eq(parameters.VSCODE_RUN_UNIT_TESTS, true), eq(parameters.VSCODE_RUN_INTEGRATION_TESTS, true), eq(parameters.VSCODE_RUN_SMOKE_TESTS, true)) }}:
- template: product-build-linux-client-test.yml
parameters:

View file

@ -0,0 +1,40 @@
# Template: merge the private "distro" repository into the checkout for
# non-OSS builds (POSIX variant). Every step is skipped when VSCODE_QUALITY
# is 'oss'.
parameters:
- name: VSCODE_QUALITY
  type: string

steps:
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
  - task: AzureKeyVault@1
    displayName: "Azure Key Vault: Get Secrets"
    inputs:
      azureSubscription: "vscode-builds-subscription"
      KeyVaultName: vscode
      SecretsFilter: "github-distro-mixin-password"
# Write GitHub credentials (~/.netrc) and a git identity for the fetch/pull
# steps below.
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
  - script: |
      set -e
      cat << EOF > ~/.netrc
      machine github.com
      login vscode
      password $(github-distro-mixin-password)
      EOF
      git config user.email "vscode@microsoft.com"
      git config user.name "VSCode"
    displayName: Prepare tooling
# When a distro ref override is provided, check out that exact commit and
# export it as VSCODE_DISTRO_COMMIT for later steps.
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
  - script: |
      set -e
      git fetch https://github.com/$(VSCODE_MIXIN_REPO).git $VSCODE_DISTRO_REF
      echo "##vso[task.setvariable variable=VSCODE_DISTRO_COMMIT;]$(git rev-parse FETCH_HEAD)"
      git checkout FETCH_HEAD
    # NOTE(review): compares against a literal single space — presumably the
    # pipeline seeds VSCODE_DISTRO_REF with ' ' when no override is set; confirm.
    condition: and(succeeded(), ne(variables.VSCODE_DISTRO_REF, ' '))
    displayName: Checkout override commit
# Merge the distro commit pinned in package.json's "distro" field.
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
  - script: |
      set -e
      git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
    displayName: Merge distro

View file

@ -0,0 +1,40 @@
parameters:
- name: VSCODE_QUALITY
type: string
steps:
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password"
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
"machine github.com`nlogin vscode`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
exec { git config user.email "vscode@microsoft.com" }
exec { git config user.name "VSCode" }
displayName: Prepare tooling
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { git fetch https://github.com/$(VSCODE_MIXIN_REPO).git $(VSCODE_DISTRO_REF) }
Write-Host "##vso[task.setvariable variable=VSCODE_DISTRO_COMMIT;]$(git rev-parse FETCH_HEAD)"
exec { git checkout FETCH_HEAD }
condition: and(succeeded(), ne(variables.VSCODE_DISTRO_REF, ' '))
displayName: Checkout override commit
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro") }
displayName: Merge distro

View file

@ -48,6 +48,7 @@ jobs:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: false
VSCODE_RUN_UNIT_TESTS: true
VSCODE_RUN_INTEGRATION_TESTS: false
VSCODE_RUN_SMOKE_TESTS: false
@ -65,6 +66,7 @@ jobs:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: false
VSCODE_RUN_UNIT_TESTS: false
VSCODE_RUN_INTEGRATION_TESTS: true
VSCODE_RUN_SMOKE_TESTS: false
@ -82,6 +84,7 @@ jobs:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: false
VSCODE_RUN_UNIT_TESTS: false
VSCODE_RUN_INTEGRATION_TESTS: false
VSCODE_RUN_SMOKE_TESTS: true
@ -96,6 +99,14 @@ jobs:
steps:
- template: product-build-pr-cache.yml
- ${{ if eq(variables['VSCODE_CIBUILD'], true) }}:
- job: LinuxCLI
displayName: Linux (CLI)
pool: vscode-1es-vscode-linux-20.04
timeoutInMinutes: 30
steps:
- template: cli/test.yml
# - job: macOSUnitTest
# displayName: macOS (Unit Tests)
# pool:

View file

@ -77,6 +77,10 @@ parameters:
displayName: "🎯 Web"
type: boolean
default: true
- name: VSCODE_BUILD_TUNNEL_CLI
displayName: "Build Tunnel CLI"
type: boolean
default: false
- name: VSCODE_PUBLISH
displayName: "Publish to builds.code.visualstudio.com"
type: boolean
@ -171,10 +175,71 @@ stages:
parameters:
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
- ${{ if eq(parameters.VSCODE_BUILD_TUNNEL_CLI, true) }}:
- stage: CompileCLI
dependsOn: []
jobs:
- ${{ if or(eq(parameters.VSCODE_BUILD_LINUX, true), eq(parameters.VSCODE_BUILD_LINUX_ALPINE, true)) }}:
- job: LinuxX86
pool: vscode-1es-linux
variables:
VSCODE_ARCH: x64
steps:
- template: ./linux/cli-build-linux.yml
parameters:
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_LINUX_ALPINE: ${{ parameters.VSCODE_BUILD_LINUX_ALPINE }}
VSCODE_BUILD_LINUX: ${{ parameters.VSCODE_BUILD_LINUX }}
- ${{ if or(eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true), eq(parameters.VSCODE_BUILD_LINUX_ARM64, true), eq(parameters.VSCODE_BUILD_LINUX_ALPINE_ARM64, true)) }}:
- job: LinuxArm64
pool: vscode-1es-linux-20.04-arm64
variables:
VSCODE_ARCH: arm64
steps:
- task: NodeTool@0
displayName: Install Node.js
inputs:
versionSpec: 16.x
- script: |
set -e
npm install -g yarn
sudo apt update -y
sudo apt install -y build-essential pkg-config
displayName: Install build dependencies
- template: ./linux/cli-build-linux.yml
parameters:
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_LINUX_ALPINE_ARM64: ${{ parameters.VSCODE_BUILD_LINUX_ALPINE_ARM64 }}
VSCODE_BUILD_LINUX_ARM64: ${{ parameters.VSCODE_BUILD_LINUX_ARM64 }}
- ${{ if eq(variables.VSCODE_BUILD_STAGE_MACOS, true) }}:
- job: MacOS
pool:
vmImage: macOS-latest
steps:
- template: ./darwin/cli-build-darwin.yml
parameters:
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_MACOS: ${{ parameters.VSCODE_BUILD_MACOS }}
VSCODE_BUILD_MACOS_ARM64: ${{ parameters.VSCODE_BUILD_MACOS_ARM64 }}
- ${{ if eq(variables.VSCODE_BUILD_STAGE_WINDOWS, true) }}:
- job: Windows
pool: vscode-1es-windows
steps:
- template: ./win32/cli-build-win32.yml
parameters:
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_WIN32: ${{ parameters.VSCODE_BUILD_WIN32 }}
VSCODE_BUILD_WIN32_ARM64: ${{ parameters.VSCODE_BUILD_WIN32_ARM64 }}
- ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true)) }}:
- stage: Windows
dependsOn:
- Compile
- ${{ if eq(parameters.VSCODE_BUILD_TUNNEL_CLI, true) }}:
- CompileCLI
pool: vscode-1es-windows
jobs:
- ${{ if eq(variables['VSCODE_CIBUILD'], true) }}:
@ -188,6 +253,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: false
VSCODE_RUN_UNIT_TESTS: true
VSCODE_RUN_INTEGRATION_TESTS: false
VSCODE_RUN_SMOKE_TESTS: false
@ -201,6 +267,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: false
VSCODE_RUN_UNIT_TESTS: false
VSCODE_RUN_INTEGRATION_TESTS: true
VSCODE_RUN_SMOKE_TESTS: false
@ -214,6 +281,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: false
VSCODE_RUN_UNIT_TESTS: false
VSCODE_RUN_INTEGRATION_TESTS: false
VSCODE_RUN_SMOKE_TESTS: true
@ -228,6 +296,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: ${{ parameters.VSCODE_BUILD_TUNNEL_CLI }}
VSCODE_RUN_UNIT_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }}
VSCODE_RUN_INTEGRATION_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }}
VSCODE_RUN_SMOKE_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }}
@ -242,6 +311,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: false # todo until 32 bit CLI is available
VSCODE_RUN_UNIT_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }}
VSCODE_RUN_INTEGRATION_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }}
VSCODE_RUN_SMOKE_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }}
@ -256,10 +326,20 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: ${{ parameters.VSCODE_BUILD_TUNNEL_CLI }}
VSCODE_RUN_UNIT_TESTS: false
VSCODE_RUN_INTEGRATION_TESTS: false
VSCODE_RUN_SMOKE_TESTS: false
- ${{ if and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_BUILD_TUNNEL_CLI, true)) }}:
- job: windowsCLISign
timeoutInMinutes: 90
steps:
- template: win32/product-build-win32-cli-sign.yml
parameters:
VSCODE_BUILD_WIN32: ${{ parameters.VSCODE_BUILD_WIN32 }}
VSCODE_BUILD_WIN32_ARM64: ${{ parameters.VSCODE_BUILD_WIN32_ARM64 }}
- ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_LINUX'], true)) }}:
- stage: LinuxServerDependencies
dependsOn: [] # run in parallel to compile stage
@ -290,6 +370,8 @@ stages:
dependsOn:
- Compile
- LinuxServerDependencies
- ${{ if eq(parameters.VSCODE_BUILD_TUNNEL_CLI, true) }}:
- CompileCLI
pool: vscode-1es-linux
jobs:
- ${{ if eq(variables['VSCODE_CIBUILD'], true) }}:
@ -305,6 +387,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: false
VSCODE_RUN_UNIT_TESTS: true
VSCODE_RUN_INTEGRATION_TESTS: false
VSCODE_RUN_SMOKE_TESTS: false
@ -320,6 +403,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: false
VSCODE_RUN_UNIT_TESTS: false
VSCODE_RUN_INTEGRATION_TESTS: true
VSCODE_RUN_SMOKE_TESTS: false
@ -335,6 +419,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: false
VSCODE_RUN_UNIT_TESTS: false
VSCODE_RUN_INTEGRATION_TESTS: false
VSCODE_RUN_SMOKE_TESTS: true
@ -351,6 +436,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: ${{ parameters.VSCODE_BUILD_TUNNEL_CLI }}
VSCODE_RUN_UNIT_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }}
VSCODE_RUN_INTEGRATION_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }}
VSCODE_RUN_SMOKE_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }}
@ -376,6 +462,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: false # todo: not built yet
VSCODE_RUN_UNIT_TESTS: false
VSCODE_RUN_INTEGRATION_TESTS: false
VSCODE_RUN_SMOKE_TESTS: false
@ -402,6 +489,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: ${{ parameters.VSCODE_BUILD_TUNNEL_CLI }}
VSCODE_RUN_UNIT_TESTS: false
VSCODE_RUN_INTEGRATION_TESTS: false
VSCODE_RUN_SMOKE_TESTS: false
@ -436,6 +524,8 @@ stages:
- stage: macOS
dependsOn:
- Compile
- ${{ if eq(parameters.VSCODE_BUILD_TUNNEL_CLI, true) }}:
- CompileCLI
pool:
vmImage: macOS-latest
variables:
@ -452,6 +542,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: false
VSCODE_RUN_UNIT_TESTS: true
VSCODE_RUN_INTEGRATION_TESTS: false
VSCODE_RUN_SMOKE_TESTS: false
@ -465,6 +556,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: false
VSCODE_RUN_UNIT_TESTS: false
VSCODE_RUN_INTEGRATION_TESTS: true
VSCODE_RUN_SMOKE_TESTS: false
@ -478,6 +570,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: false
VSCODE_RUN_UNIT_TESTS: false
VSCODE_RUN_INTEGRATION_TESTS: false
VSCODE_RUN_SMOKE_TESTS: true
@ -492,6 +585,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: ${{ parameters.VSCODE_BUILD_TUNNEL_CLI }}
VSCODE_RUN_UNIT_TESTS: false
VSCODE_RUN_INTEGRATION_TESTS: false
VSCODE_RUN_SMOKE_TESTS: false
@ -506,6 +600,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: false
VSCODE_RUN_UNIT_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }}
VSCODE_RUN_INTEGRATION_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }}
VSCODE_RUN_SMOKE_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }}
@ -520,6 +615,15 @@ stages:
steps:
- template: darwin/product-build-darwin-sign.yml
- ${{ if and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_BUILD_TUNNEL_CLI, true)) }}:
- job: macOSCLISign
timeoutInMinutes: 90
steps:
- template: darwin/product-build-darwin-cli-sign.yml
parameters:
VSCODE_BUILD_MACOS: ${{ parameters.VSCODE_BUILD_MACOS }}
VSCODE_BUILD_MACOS_ARM64: ${{ parameters.VSCODE_BUILD_MACOS_ARM64 }}
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true)) }}:
- job: macOSARM64
timeoutInMinutes: 90
@ -530,6 +634,7 @@ stages:
parameters:
VSCODE_PUBLISH: ${{ variables.VSCODE_PUBLISH }}
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }}
VSCODE_BUILD_TUNNEL_CLI: ${{ parameters.VSCODE_BUILD_TUNNEL_CLI }}
VSCODE_RUN_UNIT_TESTS: false
VSCODE_RUN_INTEGRATION_TESTS: false
VSCODE_RUN_SMOKE_TESTS: false

View file

@ -7,41 +7,9 @@ steps:
inputs:
versionSpec: "16.x"
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password"
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
- script: |
set -e
git fetch https://github.com/$(VSCODE_MIXIN_REPO).git $VSCODE_DISTRO_REF
echo "##vso[task.setvariable variable=VSCODE_DISTRO_COMMIT;]$(git rev-parse FETCH_HEAD)"
git checkout FETCH_HEAD
condition: and(succeeded(), ne(variables.VSCODE_DISTRO_REF, ' '))
displayName: Checkout override commit
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
- script: |
set -e
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
displayName: Merge distro
- template: ./mixin-distro-posix.yml
parameters:
VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }}
- script: |
mkdir -p .build

View file

@ -0,0 +1,38 @@
# Template: build (or restore from cache) native vcpkg dependencies for a set
# of triplets, staging the results into a per-triplet target directory.
parameters:
  # vcpkg triplets to install (e.g. x64-windows-static-md).
  - name: targets
    default: []
    type: object
  # Directory containing the vcpkg.json manifest to install from.
  - name: vcpkgDir
    type: string
  # Directory the built dependencies are staged (and cached) under.
  - name: targetDirectory
    type: string

steps:
  # Clone and bootstrap vcpkg, pinned to a fixed commit for reproducible
  # dependency builds. Guarded so a job including this template more than once
  # only bootstraps a single time.
  - powershell: |
      . build/azure-pipelines/win32/exec.ps1
      exec { git clone https://github.com/microsoft/vcpkg.git $(Build.ArtifactStagingDirectory)/vcpkg }
      exec { cd $(Build.ArtifactStagingDirectory)/vcpkg; git checkout 779ce74ef67d3e12d904da1b15f9ed5626d5f677 }
      exec { $(Build.ArtifactStagingDirectory)/vcpkg/bootstrap-vcpkg.bat }
      Write-Output "##vso[task.setvariable variable=VSCODE_DID_BOOTSTRAP_VCPKG]true"
    displayName: Bootstrap vcpkg
    condition: not(eq(variables.VSCODE_DID_BOOTSTRAP_VCPKG, true))

  - ${{ each target in parameters.targets }}:
      # Restore previously built dependencies for this triplet; the key
      # includes the vcpkg manifest so the cache invalidates when the
      # declared dependencies change.
      - task: Cache@2
        inputs:
          key: '"${{ target }}" | ${{ parameters.vcpkgDir }}/vcpkg.json'
          path: ${{ parameters.targetDirectory }}/${{ target }}
          cacheHitVar: VCPKG_CACHE_RESTORED
        displayName: Cache ${{ target }}

      # Only build when the cache missed.
      - script: $(Build.ArtifactStagingDirectory)/vcpkg/vcpkg.exe install --triplet=${{ target }}
        displayName: vcpkg install ${{ target }}
        workingDirectory: ${{ parameters.vcpkgDir }}
        condition: ne(variables.VCPKG_CACHE_RESTORED, 'true')

      # Move the freshly built triplet output into the cached target
      # directory and clean up vcpkg's working output.
      - powershell: |
          mkdir -p ${{ parameters.targetDirectory }} -Force
          Move-Item -Path ${{ parameters.vcpkgDir }}/vcpkg_installed/${{ target }} -Destination ${{ parameters.targetDirectory }}
          Remove-Item ${{ parameters.vcpkgDir }}/vcpkg_installed -Recurse -Force
        displayName: Stage vcpkg dependencies for ${{ target }}
        condition: ne(variables.VCPKG_CACHE_RESTORED, 'true')

View file

@ -0,0 +1,63 @@
# Template: compile the Windows CLI (code-tunnel.exe) for the enabled
# architectures and publish the unsigned artifacts for later signing.
parameters:
  - name: VSCODE_BUILD_WIN32
    type: boolean
  - name: VSCODE_BUILD_WIN32_ARM64
    type: boolean
  - name: VSCODE_QUALITY
    type: string
  # NOTE(review): `channel` is declared but not referenced below — confirm it
  # is consumed by a caller or a nested template before removing.
  - name: channel
    type: string
    default: stable

steps:
  - task: NodeTool@0
    inputs:
      versionSpec: "16.x"

  - template: ../mixin-distro-win32.yml
    parameters:
      # Fix: use the declared template parameter rather than
      # `variables.VSCODE_QUALITY` — the parameter was otherwise dead and the
      # template silently depended on a pipeline variable instead.
      VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }}

  # Generates the CLI build inputs (version, product details) from the repo.
  - powershell: |
      . build/azure-pipelines/win32/exec.ps1
      $ErrorActionPreference = "Stop"
      exec { node build/azure-pipelines/cli/prepare.js }
    displayName: Prepare CLI build
    env:
      GITHUB_TOKEN: "$(github-distro-mixin-password)"

  # Install Rust toolchains for the enabled target triples.
  - template: ../cli/install-rust-win32.yml
    parameters:
      targets:
        - ${{ if eq(parameters.VSCODE_BUILD_WIN32, true) }}:
            - x86_64-pc-windows-msvc
        - ${{ if eq(parameters.VSCODE_BUILD_WIN32_ARM64, true) }}:
            - aarch64-pc-windows-msvc

  # Build native dependencies (OpenSSL) via vcpkg for each architecture.
  - template: ../vcpkg-install.yml
    parameters:
      targets:
        - ${{ if eq(parameters.VSCODE_BUILD_WIN32, true) }}:
            - x64-windows-static-md
        - ${{ if eq(parameters.VSCODE_BUILD_WIN32_ARM64, true) }}:
            - arm64-windows-static-md
      vcpkgDir: $(Build.SourcesDirectory)/build/azure-pipelines/cli/vcpkg
      targetDirectory: $(Build.ArtifactStagingDirectory)/deps

  - ${{ if eq(parameters.VSCODE_BUILD_WIN32, true) }}:
      - template: ../cli/cli-compile-and-publish.yml
        parameters:
          VSCODE_CLI_TARGET: x86_64-pc-windows-msvc
          VSCODE_CLI_ARTIFACT: unsigned_vscode_cli_win32_x64_cli
          VSCODE_CLI_ENV:
            OPENSSL_LIB_DIR: $(Build.ArtifactStagingDirectory)/deps/x64-windows-static-md/lib
            OPENSSL_INCLUDE_DIR: $(Build.ArtifactStagingDirectory)/deps/x64-windows-static-md/include

  - ${{ if eq(parameters.VSCODE_BUILD_WIN32_ARM64, true) }}:
      - template: ../cli/cli-compile-and-publish.yml
        parameters:
          VSCODE_CLI_TARGET: aarch64-pc-windows-msvc
          VSCODE_CLI_ARTIFACT: unsigned_vscode_cli_win32_arm64_cli
          VSCODE_CLI_ENV:
            OPENSSL_LIB_DIR: $(Build.ArtifactStagingDirectory)/deps/arm64-windows-static-md/lib
            OPENSSL_INCLUDE_DIR: $(Build.ArtifactStagingDirectory)/deps/arm64-windows-static-md/include

View file

@ -0,0 +1,32 @@
# Template: sign the unsigned Windows CLI artifacts produced by the compile
# stage for the enabled architectures.
parameters:
  - name: VSCODE_BUILD_WIN32
    type: boolean
  - name: VSCODE_BUILD_WIN32_ARM64
    type: boolean

# Fix: removed the `variables` block that assigned a sequence to
# VSCODE_CLI_ARTIFACTS — Azure Pipelines variable definitions only take string
# values, and the list was duplicated inline in the template call below anyway.

steps:
  - task: NodeTool@0
    displayName: "Use Node.js"
    inputs:
      versionSpec: "16.x"

  # The signing scripts live under build/ and need their node dependencies.
  - pwsh: |
      . build/azure-pipelines/win32/exec.ps1
      cd build
      exec { yarn }
    displayName: Install build dependencies

  - template: ../cli/cli-win32-sign.yml
    parameters:
      VSCODE_CLI_ARTIFACTS:
        - ${{ if eq(parameters.VSCODE_BUILD_WIN32, true) }}:
            - unsigned_vscode_cli_win32_x64_cli
        - ${{ if eq(parameters.VSCODE_BUILD_WIN32_ARM64, true) }}:
            - unsigned_vscode_cli_win32_arm64_cli

View file

@ -9,6 +9,8 @@ parameters:
type: boolean
- name: VSCODE_RUN_SMOKE_TESTS
type: boolean
- name: VSCODE_BUILD_TUNNEL_CLI
type: boolean
steps:
- ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}:
@ -175,6 +177,36 @@ steps:
echo "##vso[task.setvariable variable=CodeSigningFolderPath]$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)"
displayName: Build
- ${{ if eq(parameters.VSCODE_BUILD_TUNNEL_CLI, true) }}:
- task: DownloadPipelineArtifact@2
inputs:
artifact: unsigned_vscode_cli_win32_arm64_cli
patterns: '**'
path: $(Build.ArtifactStagingDirectory)/cli
displayName: Download VS Code CLI
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'arm64'))
- task: DownloadPipelineArtifact@2
inputs:
artifact: unsigned_vscode_cli_win32_x64_cli
patterns: '**'
path: $(Build.ArtifactStagingDirectory)/cli
displayName: Download VS Code CLI
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$ArtifactName = (gci -Path "$(Build.ArtifactStagingDirectory)/cli" | Select-Object -last 1).FullName
Expand-Archive -Path $ArtifactName -DestinationPath "$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)/bin"
if ("$(VSCODE_QUALITY)" -ne "stable")
{
Move-Item -Path "$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)/bin/code-tunnel.exe" -Destination "$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)/bin/code-tunnel-$(VSCODE_QUALITY).exe"
}
displayName: Move VS Code CLI
- ${{ if eq(parameters.VSCODE_PUBLISH, true) }}:
- powershell: |
. build/azure-pipelines/win32/exec.ps1

View file

@ -6,6 +6,7 @@
const gulp = require('gulp');
const path = require('path');
const util = require('./lib/util');
const { getVersion } = require('./lib/getVersion');
const task = require('./lib/task');
const optimize = require('./lib/optimize');
const es = require('event-stream');
@ -18,7 +19,7 @@ const monacoapi = require('./lib/monaco-api');
const fs = require('fs');
const root = path.dirname(__dirname);
const sha1 = util.getVersion(root);
const sha1 = getVersion(root);
const semver = require('./monaco/package.json').version;
const headerVersion = semver + '(' + sha1 + ')';

View file

@ -12,12 +12,13 @@ const nodeUtil = require('util');
const es = require('event-stream');
const filter = require('gulp-filter');
const util = require('./lib/util');
const { getVersion } = require('./lib/getVersion');
const task = require('./lib/task');
const watcher = require('./lib/watch');
const createReporter = require('./lib/reporter').createReporter;
const glob = require('glob');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const commit = getVersion(root);
const plumber = require('gulp-plumber');
const ext = require('./lib/extensions');

View file

@ -9,6 +9,7 @@ const gulp = require('gulp');
const path = require('path');
const es = require('event-stream');
const util = require('./lib/util');
const { getVersion } = require('./lib/getVersion');
const task = require('./lib/task');
const optimize = require('./lib/optimize');
const product = require('../product.json');
@ -30,7 +31,7 @@ const { vscodeWebEntryPoints, vscodeWebResourceIncludes, createVSCodeWebFileCont
const cp = require('child_process');
const REPO_ROOT = path.dirname(__dirname);
const commit = util.getVersion(REPO_ROOT);
const commit = getVersion(REPO_ROOT);
const BUILD_ROOT = path.dirname(REPO_ROOT);
const REMOTE_FOLDER = path.join(REPO_ROOT, 'remote');

View file

@ -18,11 +18,12 @@ const replace = require('gulp-replace');
const filter = require('gulp-filter');
const _ = require('underscore');
const util = require('./lib/util');
const { getVersion } = require('./lib/getVersion');
const task = require('./lib/task');
const buildfile = require('../src/buildfile');
const optimize = require('./lib/optimize');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const commit = getVersion(root);
const packageJson = require('../package.json');
const product = require('../product.json');
const crypto = require('crypto');

View file

@ -12,6 +12,7 @@ const shell = require('gulp-shell');
const es = require('event-stream');
const vfs = require('vinyl-fs');
const util = require('./lib/util');
const { getVersion } = require('./lib/getVersion');
const task = require('./lib/task');
const packageJson = require('../package.json');
const product = require('../product.json');
@ -20,7 +21,7 @@ const sysrootInstaller = require('./linux/debian/install-sysroot');
const debianRecommendedDependencies = require('./linux/debian/dep-lists').recommendedDeps;
const path = require('path');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const commit = getVersion(root);
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);

View file

@ -9,6 +9,7 @@ const gulp = require('gulp');
const path = require('path');
const es = require('event-stream');
const util = require('./lib/util');
const { getVersion } = require('./lib/getVersion');
const task = require('./lib/task');
const optimize = require('./lib/optimize');
const product = require('../product.json');
@ -26,7 +27,7 @@ const REPO_ROOT = path.dirname(__dirname);
const BUILD_ROOT = path.dirname(REPO_ROOT);
const WEB_FOLDER = path.join(REPO_ROOT, 'remote', 'web');
const commit = util.getVersion(REPO_ROOT);
const commit = getVersion(REPO_ROOT);
const quality = product.quality;
const version = (quality && quality !== 'stable') ? `${packageJson.version}-${quality}` : packageJson.version;

View file

@ -11,12 +11,13 @@ const vfs = require("vinyl-fs");
const filter = require("gulp-filter");
const _ = require("underscore");
const util = require("./util");
const getVersion_1 = require("./getVersion");
function isDocumentSuffix(str) {
return str === 'document' || str === 'script' || str === 'file' || str === 'source code';
}
const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
const commit = (0, getVersion_1.getVersion)(root);
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
/**
* Generate a `DarwinDocumentType` given a list of file extensions, an icon name, and an optional suffix or file type name.

View file

@ -9,6 +9,7 @@ import * as vfs from 'vinyl-fs';
import * as filter from 'gulp-filter';
import * as _ from 'underscore';
import * as util from './util';
import { getVersion } from './getVersion';
type DarwinDocumentSuffix = 'document' | 'script' | 'file' | 'source code';
type DarwinDocumentType = {
@ -26,7 +27,7 @@ function isDocumentSuffix(str?: string): str is DarwinDocumentSuffix {
const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
const commit = getVersion(root);
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));

View file

@ -26,9 +26,9 @@ const jsoncParser = require("jsonc-parser");
const dependencies_1 = require("./dependencies");
const _ = require("underscore");
const builtInExtensions_1 = require("./builtInExtensions");
const util = require('./util');
const getVersion_1 = require("./getVersion");
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
const commit = (0, getVersion_1.getVersion)(root);
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
function minifyExtensionResources(input) {
const jsonFilter = filter(['**/*.json', '**/*.code-snippets'], { restore: true });

View file

@ -26,9 +26,10 @@ import webpack = require('webpack');
import { getProductionDependencies } from './dependencies';
import _ = require('underscore');
import { getExtensionStream } from './builtInExtensions';
const util = require('./util');
import { getVersion } from './getVersion';
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
const commit = getVersion(root);
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
function minifyExtensionResources(input: Stream): Stream {

16
build/lib/getVersion.js Normal file
View file

@ -0,0 +1,16 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.getVersion = void 0;
const git = require("./git");
function getVersion(root) {
let version = process.env['VSCODE_DISTRO_COMMIT'] || process.env['BUILD_SOURCEVERSION'];
if (!version || !/^[0-9a-f]{40}$/i.test(version.trim())) {
version = git.getVersion(root);
}
return version;
}
exports.getVersion = getVersion;

16
build/lib/getVersion.ts Normal file
View file

@ -0,0 +1,16 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as git from './git';

/**
 * Resolves the commit sha used to identify this build.
 *
 * A sha supplied by the build environment (VSCODE_DISTRO_COMMIT, then
 * BUILD_SOURCEVERSION) takes precedence, but only when it is a full
 * 40-character hex sha; otherwise the result of `git.getVersion(root)` is
 * returned instead.
 */
export function getVersion(root: string): string | undefined {
	const envVersion = process.env['VSCODE_DISTRO_COMMIT'] || process.env['BUILD_SOURCEVERSION'];
	// Guard clause: accept the environment-provided value only when it looks
	// like a complete commit sha.
	if (envVersion && /^[0-9a-f]{40}$/i.test(envVersion.trim())) {
		return envVersion;
	}
	return git.getVersion(root);
}

View file

@ -4,7 +4,7 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.buildWebNodePaths = exports.createExternalLoaderConfig = exports.acquireWebNodePaths = exports.getElectronVersion = exports.streamToPromise = exports.versionStringToNumber = exports.filter = exports.rebase = exports.getVersion = exports.ensureDir = exports.rreddir = exports.rimraf = exports.rewriteSourceMappingURL = exports.stripSourceMappingURL = exports.loadSourcemaps = exports.cleanNodeModules = exports.skipDirectories = exports.toFileUri = exports.setExecutableBit = exports.fixWin32DirectoryPermissions = exports.debounce = exports.incremental = void 0;
exports.buildWebNodePaths = exports.createExternalLoaderConfig = exports.acquireWebNodePaths = exports.getElectronVersion = exports.streamToPromise = exports.versionStringToNumber = exports.filter = exports.rebase = exports.ensureDir = exports.rreddir = exports.rimraf = exports.rewriteSourceMappingURL = exports.stripSourceMappingURL = exports.loadSourcemaps = exports.cleanNodeModules = exports.skipDirectories = exports.toFileUri = exports.setExecutableBit = exports.fixWin32DirectoryPermissions = exports.debounce = exports.incremental = void 0;
const es = require("event-stream");
const _debounce = require("debounce");
const _filter = require("gulp-filter");
@ -13,7 +13,6 @@ const path = require("path");
const fs = require("fs");
const _rimraf = require("rimraf");
const VinylFile = require("vinyl");
const git = require("./git");
const root = path.dirname(path.dirname(__dirname));
const NoCancellationToken = { isCancellationRequested: () => false };
function incremental(streamProvider, initial, supportsCancellation) {
@ -253,14 +252,6 @@ function ensureDir(dirPath) {
fs.mkdirSync(dirPath);
}
exports.ensureDir = ensureDir;
function getVersion(root) {
let version = process.env['VSCODE_DISTRO_COMMIT'] || process.env['BUILD_SOURCEVERSION'];
if (!version || !/^[0-9a-f]{40}$/i.test(version.trim())) {
version = git.getVersion(root);
}
return version;
}
exports.getVersion = getVersion;
function rebase(count) {
return rename(f => {
const parts = f.dirname ? f.dirname.split(/[\/\\]/) : [];

View file

@ -14,7 +14,6 @@ import * as _rimraf from 'rimraf';
import * as VinylFile from 'vinyl';
import { ThroughStream } from 'through';
import * as sm from 'source-map';
import * as git from './git';
const root = path.dirname(path.dirname(__dirname));
@ -317,16 +316,6 @@ export function ensureDir(dirPath: string): void {
fs.mkdirSync(dirPath);
}
export function getVersion(root: string): string | undefined {
let version = process.env['VSCODE_DISTRO_COMMIT'] || process.env['BUILD_SOURCEVERSION'];
if (!version || !/^[0-9a-f]{40}$/i.test(version.trim())) {
version = git.getVersion(root);
}
return version;
}
export function rebase(count: number): NodeJS.ReadWriteStream {
return rename(f => {
const parts = f.dirname ? f.dirname.split(/[\/\\]/) : [];

View file

@ -62,4 +62,5 @@ lto = true
codegen-units = 1
[features]
default = []
vscode-encrypt = []

View file

@ -1,8 +0,0 @@
[build.env]
passthrough = [
"LAUNCHER_VERSION",
"LAUNCHER_ASSET_NAME",
]
[target.aarch64-unknown-linux-gnu]
image = "microsoft/vscode-server-launcher-xbuild:aarch64"

View file

@ -3,55 +3,56 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const FILE_HEADER: &[u8] = b"/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/";
const FILE_HEADER: &str = "/*---------------------------------------------------------------------------------------------\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n *--------------------------------------------------------------------------------------------*/";
use std::{env, fs, io, path::PathBuf, process};
fn main() {
let files = enumerate_source_files().expect("expected to enumerate files");
ensure_file_headers(&files).expect("expected to ensure file headers");
let files = enumerate_source_files().expect("expected to enumerate files");
ensure_file_headers(&files).expect("expected to ensure file headers");
}
fn ensure_file_headers(files: &[PathBuf]) -> Result<(), io::Error> {
let mut ok = true;
for file in files {
let contents = fs::read(file)?;
let mut ok = true;
if !contents.starts_with(FILE_HEADER) {
eprintln!("File missing copyright header: {}", file.display());
ok = false;
}
}
let crlf_header_str = str::replace(FILE_HEADER, "\n", "\r\n");
let crlf_header = crlf_header_str.as_bytes();
let lf_header = FILE_HEADER.as_bytes();
for file in files {
let contents = fs::read(file)?;
if !ok {
process::exit(1);
}
if !(contents.starts_with(lf_header) || contents.starts_with(crlf_header)) {
eprintln!("File missing copyright header: {}", file.display());
ok = false;
}
}
Ok(())
if !ok {
process::exit(1);
}
Ok(())
}
/// Gets all "rs" files in the source directory
fn enumerate_source_files() -> Result<Vec<PathBuf>, io::Error> {
let mut files = vec![];
let mut queue = vec![];
let mut files = vec![];
let mut queue = vec![];
let current_dir = env::current_dir()?.join("src");
queue.push(current_dir);
let current_dir = env::current_dir()?.join("src");
queue.push(current_dir);
while !queue.is_empty() {
for entry in fs::read_dir(queue.pop().unwrap())? {
let entry = entry?;
let ftype = entry.file_type()?;
if ftype.is_dir() {
queue.push(entry.path());
} else if ftype.is_file() && entry.file_name().to_string_lossy().ends_with(".rs") {
files.push(entry.path());
}
}
}
while !queue.is_empty() {
for entry in fs::read_dir(queue.pop().unwrap())? {
let entry = entry?;
let ftype = entry.file_type()?;
if ftype.is_dir() {
queue.push(entry.path());
} else if ftype.is_file() && entry.file_name().to_string_lossy().ends_with(".rs") {
files.push(entry.path());
}
}
}
Ok(files)
Ok(files)
}

1
cli/rustfmt.toml Normal file
View file

@ -0,0 +1 @@
# Format Rust sources with hard tabs, matching the editor settings used
# elsewhere in this repository.
hard_tabs = true

File diff suppressed because it is too large Load diff

View file

@ -5,9 +5,9 @@
use clap::Parser;
use cli::{
commands::{args, tunnels, CommandContext},
constants, log as own_log,
state::LauncherPaths,
commands::{args, tunnels, CommandContext},
constants, log as own_log,
state::LauncherPaths,
};
use opentelemetry::sdk::trace::TracerProvider as SdkTracerProvider;
use opentelemetry::trace::TracerProvider;
@ -18,14 +18,14 @@ use log::{Level, Metadata, Record};
#[clap(
long_about = None,
name = "Visual Studio Code Tunnels CLI",
version = match constants::LAUNCHER_VERSION { Some(v) => v, None => "dev" },
version = match constants::VSCODE_CLI_VERSION { Some(v) => v, None => "dev" },
)]
pub struct TunnelCli {
#[clap(flatten, next_help_heading = Some("GLOBAL OPTIONS"))]
pub global_options: args::GlobalOptions,
#[clap(flatten, next_help_heading = Some("GLOBAL OPTIONS"))]
pub global_options: args::GlobalOptions,
#[clap(flatten, next_help_heading = Some("TUNNEL OPTIONS"))]
pub tunnel_options: args::TunnelArgs,
#[clap(flatten, next_help_heading = Some("TUNNEL OPTIONS"))]
pub tunnel_options: args::TunnelArgs,
}
/// Entrypoint for a standalone "code-tunnel" subcommand. This is a temporary
@ -33,56 +33,56 @@ pub struct TunnelCli {
/// code in here is duplicated from `src/bin/code/main.rs`
#[tokio::main]
async fn main() -> Result<(), std::convert::Infallible> {
let parsed = TunnelCli::parse();
let context = CommandContext {
http: reqwest::Client::new(),
paths: LauncherPaths::new(&parsed.global_options.cli_data_dir).unwrap(),
log: own_log::Logger::new(
SdkTracerProvider::builder().build().tracer("codecli"),
if parsed.global_options.verbose {
own_log::Level::Trace
} else {
parsed.global_options.log.unwrap_or(own_log::Level::Info)
},
),
args: args::Cli {
global_options: parsed.global_options,
subcommand: Some(args::Commands::Tunnel(parsed.tunnel_options.clone())),
..Default::default()
},
};
let parsed = TunnelCli::parse();
let context = CommandContext {
http: reqwest::Client::new(),
paths: LauncherPaths::new(&parsed.global_options.cli_data_dir).unwrap(),
log: own_log::Logger::new(
SdkTracerProvider::builder().build().tracer("codecli"),
if parsed.global_options.verbose {
own_log::Level::Trace
} else {
parsed.global_options.log.unwrap_or(own_log::Level::Info)
},
),
args: args::Cli {
global_options: parsed.global_options,
subcommand: Some(args::Commands::Tunnel(parsed.tunnel_options.clone())),
..Default::default()
},
};
log::set_logger(Box::leak(Box::new(RustyLogger(context.log.clone()))))
.map(|()| log::set_max_level(log::LevelFilter::Debug))
.expect("expected to make logger");
log::set_logger(Box::leak(Box::new(RustyLogger(context.log.clone()))))
.map(|()| log::set_max_level(log::LevelFilter::Debug))
.expect("expected to make logger");
let result = match parsed.tunnel_options.subcommand {
Some(args::TunnelSubcommand::Prune) => tunnels::prune(context).await,
Some(args::TunnelSubcommand::Unregister) => tunnels::unregister(context).await,
Some(args::TunnelSubcommand::Rename(rename_args)) => {
tunnels::rename(context, rename_args).await
}
Some(args::TunnelSubcommand::User(user_command)) => {
tunnels::user(context, user_command).await
}
Some(args::TunnelSubcommand::Service(service_args)) => {
tunnels::service(context, service_args).await
}
None => tunnels::serve(context, parsed.tunnel_options.serve_args).await,
};
let result = match parsed.tunnel_options.subcommand {
Some(args::TunnelSubcommand::Prune) => tunnels::prune(context).await,
Some(args::TunnelSubcommand::Unregister) => tunnels::unregister(context).await,
Some(args::TunnelSubcommand::Rename(rename_args)) => {
tunnels::rename(context, rename_args).await
}
Some(args::TunnelSubcommand::User(user_command)) => {
tunnels::user(context, user_command).await
}
Some(args::TunnelSubcommand::Service(service_args)) => {
tunnels::service(context, service_args).await
}
None => tunnels::serve(context, parsed.tunnel_options.serve_args).await,
};
match result {
Err(e) => print_and_exit(e),
Ok(code) => std::process::exit(code),
}
match result {
Err(e) => print_and_exit(e),
Ok(code) => std::process::exit(code),
}
}
fn print_and_exit<E>(err: E) -> !
where
E: std::fmt::Display,
E: std::fmt::Display,
{
own_log::emit(own_log::Level::Error, "", &format!("{}", err));
std::process::exit(1);
own_log::emit(own_log::Level::Error, "", &format!("{}", err));
std::process::exit(1);
}
/// Logger that uses the common rust "log" crate and directs back to one of
@ -90,34 +90,34 @@ where
struct RustyLogger(own_log::Logger);
impl log::Log for RustyLogger {
fn enabled(&self, metadata: &Metadata) -> bool {
metadata.level() <= Level::Debug
}
fn enabled(&self, metadata: &Metadata) -> bool {
metadata.level() <= Level::Debug
}
fn log(&self, record: &Record) {
if !self.enabled(record.metadata()) {
return;
}
fn log(&self, record: &Record) {
if !self.enabled(record.metadata()) {
return;
}
// exclude noisy log modules:
let src = match record.module_path() {
Some("russh::cipher") => return,
Some("russh::negotiation") => return,
Some(s) => s,
None => "<unknown>",
};
// exclude noisy log modules:
let src = match record.module_path() {
Some("russh::cipher") => return,
Some("russh::negotiation") => return,
Some(s) => s,
None => "<unknown>",
};
self.0.emit(
match record.level() {
log::Level::Debug => own_log::Level::Debug,
log::Level::Error => own_log::Level::Error,
log::Level::Info => own_log::Level::Info,
log::Level::Trace => own_log::Level::Trace,
log::Level::Warn => own_log::Level::Warn,
},
&format!("[{}] {}", src, record.args()),
);
}
self.0.emit(
match record.level() {
log::Level::Debug => own_log::Level::Debug,
log::Level::Error => own_log::Level::Error,
log::Level::Info => own_log::Level::Info,
log::Level::Trace => own_log::Level::Trace,
log::Level::Warn => own_log::Level::Warn,
},
&format!("[{}] {}", src, record.args()),
);
}
fn flush(&self) {}
fn flush(&self) {}
}

View file

@ -6,229 +6,229 @@
use std::collections::HashMap;
use cli::commands::args::{
Cli, Commands, DesktopCodeOptions, ExtensionArgs, ExtensionSubcommand, InstallExtensionArgs,
ListExtensionArgs, UninstallExtensionArgs,
Cli, Commands, DesktopCodeOptions, ExtensionArgs, ExtensionSubcommand, InstallExtensionArgs,
ListExtensionArgs, UninstallExtensionArgs,
};
/// Tries to parse the argv using the legacy CLI interface, looking for its
/// flags and generating a CLI with subcommands if those don't exist.
pub fn try_parse_legacy(
iter: impl IntoIterator<Item = impl Into<std::ffi::OsString>>,
iter: impl IntoIterator<Item = impl Into<std::ffi::OsString>>,
) -> Option<Cli> {
let raw = clap_lex::RawArgs::new(iter);
let mut cursor = raw.cursor();
raw.next(&mut cursor); // Skip the bin
let raw = clap_lex::RawArgs::new(iter);
let mut cursor = raw.cursor();
raw.next(&mut cursor); // Skip the bin
// First make a hashmap of all flags and capture positional arguments.
let mut args: HashMap<String, Vec<String>> = HashMap::new();
let mut last_arg = None;
while let Some(arg) = raw.next(&mut cursor) {
if let Some((long, value)) = arg.to_long() {
if let Ok(long) = long {
last_arg = Some(long.to_string());
match args.get_mut(long) {
Some(prev) => {
if let Some(v) = value {
prev.push(v.to_str_lossy().to_string());
}
}
None => {
if let Some(v) = value {
args.insert(long.to_string(), vec![v.to_str_lossy().to_string()]);
} else {
args.insert(long.to_string(), vec![]);
}
}
}
}
} else if let Ok(value) = arg.to_value() {
if let Some(last_arg) = &last_arg {
args.get_mut(last_arg)
.expect("expected to have last arg")
.push(value.to_string());
}
}
}
// First make a hashmap of all flags and capture positional arguments.
let mut args: HashMap<String, Vec<String>> = HashMap::new();
let mut last_arg = None;
while let Some(arg) = raw.next(&mut cursor) {
if let Some((long, value)) = arg.to_long() {
if let Ok(long) = long {
last_arg = Some(long.to_string());
match args.get_mut(long) {
Some(prev) => {
if let Some(v) = value {
prev.push(v.to_str_lossy().to_string());
}
}
None => {
if let Some(v) = value {
args.insert(long.to_string(), vec![v.to_str_lossy().to_string()]);
} else {
args.insert(long.to_string(), vec![]);
}
}
}
}
} else if let Ok(value) = arg.to_value() {
if let Some(last_arg) = &last_arg {
args.get_mut(last_arg)
.expect("expected to have last arg")
.push(value.to_string());
}
}
}
let get_first_arg_value =
|key: &str| args.get(key).and_then(|v| v.first()).map(|s| s.to_string());
let desktop_code_options = DesktopCodeOptions {
extensions_dir: get_first_arg_value("extensions-dir"),
user_data_dir: get_first_arg_value("user-data-dir"),
use_version: None,
};
let get_first_arg_value =
|key: &str| args.get(key).and_then(|v| v.first()).map(|s| s.to_string());
let desktop_code_options = DesktopCodeOptions {
extensions_dir: get_first_arg_value("extensions-dir"),
user_data_dir: get_first_arg_value("user-data-dir"),
use_version: None,
};
// Now translate them to subcommands.
// --list-extensions -> ext list
// --install-extension=id -> ext install <id>
// --uninstall-extension=id -> ext uninstall <id>
// --status -> status
// Now translate them to subcommands.
// --list-extensions -> ext list
// --install-extension=id -> ext install <id>
// --uninstall-extension=id -> ext uninstall <id>
// --status -> status
if args.contains_key("list-extensions") {
Some(Cli {
subcommand: Some(Commands::Extension(ExtensionArgs {
subcommand: ExtensionSubcommand::List(ListExtensionArgs {
category: get_first_arg_value("category"),
show_versions: args.contains_key("show-versions"),
}),
desktop_code_options,
})),
..Default::default()
})
} else if let Some(exts) = args.remove("install-extension") {
Some(Cli {
subcommand: Some(Commands::Extension(ExtensionArgs {
subcommand: ExtensionSubcommand::Install(InstallExtensionArgs {
id_or_path: exts,
pre_release: args.contains_key("pre-release"),
force: args.contains_key("force"),
}),
desktop_code_options,
})),
..Default::default()
})
} else if let Some(exts) = args.remove("uninstall-extension") {
Some(Cli {
subcommand: Some(Commands::Extension(ExtensionArgs {
subcommand: ExtensionSubcommand::Uninstall(UninstallExtensionArgs { id: exts }),
desktop_code_options,
})),
..Default::default()
})
} else if args.contains_key("status") {
Some(Cli {
subcommand: Some(Commands::Status),
..Default::default()
})
} else {
None
}
if args.contains_key("list-extensions") {
Some(Cli {
subcommand: Some(Commands::Extension(ExtensionArgs {
subcommand: ExtensionSubcommand::List(ListExtensionArgs {
category: get_first_arg_value("category"),
show_versions: args.contains_key("show-versions"),
}),
desktop_code_options,
})),
..Default::default()
})
} else if let Some(exts) = args.remove("install-extension") {
Some(Cli {
subcommand: Some(Commands::Extension(ExtensionArgs {
subcommand: ExtensionSubcommand::Install(InstallExtensionArgs {
id_or_path: exts,
pre_release: args.contains_key("pre-release"),
force: args.contains_key("force"),
}),
desktop_code_options,
})),
..Default::default()
})
} else if let Some(exts) = args.remove("uninstall-extension") {
Some(Cli {
subcommand: Some(Commands::Extension(ExtensionArgs {
subcommand: ExtensionSubcommand::Uninstall(UninstallExtensionArgs { id: exts }),
desktop_code_options,
})),
..Default::default()
})
} else if args.contains_key("status") {
Some(Cli {
subcommand: Some(Commands::Status),
..Default::default()
})
} else {
None
}
}
#[cfg(test)]
mod tests {
use super::*;
use super::*;
#[test]
fn test_parses_list_extensions() {
let args = vec![
"code",
"--list-extensions",
"--category",
"themes",
"--show-versions",
];
let cli = try_parse_legacy(args.into_iter()).unwrap();
#[test]
fn test_parses_list_extensions() {
let args = vec![
"code",
"--list-extensions",
"--category",
"themes",
"--show-versions",
];
let cli = try_parse_legacy(args.into_iter()).unwrap();
if let Some(Commands::Extension(extension_args)) = cli.subcommand {
if let ExtensionSubcommand::List(list_args) = extension_args.subcommand {
assert_eq!(list_args.category, Some("themes".to_string()));
assert!(list_args.show_versions);
} else {
panic!(
"Expected list subcommand, got {:?}",
extension_args.subcommand
);
}
} else {
panic!("Expected extension subcommand, got {:?}", cli.subcommand);
}
}
if let Some(Commands::Extension(extension_args)) = cli.subcommand {
if let ExtensionSubcommand::List(list_args) = extension_args.subcommand {
assert_eq!(list_args.category, Some("themes".to_string()));
assert!(list_args.show_versions);
} else {
panic!(
"Expected list subcommand, got {:?}",
extension_args.subcommand
);
}
} else {
panic!("Expected extension subcommand, got {:?}", cli.subcommand);
}
}
#[test]
fn test_parses_install_extension() {
let args = vec![
"code",
"--install-extension",
"connor4312.codesong",
"connor4312.hello-world",
"--pre-release",
"--force",
];
let cli = try_parse_legacy(args.into_iter()).unwrap();
#[test]
fn test_parses_install_extension() {
let args = vec![
"code",
"--install-extension",
"connor4312.codesong",
"connor4312.hello-world",
"--pre-release",
"--force",
];
let cli = try_parse_legacy(args.into_iter()).unwrap();
if let Some(Commands::Extension(extension_args)) = cli.subcommand {
if let ExtensionSubcommand::Install(install_args) = extension_args.subcommand {
assert_eq!(
install_args.id_or_path,
vec!["connor4312.codesong", "connor4312.hello-world"]
);
assert!(install_args.pre_release);
assert!(install_args.force);
} else {
panic!(
"Expected install subcommand, got {:?}",
extension_args.subcommand
);
}
} else {
panic!("Expected extension subcommand, got {:?}", cli.subcommand);
}
}
if let Some(Commands::Extension(extension_args)) = cli.subcommand {
if let ExtensionSubcommand::Install(install_args) = extension_args.subcommand {
assert_eq!(
install_args.id_or_path,
vec!["connor4312.codesong", "connor4312.hello-world"]
);
assert!(install_args.pre_release);
assert!(install_args.force);
} else {
panic!(
"Expected install subcommand, got {:?}",
extension_args.subcommand
);
}
} else {
panic!("Expected extension subcommand, got {:?}", cli.subcommand);
}
}
#[test]
fn test_parses_uninstall_extension() {
let args = vec!["code", "--uninstall-extension", "connor4312.codesong"];
let cli = try_parse_legacy(args.into_iter()).unwrap();
#[test]
fn test_parses_uninstall_extension() {
let args = vec!["code", "--uninstall-extension", "connor4312.codesong"];
let cli = try_parse_legacy(args.into_iter()).unwrap();
if let Some(Commands::Extension(extension_args)) = cli.subcommand {
if let ExtensionSubcommand::Uninstall(uninstall_args) = extension_args.subcommand {
assert_eq!(uninstall_args.id, vec!["connor4312.codesong"]);
} else {
panic!(
"Expected uninstall subcommand, got {:?}",
extension_args.subcommand
);
}
} else {
panic!("Expected extension subcommand, got {:?}", cli.subcommand);
}
}
if let Some(Commands::Extension(extension_args)) = cli.subcommand {
if let ExtensionSubcommand::Uninstall(uninstall_args) = extension_args.subcommand {
assert_eq!(uninstall_args.id, vec!["connor4312.codesong"]);
} else {
panic!(
"Expected uninstall subcommand, got {:?}",
extension_args.subcommand
);
}
} else {
panic!("Expected extension subcommand, got {:?}", cli.subcommand);
}
}
#[test]
fn test_parses_user_data_dir_and_extensions_dir() {
let args = vec![
"code",
"--uninstall-extension",
"connor4312.codesong",
"--user-data-dir",
"foo",
"--extensions-dir",
"bar",
];
let cli = try_parse_legacy(args.into_iter()).unwrap();
#[test]
fn test_parses_user_data_dir_and_extensions_dir() {
let args = vec![
"code",
"--uninstall-extension",
"connor4312.codesong",
"--user-data-dir",
"foo",
"--extensions-dir",
"bar",
];
let cli = try_parse_legacy(args.into_iter()).unwrap();
if let Some(Commands::Extension(extension_args)) = cli.subcommand {
assert_eq!(
extension_args.desktop_code_options.user_data_dir,
Some("foo".to_string())
);
assert_eq!(
extension_args.desktop_code_options.extensions_dir,
Some("bar".to_string())
);
if let ExtensionSubcommand::Uninstall(uninstall_args) = extension_args.subcommand {
assert_eq!(uninstall_args.id, vec!["connor4312.codesong"]);
} else {
panic!(
"Expected uninstall subcommand, got {:?}",
extension_args.subcommand
);
}
} else {
panic!("Expected extension subcommand, got {:?}", cli.subcommand);
}
}
if let Some(Commands::Extension(extension_args)) = cli.subcommand {
assert_eq!(
extension_args.desktop_code_options.user_data_dir,
Some("foo".to_string())
);
assert_eq!(
extension_args.desktop_code_options.extensions_dir,
Some("bar".to_string())
);
if let ExtensionSubcommand::Uninstall(uninstall_args) = extension_args.subcommand {
assert_eq!(uninstall_args.id, vec!["connor4312.codesong"]);
} else {
panic!(
"Expected uninstall subcommand, got {:?}",
extension_args.subcommand
);
}
} else {
panic!("Expected extension subcommand, got {:?}", cli.subcommand);
}
}
#[test]
fn test_status() {
let args = vec!["code", "--status"];
let cli = try_parse_legacy(args.into_iter()).unwrap();
#[test]
fn test_status() {
let args = vec!["code", "--status"];
let cli = try_parse_legacy(args.into_iter()).unwrap();
if let Some(Commands::Status) = cli.subcommand {
// no-op
} else {
panic!("Expected extension subcommand, got {:?}", cli.subcommand);
}
}
if let Some(Commands::Status) = cli.subcommand {
// no-op
} else {
panic!("Expected extension subcommand, got {:?}", cli.subcommand);
}
}
}

View file

@ -8,14 +8,14 @@ use std::process::Command;
use clap::Parser;
use cli::{
commands::{args, tunnels, version, CommandContext},
desktop, log as own_log,
state::LauncherPaths,
update_service::UpdateService,
util::{
errors::{wrap, AnyError},
prereqs::PreReqChecker,
},
commands::{args, tunnels, version, CommandContext},
desktop, log as own_log,
state::LauncherPaths,
update_service::UpdateService,
util::{
errors::{wrap, AnyError},
prereqs::PreReqChecker,
},
};
use legacy_args::try_parse_legacy;
use opentelemetry::sdk::trace::TracerProvider as SdkTracerProvider;
@ -25,110 +25,110 @@ use log::{Level, Metadata, Record};
#[tokio::main]
async fn main() -> Result<(), std::convert::Infallible> {
let raw_args = std::env::args_os().collect::<Vec<_>>();
let parsed = try_parse_legacy(&raw_args).unwrap_or_else(|| args::Cli::parse_from(&raw_args));
let context = CommandContext {
http: reqwest::Client::new(),
paths: LauncherPaths::new(&parsed.global_options.cli_data_dir).unwrap(),
log: own_log::Logger::new(
SdkTracerProvider::builder().build().tracer("codecli"),
if parsed.global_options.verbose {
own_log::Level::Trace
} else {
parsed.global_options.log.unwrap_or(own_log::Level::Info)
},
),
args: parsed,
};
let raw_args = std::env::args_os().collect::<Vec<_>>();
let parsed = try_parse_legacy(&raw_args).unwrap_or_else(|| args::Cli::parse_from(&raw_args));
let context = CommandContext {
http: reqwest::Client::new(),
paths: LauncherPaths::new(&parsed.global_options.cli_data_dir).unwrap(),
log: own_log::Logger::new(
SdkTracerProvider::builder().build().tracer("codecli"),
if parsed.global_options.verbose {
own_log::Level::Trace
} else {
parsed.global_options.log.unwrap_or(own_log::Level::Info)
},
),
args: parsed,
};
log::set_logger(Box::leak(Box::new(RustyLogger(context.log.clone()))))
.map(|()| log::set_max_level(log::LevelFilter::Debug))
.expect("expected to make logger");
log::set_logger(Box::leak(Box::new(RustyLogger(context.log.clone()))))
.map(|()| log::set_max_level(log::LevelFilter::Debug))
.expect("expected to make logger");
let result = match context.args.subcommand.clone() {
None => {
let ca = context.args.get_base_code_args();
start_code(context, ca).await
}
let result = match context.args.subcommand.clone() {
None => {
let ca = context.args.get_base_code_args();
start_code(context, ca).await
}
Some(args::Commands::Extension(extension_args)) => {
let mut ca = context.args.get_base_code_args();
extension_args.add_code_args(&mut ca);
start_code(context, ca).await
}
Some(args::Commands::Extension(extension_args)) => {
let mut ca = context.args.get_base_code_args();
extension_args.add_code_args(&mut ca);
start_code(context, ca).await
}
Some(args::Commands::Status) => {
let mut ca = context.args.get_base_code_args();
ca.push("--status".to_string());
start_code(context, ca).await
}
Some(args::Commands::Status) => {
let mut ca = context.args.get_base_code_args();
ca.push("--status".to_string());
start_code(context, ca).await
}
Some(args::Commands::Version(version_args)) => match version_args.subcommand {
args::VersionSubcommand::Use(use_version_args) => {
version::switch_to(context, use_version_args).await
}
args::VersionSubcommand::Uninstall(uninstall_version_args) => {
version::uninstall(context, uninstall_version_args).await
}
args::VersionSubcommand::List(list_version_args) => {
version::list(context, list_version_args).await
}
},
Some(args::Commands::Version(version_args)) => match version_args.subcommand {
args::VersionSubcommand::Use(use_version_args) => {
version::switch_to(context, use_version_args).await
}
args::VersionSubcommand::Uninstall(uninstall_version_args) => {
version::uninstall(context, uninstall_version_args).await
}
args::VersionSubcommand::List(list_version_args) => {
version::list(context, list_version_args).await
}
},
Some(args::Commands::Tunnel(tunnel_args)) => match tunnel_args.subcommand {
Some(args::TunnelSubcommand::Prune) => tunnels::prune(context).await,
Some(args::TunnelSubcommand::Unregister) => tunnels::unregister(context).await,
Some(args::TunnelSubcommand::Rename(rename_args)) => {
tunnels::rename(context, rename_args).await
}
Some(args::TunnelSubcommand::User(user_command)) => {
tunnels::user(context, user_command).await
}
Some(args::TunnelSubcommand::Service(service_args)) => {
tunnels::service(context, service_args).await
}
None => tunnels::serve(context, tunnel_args.serve_args).await,
},
};
Some(args::Commands::Tunnel(tunnel_args)) => match tunnel_args.subcommand {
Some(args::TunnelSubcommand::Prune) => tunnels::prune(context).await,
Some(args::TunnelSubcommand::Unregister) => tunnels::unregister(context).await,
Some(args::TunnelSubcommand::Rename(rename_args)) => {
tunnels::rename(context, rename_args).await
}
Some(args::TunnelSubcommand::User(user_command)) => {
tunnels::user(context, user_command).await
}
Some(args::TunnelSubcommand::Service(service_args)) => {
tunnels::service(context, service_args).await
}
None => tunnels::serve(context, tunnel_args.serve_args).await,
},
};
match result {
Err(e) => print_and_exit(e),
Ok(code) => std::process::exit(code),
}
match result {
Err(e) => print_and_exit(e),
Ok(code) => std::process::exit(code),
}
}
fn print_and_exit<E>(err: E) -> !
where
E: std::fmt::Display,
E: std::fmt::Display,
{
own_log::emit(own_log::Level::Error, "", &format!("{}", err));
std::process::exit(1);
own_log::emit(own_log::Level::Error, "", &format!("{}", err));
std::process::exit(1);
}
async fn start_code(context: CommandContext, args: Vec<String>) -> Result<i32, AnyError> {
let platform = PreReqChecker::new().verify().await?;
let version_manager = desktop::CodeVersionManager::new(&context.paths, platform);
let update_service = UpdateService::new(context.log.clone(), context.http.clone());
let version = match &context.args.editor_options.code_options.use_version {
Some(v) => desktop::RequestedVersion::try_from(v.as_str())?,
None => version_manager.get_preferred_version(),
};
let platform = PreReqChecker::new().verify().await?;
let version_manager = desktop::CodeVersionManager::new(&context.paths, platform);
let update_service = UpdateService::new(context.log.clone(), context.http.clone());
let version = match &context.args.editor_options.code_options.use_version {
Some(v) => desktop::RequestedVersion::try_from(v.as_str())?,
None => version_manager.get_preferred_version(),
};
let binary = match version_manager.try_get_entrypoint(&version).await {
Some(ep) => ep,
None => {
desktop::prompt_to_install(&version)?;
version_manager.install(&update_service, &version).await?
}
};
let binary = match version_manager.try_get_entrypoint(&version).await {
Some(ep) => ep,
None => {
desktop::prompt_to_install(&version)?;
version_manager.install(&update_service, &version).await?
}
};
let code = Command::new(binary)
.args(args)
.status()
.map(|s| s.code().unwrap_or(1))
.map_err(|e| wrap(e, "error running VS Code"))?;
let code = Command::new(binary)
.args(args)
.status()
.map(|s| s.code().unwrap_or(1))
.map_err(|e| wrap(e, "error running VS Code"))?;
Ok(code)
Ok(code)
}
/// Logger that uses the common rust "log" crate and directs back to one of
@ -136,34 +136,34 @@ async fn start_code(context: CommandContext, args: Vec<String>) -> Result<i32, A
struct RustyLogger(own_log::Logger);
impl log::Log for RustyLogger {
fn enabled(&self, metadata: &Metadata) -> bool {
metadata.level() <= Level::Debug
}
fn enabled(&self, metadata: &Metadata) -> bool {
metadata.level() <= Level::Debug
}
fn log(&self, record: &Record) {
if !self.enabled(record.metadata()) {
return;
}
fn log(&self, record: &Record) {
if !self.enabled(record.metadata()) {
return;
}
// exclude noisy log modules:
let src = match record.module_path() {
Some("russh::cipher") => return,
Some("russh::negotiation") => return,
Some(s) => s,
None => "<unknown>",
};
// exclude noisy log modules:
let src = match record.module_path() {
Some("russh::cipher") => return,
Some("russh::negotiation") => return,
Some(s) => s,
None => "<unknown>",
};
self.0.emit(
match record.level() {
log::Level::Debug => own_log::Level::Debug,
log::Level::Error => own_log::Level::Error,
log::Level::Info => own_log::Level::Info,
log::Level::Trace => own_log::Level::Trace,
log::Level::Warn => own_log::Level::Warn,
},
&format!("[{}] {}", src, record.args()),
);
}
self.0.emit(
match record.level() {
log::Level::Debug => own_log::Level::Debug,
log::Level::Error => own_log::Level::Error,
log::Level::Info => own_log::Level::Info,
log::Level::Trace => own_log::Level::Trace,
log::Level::Warn => own_log::Level::Warn,
},
&format!("[{}] {}", src, record.args()),
);
}
fn flush(&self) {}
fn flush(&self) {}
}

View file

@ -22,569 +22,569 @@ const TEMPLATE: &str = "
help_template = TEMPLATE,
long_about = None,
name = "Visual Studio Code CLI",
version = match constants::LAUNCHER_VERSION { Some(v) => v, None => "dev" },
version = match constants::VSCODE_CLI_VERSION { Some(v) => v, None => "dev" },
)]
pub struct Cli {
/// One or more files, folders, or URIs to open.
#[clap(name = "paths")]
pub open_paths: Vec<String>,
/// One or more files, folders, or URIs to open.
#[clap(name = "paths")]
pub open_paths: Vec<String>,
#[clap(flatten, next_help_heading = Some("EDITOR OPTIONS"))]
pub editor_options: EditorOptions,
#[clap(flatten, next_help_heading = Some("EDITOR OPTIONS"))]
pub editor_options: EditorOptions,
#[clap(flatten, next_help_heading = Some("EDITOR TROUBLESHOOTING"))]
pub troubleshooting: EditorTroubleshooting,
#[clap(flatten, next_help_heading = Some("EDITOR TROUBLESHOOTING"))]
pub troubleshooting: EditorTroubleshooting,
#[clap(flatten, next_help_heading = Some("GLOBAL OPTIONS"))]
pub global_options: GlobalOptions,
#[clap(flatten, next_help_heading = Some("GLOBAL OPTIONS"))]
pub global_options: GlobalOptions,
#[clap(subcommand)]
pub subcommand: Option<Commands>,
#[clap(subcommand)]
pub subcommand: Option<Commands>,
}
impl Cli {
pub fn get_base_code_args(&self) -> Vec<String> {
let mut args = self.open_paths.clone();
self.editor_options.add_code_args(&mut args);
self.troubleshooting.add_code_args(&mut args);
self.global_options.add_code_args(&mut args);
args
}
pub fn get_base_code_args(&self) -> Vec<String> {
let mut args = self.open_paths.clone();
self.editor_options.add_code_args(&mut args);
self.troubleshooting.add_code_args(&mut args);
self.global_options.add_code_args(&mut args);
args
}
}
impl<'a> From<&'a Cli> for CodeServerArgs {
fn from(cli: &'a Cli) -> Self {
let mut args = CodeServerArgs {
log: cli.global_options.log,
accept_server_license_terms: true,
..Default::default()
};
fn from(cli: &'a Cli) -> Self {
let mut args = CodeServerArgs {
log: cli.global_options.log,
accept_server_license_terms: true,
..Default::default()
};
args.log = cli.global_options.log;
args.accept_server_license_terms = true;
args.log = cli.global_options.log;
args.accept_server_license_terms = true;
if cli.global_options.verbose {
args.verbose = true;
}
if cli.global_options.verbose {
args.verbose = true;
}
if cli.global_options.disable_telemetry {
args.telemetry_level = Some(options::TelemetryLevel::Off);
} else if cli.global_options.telemetry_level.is_some() {
args.telemetry_level = cli.global_options.telemetry_level;
}
if cli.global_options.disable_telemetry {
args.telemetry_level = Some(options::TelemetryLevel::Off);
} else if cli.global_options.telemetry_level.is_some() {
args.telemetry_level = cli.global_options.telemetry_level;
}
args
}
args
}
}
#[derive(Subcommand, Debug, Clone)]
pub enum Commands {
/// Create a tunnel that's accessible on vscode.dev from anywhere.
/// Run `code tunnel --help` for more usage info.
Tunnel(TunnelArgs),
/// Create a tunnel that's accessible on vscode.dev from anywhere.
/// Run `code tunnel --help` for more usage info.
Tunnel(TunnelArgs),
/// Manage VS Code extensions.
#[clap(name = "ext")]
Extension(ExtensionArgs),
/// Manage VS Code extensions.
#[clap(name = "ext")]
Extension(ExtensionArgs),
/// Print process usage and diagnostics information.
Status,
/// Print process usage and diagnostics information.
Status,
/// Changes the version of VS Code you're using.
Version(VersionArgs),
/// Changes the version of VS Code you're using.
Version(VersionArgs),
}
#[derive(Args, Debug, Clone)]
pub struct ExtensionArgs {
#[clap(subcommand)]
pub subcommand: ExtensionSubcommand,
#[clap(subcommand)]
pub subcommand: ExtensionSubcommand,
#[clap(flatten)]
pub desktop_code_options: DesktopCodeOptions,
#[clap(flatten)]
pub desktop_code_options: DesktopCodeOptions,
}
impl ExtensionArgs {
pub fn add_code_args(&self, target: &mut Vec<String>) {
if let Some(ed) = &self.desktop_code_options.extensions_dir {
target.push(ed.to_string());
}
pub fn add_code_args(&self, target: &mut Vec<String>) {
if let Some(ed) = &self.desktop_code_options.extensions_dir {
target.push(ed.to_string());
}
self.subcommand.add_code_args(target);
}
self.subcommand.add_code_args(target);
}
}
#[derive(Subcommand, Debug, Clone)]
pub enum ExtensionSubcommand {
/// List installed extensions.
List(ListExtensionArgs),
/// Install an extension.
Install(InstallExtensionArgs),
/// Uninstall an extension.
Uninstall(UninstallExtensionArgs),
/// List installed extensions.
List(ListExtensionArgs),
/// Install an extension.
Install(InstallExtensionArgs),
/// Uninstall an extension.
Uninstall(UninstallExtensionArgs),
}
impl ExtensionSubcommand {
pub fn add_code_args(&self, target: &mut Vec<String>) {
match self {
ExtensionSubcommand::List(args) => {
target.push("--list-extensions".to_string());
if args.show_versions {
target.push("--show-versions".to_string());
}
if let Some(category) = &args.category {
target.push(format!("--category={}", category));
}
}
ExtensionSubcommand::Install(args) => {
for id in args.id_or_path.iter() {
target.push(format!("--install-extension={}", id));
}
if args.pre_release {
target.push("--pre-release".to_string());
}
if args.force {
target.push("--force".to_string());
}
}
ExtensionSubcommand::Uninstall(args) => {
for id in args.id.iter() {
target.push(format!("--uninstall-extension={}", id));
}
}
}
}
pub fn add_code_args(&self, target: &mut Vec<String>) {
match self {
ExtensionSubcommand::List(args) => {
target.push("--list-extensions".to_string());
if args.show_versions {
target.push("--show-versions".to_string());
}
if let Some(category) = &args.category {
target.push(format!("--category={}", category));
}
}
ExtensionSubcommand::Install(args) => {
for id in args.id_or_path.iter() {
target.push(format!("--install-extension={}", id));
}
if args.pre_release {
target.push("--pre-release".to_string());
}
if args.force {
target.push("--force".to_string());
}
}
ExtensionSubcommand::Uninstall(args) => {
for id in args.id.iter() {
target.push(format!("--uninstall-extension={}", id));
}
}
}
}
}
#[derive(Args, Debug, Clone)]
pub struct ListExtensionArgs {
/// Filters installed extensions by provided category, when using --list-extensions.
#[clap(long, value_name = "category")]
pub category: Option<String>,
/// Filters installed extensions by provided category, when using --list-extensions.
#[clap(long, value_name = "category")]
pub category: Option<String>,
/// Show versions of installed extensions, when using --list-extensions.
#[clap(long)]
pub show_versions: bool,
/// Show versions of installed extensions, when using --list-extensions.
#[clap(long)]
pub show_versions: bool,
}
#[derive(Args, Debug, Clone)]
pub struct InstallExtensionArgs {
/// Either an extension id or a path to a VSIX. The identifier of an
/// extension is '${publisher}.${name}'. Use '--force' argument to update
/// to latest version. To install a specific version provide '@${version}'.
/// For example: 'vscode.csharp@1.2.3'.
#[clap(name = "ext-id | id")]
pub id_or_path: Vec<String>,
/// Either an extension id or a path to a VSIX. The identifier of an
/// extension is '${publisher}.${name}'. Use '--force' argument to update
/// to latest version. To install a specific version provide '@${version}'.
/// For example: 'vscode.csharp@1.2.3'.
#[clap(name = "ext-id | id")]
pub id_or_path: Vec<String>,
/// Installs the pre-release version of the extension
#[clap(long)]
pub pre_release: bool,
/// Installs the pre-release version of the extension
#[clap(long)]
pub pre_release: bool,
/// Update to the latest version of the extension if it's already installed.
#[clap(long)]
pub force: bool,
/// Update to the latest version of the extension if it's already installed.
#[clap(long)]
pub force: bool,
}
#[derive(Args, Debug, Clone)]
pub struct UninstallExtensionArgs {
/// One or more extension identifiers to uninstall. The identifier of an
/// extension is '${publisher}.${name}'. Use '--force' argument to update
/// to latest version.
#[clap(name = "ext-id")]
pub id: Vec<String>,
/// One or more extension identifiers to uninstall. The identifier of an
/// extension is '${publisher}.${name}'. Use '--force' argument to update
/// to latest version.
#[clap(name = "ext-id")]
pub id: Vec<String>,
}
#[derive(Args, Debug, Clone)]
pub struct VersionArgs {
#[clap(subcommand)]
pub subcommand: VersionSubcommand,
#[clap(subcommand)]
pub subcommand: VersionSubcommand,
}
#[derive(Subcommand, Debug, Clone)]
pub enum VersionSubcommand {
/// Switches the instance of VS Code in use.
Use(UseVersionArgs),
/// Uninstalls a instance of VS Code.
Uninstall(UninstallVersionArgs),
/// Lists installed VS Code instances.
List(OutputFormatOptions),
/// Switches the instance of VS Code in use.
Use(UseVersionArgs),
/// Uninstalls a instance of VS Code.
Uninstall(UninstallVersionArgs),
/// Lists installed VS Code instances.
List(OutputFormatOptions),
}
#[derive(Args, Debug, Clone)]
pub struct UseVersionArgs {
/// The version of VS Code you want to use. Can be "stable", "insiders",
/// a version number, or an absolute path to an existing install.
#[clap(value_name = "stable | insiders | x.y.z | path")]
pub name: String,
/// The version of VS Code you want to use. Can be "stable", "insiders",
/// a version number, or an absolute path to an existing install.
#[clap(value_name = "stable | insiders | x.y.z | path")]
pub name: String,
/// The directory the version should be installed into, if it's not already installed.
#[clap(long, value_name = "path")]
pub install_dir: Option<String>,
/// The directory the version should be installed into, if it's not already installed.
#[clap(long, value_name = "path")]
pub install_dir: Option<String>,
/// Reinstall the version even if it's already installed.
#[clap(long)]
pub reinstall: bool,
/// Reinstall the version even if it's already installed.
#[clap(long)]
pub reinstall: bool,
}
#[derive(Args, Debug, Clone)]
pub struct UninstallVersionArgs {
/// The version of VS Code to uninstall. Can be "stable", "insiders", or a
/// version number previous passed to `code version use <version>`.
#[clap(value_name = "stable | insiders | x.y.z")]
pub name: String,
/// The version of VS Code to uninstall. Can be "stable", "insiders", or a
/// version number previous passed to `code version use <version>`.
#[clap(value_name = "stable | insiders | x.y.z")]
pub name: String,
}
#[derive(Args, Debug, Default)]
pub struct EditorOptions {
/// Compare two files with each other.
#[clap(short, long, value_names = &["file", "file"])]
pub diff: Vec<String>,
/// Compare two files with each other.
#[clap(short, long, value_names = &["file", "file"])]
pub diff: Vec<String>,
/// Add folder(s) to the last active window.
#[clap(short, long, value_name = "folder")]
pub add: Option<String>,
/// Add folder(s) to the last active window.
#[clap(short, long, value_name = "folder")]
pub add: Option<String>,
/// Open a file at the path on the specified line and character position.
#[clap(short, long, value_name = "file:line[:character]")]
pub goto: Option<String>,
/// Open a file at the path on the specified line and character position.
#[clap(short, long, value_name = "file:line[:character]")]
pub goto: Option<String>,
/// Force to open a new window.
#[clap(short, long)]
pub new_window: bool,
/// Force to open a new window.
#[clap(short, long)]
pub new_window: bool,
/// Force to open a file or folder in an
#[clap(short, long)]
pub reuse_window: bool,
/// Force to open a file or folder in an
#[clap(short, long)]
pub reuse_window: bool,
/// Wait for the files to be closed before returning.
#[clap(short, long)]
pub wait: bool,
/// Wait for the files to be closed before returning.
#[clap(short, long)]
pub wait: bool,
/// The locale to use (e.g. en-US or zh-TW).
#[clap(long, value_name = "locale")]
pub locale: Option<String>,
/// The locale to use (e.g. en-US or zh-TW).
#[clap(long, value_name = "locale")]
pub locale: Option<String>,
/// Enables proposed API features for extensions. Can receive one or
/// more extension IDs to enable individually.
#[clap(long, value_name = "ext-id")]
pub enable_proposed_api: Vec<String>,
/// Enables proposed API features for extensions. Can receive one or
/// more extension IDs to enable individually.
#[clap(long, value_name = "ext-id")]
pub enable_proposed_api: Vec<String>,
#[clap(flatten)]
pub code_options: DesktopCodeOptions,
#[clap(flatten)]
pub code_options: DesktopCodeOptions,
}
impl EditorOptions {
pub fn add_code_args(&self, target: &mut Vec<String>) {
if !self.diff.is_empty() {
target.push("--diff".to_string());
for file in self.diff.iter() {
target.push(file.clone());
}
}
if let Some(add) = &self.add {
target.push("--add".to_string());
target.push(add.clone());
}
if let Some(goto) = &self.goto {
target.push("--goto".to_string());
target.push(goto.clone());
}
if self.new_window {
target.push("--new-window".to_string());
}
if self.reuse_window {
target.push("--reuse-window".to_string());
}
if self.wait {
target.push("--wait".to_string());
}
if let Some(locale) = &self.locale {
target.push(format!("--locale={}", locale));
}
if !self.enable_proposed_api.is_empty() {
for id in self.enable_proposed_api.iter() {
target.push(format!("--enable-proposed-api={}", id));
}
}
self.code_options.add_code_args(target);
}
pub fn add_code_args(&self, target: &mut Vec<String>) {
if !self.diff.is_empty() {
target.push("--diff".to_string());
for file in self.diff.iter() {
target.push(file.clone());
}
}
if let Some(add) = &self.add {
target.push("--add".to_string());
target.push(add.clone());
}
if let Some(goto) = &self.goto {
target.push("--goto".to_string());
target.push(goto.clone());
}
if self.new_window {
target.push("--new-window".to_string());
}
if self.reuse_window {
target.push("--reuse-window".to_string());
}
if self.wait {
target.push("--wait".to_string());
}
if let Some(locale) = &self.locale {
target.push(format!("--locale={}", locale));
}
if !self.enable_proposed_api.is_empty() {
for id in self.enable_proposed_api.iter() {
target.push(format!("--enable-proposed-api={}", id));
}
}
self.code_options.add_code_args(target);
}
}
/// Arguments applicable whenever VS Code desktop is launched
#[derive(Args, Debug, Default, Clone)]
pub struct DesktopCodeOptions {
/// Set the root path for extensions.
#[clap(long, value_name = "dir")]
pub extensions_dir: Option<String>,
/// Set the root path for extensions.
#[clap(long, value_name = "dir")]
pub extensions_dir: Option<String>,
/// Specifies the directory that user data is kept in. Can be used to
/// open multiple distinct instances of Code.
#[clap(long, value_name = "dir")]
pub user_data_dir: Option<String>,
/// Specifies the directory that user data is kept in. Can be used to
/// open multiple distinct instances of Code.
#[clap(long, value_name = "dir")]
pub user_data_dir: Option<String>,
/// Sets the VS Code version to use for this command. The preferred version
/// can be persisted with `code version use <version>`. Can be "stable",
/// "insiders", a version number, or an absolute path to an existing install.
#[clap(long, value_name = "stable | insiders | x.y.z | path")]
pub use_version: Option<String>,
/// Sets the VS Code version to use for this command. The preferred version
/// can be persisted with `code version use <version>`. Can be "stable",
/// "insiders", a version number, or an absolute path to an existing install.
#[clap(long, value_name = "stable | insiders | x.y.z | path")]
pub use_version: Option<String>,
}
/// Argument specifying the output format.
#[derive(Args, Debug, Clone)]
pub struct OutputFormatOptions {
/// Set the data output formats.
#[clap(arg_enum, long, value_name = "format", default_value_t = OutputFormat::Text)]
pub format: OutputFormat,
/// Set the data output formats.
#[clap(arg_enum, long, value_name = "format", default_value_t = OutputFormat::Text)]
pub format: OutputFormat,
}
impl DesktopCodeOptions {
pub fn add_code_args(&self, target: &mut Vec<String>) {
if let Some(extensions_dir) = &self.extensions_dir {
target.push(format!("--extensions-dir={}", extensions_dir));
}
if let Some(user_data_dir) = &self.user_data_dir {
target.push(format!("--user-data-dir={}", user_data_dir));
}
}
pub fn add_code_args(&self, target: &mut Vec<String>) {
if let Some(extensions_dir) = &self.extensions_dir {
target.push(format!("--extensions-dir={}", extensions_dir));
}
if let Some(user_data_dir) = &self.user_data_dir {
target.push(format!("--user-data-dir={}", user_data_dir));
}
}
}
#[derive(Args, Debug, Default)]
pub struct GlobalOptions {
/// Directory where CLI metadata, such as VS Code installations, should be stored.
#[clap(long, env = "VSCODE_CLI_DATA_DIR", global = true)]
pub cli_data_dir: Option<String>,
/// Directory where CLI metadata, such as VS Code installations, should be stored.
#[clap(long, env = "VSCODE_CLI_DATA_DIR", global = true)]
pub cli_data_dir: Option<String>,
/// Print verbose output (implies --wait).
#[clap(long, global = true)]
pub verbose: bool,
/// Print verbose output (implies --wait).
#[clap(long, global = true)]
pub verbose: bool,
/// Log level to use.
#[clap(long, arg_enum, value_name = "level", global = true)]
pub log: Option<log::Level>,
/// Log level to use.
#[clap(long, arg_enum, value_name = "level", global = true)]
pub log: Option<log::Level>,
/// Disable telemetry for the current command, even if it was previously
/// accepted as part of the license prompt or specified in '--telemetry-level'
#[clap(long, global = true, hide = true)]
pub disable_telemetry: bool,
/// Disable telemetry for the current command, even if it was previously
/// accepted as part of the license prompt or specified in '--telemetry-level'
#[clap(long, global = true, hide = true)]
pub disable_telemetry: bool,
/// Sets the initial telemetry level
#[clap(arg_enum, long, global = true, hide = true)]
pub telemetry_level: Option<options::TelemetryLevel>,
/// Sets the initial telemetry level
#[clap(arg_enum, long, global = true, hide = true)]
pub telemetry_level: Option<options::TelemetryLevel>,
}
impl GlobalOptions {
pub fn add_code_args(&self, target: &mut Vec<String>) {
if self.verbose {
target.push("--verbose".to_string());
}
if let Some(log) = self.log {
target.push(format!("--log={}", log));
}
if self.disable_telemetry {
target.push("--disable-telemetry".to_string());
}
if let Some(telemetry_level) = &self.telemetry_level {
target.push(format!("--telemetry-level={}", telemetry_level));
}
}
pub fn add_code_args(&self, target: &mut Vec<String>) {
if self.verbose {
target.push("--verbose".to_string());
}
if let Some(log) = self.log {
target.push(format!("--log={}", log));
}
if self.disable_telemetry {
target.push("--disable-telemetry".to_string());
}
if let Some(telemetry_level) = &self.telemetry_level {
target.push(format!("--telemetry-level={}", telemetry_level));
}
}
}
#[derive(Args, Debug, Default)]
pub struct EditorTroubleshooting {
/// Run CPU profiler during startup.
#[clap(long)]
pub prof_startup: bool,
/// Run CPU profiler during startup.
#[clap(long)]
pub prof_startup: bool,
/// Disable all installed extensions.
#[clap(long)]
pub disable_extensions: bool,
/// Disable all installed extensions.
#[clap(long)]
pub disable_extensions: bool,
/// Disable an extension.
#[clap(long, value_name = "ext-id")]
pub disable_extension: Vec<String>,
/// Disable an extension.
#[clap(long, value_name = "ext-id")]
pub disable_extension: Vec<String>,
/// Turn sync on or off.
#[clap(arg_enum, long, value_name = "on | off")]
pub sync: Option<SyncState>,
/// Turn sync on or off.
#[clap(arg_enum, long, value_name = "on | off")]
pub sync: Option<SyncState>,
/// Allow debugging and profiling of extensions. Check the developer tools for the connection URI.
#[clap(long, value_name = "port")]
pub inspect_extensions: Option<u16>,
/// Allow debugging and profiling of extensions. Check the developer tools for the connection URI.
#[clap(long, value_name = "port")]
pub inspect_extensions: Option<u16>,
/// Allow debugging and profiling of extensions with the extension host
/// being paused after start. Check the developer tools for the connection URI.
#[clap(long, value_name = "port")]
pub inspect_brk_extensions: Option<u16>,
/// Allow debugging and profiling of extensions with the extension host
/// being paused after start. Check the developer tools for the connection URI.
#[clap(long, value_name = "port")]
pub inspect_brk_extensions: Option<u16>,
/// Disable GPU hardware acceleration.
#[clap(long)]
pub disable_gpu: bool,
/// Disable GPU hardware acceleration.
#[clap(long)]
pub disable_gpu: bool,
/// Max memory size for a window (in Mbytes).
#[clap(long, value_name = "memory")]
pub max_memory: Option<usize>,
/// Max memory size for a window (in Mbytes).
#[clap(long, value_name = "memory")]
pub max_memory: Option<usize>,
/// Shows all telemetry events which VS code collects.
#[clap(long)]
pub telemetry: bool,
/// Shows all telemetry events which VS code collects.
#[clap(long)]
pub telemetry: bool,
}
impl EditorTroubleshooting {
pub fn add_code_args(&self, target: &mut Vec<String>) {
if self.prof_startup {
target.push("--prof-startup".to_string());
}
if self.disable_extensions {
target.push("--disable-extensions".to_string());
}
for id in self.disable_extension.iter() {
target.push(format!("--disable-extension={}", id));
}
if let Some(sync) = &self.sync {
target.push(format!("--sync={}", sync));
}
if let Some(port) = &self.inspect_extensions {
target.push(format!("--inspect-extensions={}", port));
}
if let Some(port) = &self.inspect_brk_extensions {
target.push(format!("--inspect-brk-extensions={}", port));
}
if self.disable_gpu {
target.push("--disable-gpu".to_string());
}
if let Some(memory) = &self.max_memory {
target.push(format!("--max-memory={}", memory));
}
if self.telemetry {
target.push("--telemetry".to_string());
}
}
pub fn add_code_args(&self, target: &mut Vec<String>) {
if self.prof_startup {
target.push("--prof-startup".to_string());
}
if self.disable_extensions {
target.push("--disable-extensions".to_string());
}
for id in self.disable_extension.iter() {
target.push(format!("--disable-extension={}", id));
}
if let Some(sync) = &self.sync {
target.push(format!("--sync={}", sync));
}
if let Some(port) = &self.inspect_extensions {
target.push(format!("--inspect-extensions={}", port));
}
if let Some(port) = &self.inspect_brk_extensions {
target.push(format!("--inspect-brk-extensions={}", port));
}
if self.disable_gpu {
target.push("--disable-gpu".to_string());
}
if let Some(memory) = &self.max_memory {
target.push(format!("--max-memory={}", memory));
}
if self.telemetry {
target.push("--telemetry".to_string());
}
}
}
#[derive(ArgEnum, Clone, Copy, Debug)]
pub enum SyncState {
On,
Off,
On,
Off,
}
impl fmt::Display for SyncState {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
SyncState::Off => write!(f, "off"),
SyncState::On => write!(f, "on"),
}
}
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
SyncState::Off => write!(f, "off"),
SyncState::On => write!(f, "on"),
}
}
}
#[derive(ArgEnum, Clone, Copy, Debug)]
pub enum OutputFormat {
Json,
Text,
Json,
Text,
}
#[derive(Args, Clone, Debug, Default)]
pub struct ExistingTunnelArgs {
/// Name you'd like to assign preexisting tunnel to use to connect the tunnel
#[clap(long, hide = true)]
pub tunnel_name: Option<String>,
/// Name you'd like to assign preexisting tunnel to use to connect the tunnel
#[clap(long, hide = true)]
pub tunnel_name: Option<String>,
/// Token to authenticate and use preexisting tunnel
#[clap(long, hide = true)]
pub host_token: Option<String>,
/// Token to authenticate and use preexisting tunnel
#[clap(long, hide = true)]
pub host_token: Option<String>,
/// ID of preexisting tunnel to use to connect the tunnel
#[clap(long, hide = true)]
pub tunnel_id: Option<String>,
/// ID of preexisting tunnel to use to connect the tunnel
#[clap(long, hide = true)]
pub tunnel_id: Option<String>,
/// Cluster of preexisting tunnel to use to connect the tunnel
#[clap(long, hide = true)]
pub cluster: Option<String>,
/// Cluster of preexisting tunnel to use to connect the tunnel
#[clap(long, hide = true)]
pub cluster: Option<String>,
}
#[derive(Args, Debug, Clone, Default)]
pub struct TunnelServeArgs {
/// Optional details to connect to an existing tunnel
#[clap(flatten, next_help_heading = Some("ADVANCED OPTIONS"))]
pub tunnel: ExistingTunnelArgs,
/// Optional details to connect to an existing tunnel
#[clap(flatten, next_help_heading = Some("ADVANCED OPTIONS"))]
pub tunnel: ExistingTunnelArgs,
/// Randomly name machine for port forwarding service
#[clap(long)]
pub random_name: bool,
/// Randomly name machine for port forwarding service
#[clap(long)]
pub random_name: bool,
}
#[derive(Args, Debug, Clone)]
pub struct TunnelArgs {
#[clap(subcommand)]
pub subcommand: Option<TunnelSubcommand>,
#[clap(subcommand)]
pub subcommand: Option<TunnelSubcommand>,
#[clap(flatten)]
pub serve_args: TunnelServeArgs,
#[clap(flatten)]
pub serve_args: TunnelServeArgs,
}
#[derive(Subcommand, Debug, Clone)]
pub enum TunnelSubcommand {
/// Delete all servers which are currently not running.
Prune,
/// Delete all servers which are currently not running.
Prune,
/// Rename the name of this machine associated with port forwarding service.
Rename(TunnelRenameArgs),
/// Rename the name of this machine associated with port forwarding service.
Rename(TunnelRenameArgs),
/// Remove this machine's association with the port forwarding service.
Unregister,
/// Remove this machine's association with the port forwarding service.
Unregister,
#[clap(subcommand)]
User(TunnelUserSubCommands),
#[clap(subcommand)]
User(TunnelUserSubCommands),
/// Manages the tunnel when installed as a system service,
#[clap(subcommand)]
Service(TunnelServiceSubCommands),
/// Manages the tunnel when installed as a system service,
#[clap(subcommand)]
Service(TunnelServiceSubCommands),
}
#[derive(Subcommand, Debug, Clone)]
pub enum TunnelServiceSubCommands {
/// Installs or re-installs the tunnel service on the machine.
Install,
/// Installs or re-installs the tunnel service on the machine.
Install,
/// Uninstalls and stops the tunnel service.
Uninstall,
/// Uninstalls and stops the tunnel service.
Uninstall,
/// Internal command for running the service
#[clap(hide = true)]
InternalRun,
/// Internal command for running the service
#[clap(hide = true)]
InternalRun,
}
#[derive(Args, Debug, Clone)]
pub struct TunnelRenameArgs {
/// The name you'd like to rename your machine to.
pub name: String,
/// The name you'd like to rename your machine to.
pub name: String,
}
#[derive(Subcommand, Debug, Clone)]
pub enum TunnelUserSubCommands {
/// Log in to port forwarding service
Login(LoginArgs),
/// Log in to port forwarding service
Login(LoginArgs),
/// Log out of port forwarding service
Logout,
/// Log out of port forwarding service
Logout,
/// Show the account that's logged into port forwarding service
Show,
/// Show the account that's logged into port forwarding service
Show,
}
#[derive(Args, Debug, Clone)]
pub struct LoginArgs {
/// An access token to store for authentication. Note: this will not be
/// refreshed if it expires!
#[clap(long, requires = "provider")]
pub access_token: Option<String>,
/// An access token to store for authentication. Note: this will not be
/// refreshed if it expires!
#[clap(long, requires = "provider")]
pub access_token: Option<String>,
/// The auth provider to use. If not provided, a prompt will be shown.
#[clap(arg_enum, long)]
pub provider: Option<AuthProvider>,
/// The auth provider to use. If not provided, a prompt will be shown.
#[clap(arg_enum, long)]
pub provider: Option<AuthProvider>,
}
#[derive(clap::ArgEnum, Debug, Clone, Copy)]
pub enum AuthProvider {
Microsoft,
Github,
Microsoft,
Github,
}

View file

@ -8,8 +8,8 @@ use crate::{log, state::LauncherPaths};
use super::args::Cli;
pub struct CommandContext {
pub log: log::Logger,
pub paths: LauncherPaths,
pub args: Cli,
pub http: reqwest::Client,
pub log: log::Logger,
pub paths: LauncherPaths,
pub args: Cli,
pub http: reqwest::Client,
}

View file

@ -10,126 +10,126 @@ use std::io::{BufWriter, Write};
use super::args::OutputFormat;
pub struct Column {
max_width: usize,
heading: &'static str,
data: Vec<String>,
max_width: usize,
heading: &'static str,
data: Vec<String>,
}
impl Column {
pub fn new(heading: &'static str) -> Self {
Column {
max_width: heading.len(),
heading,
data: vec![],
}
}
pub fn new(heading: &'static str) -> Self {
Column {
max_width: heading.len(),
heading,
data: vec![],
}
}
pub fn add_row(&mut self, row: String) {
self.max_width = std::cmp::max(self.max_width, row.len());
self.data.push(row);
}
pub fn add_row(&mut self, row: String) {
self.max_width = std::cmp::max(self.max_width, row.len());
self.data.push(row);
}
}
impl OutputFormat {
pub fn print_table(&self, table: OutputTable) -> Result<(), std::io::Error> {
match *self {
OutputFormat::Json => JsonTablePrinter().print(table, &mut std::io::stdout()),
OutputFormat::Text => TextTablePrinter().print(table, &mut std::io::stdout()),
}
}
pub fn print_table(&self, table: OutputTable) -> Result<(), std::io::Error> {
match *self {
OutputFormat::Json => JsonTablePrinter().print(table, &mut std::io::stdout()),
OutputFormat::Text => TextTablePrinter().print(table, &mut std::io::stdout()),
}
}
}
pub struct OutputTable {
cols: Vec<Column>,
cols: Vec<Column>,
}
impl OutputTable {
pub fn new(cols: Vec<Column>) -> Self {
OutputTable { cols }
}
pub fn new(cols: Vec<Column>) -> Self {
OutputTable { cols }
}
}
trait TablePrinter {
fn print(&self, table: OutputTable, out: &mut dyn std::io::Write)
-> Result<(), std::io::Error>;
fn print(&self, table: OutputTable, out: &mut dyn std::io::Write)
-> Result<(), std::io::Error>;
}
pub struct JsonTablePrinter();
impl TablePrinter for JsonTablePrinter {
fn print(
&self,
table: OutputTable,
out: &mut dyn std::io::Write,
) -> Result<(), std::io::Error> {
let mut bw = BufWriter::new(out);
bw.write_all(b"[")?;
fn print(
&self,
table: OutputTable,
out: &mut dyn std::io::Write,
) -> Result<(), std::io::Error> {
let mut bw = BufWriter::new(out);
bw.write_all(b"[")?;
if !table.cols.is_empty() {
let data_len = table.cols[0].data.len();
for i in 0..data_len {
if i > 0 {
bw.write_all(b",{")?;
} else {
bw.write_all(b"{")?;
}
for col in &table.cols {
serde_json::to_writer(&mut bw, col.heading)?;
bw.write_all(b":")?;
serde_json::to_writer(&mut bw, &col.data[i])?;
}
}
}
if !table.cols.is_empty() {
let data_len = table.cols[0].data.len();
for i in 0..data_len {
if i > 0 {
bw.write_all(b",{")?;
} else {
bw.write_all(b"{")?;
}
for col in &table.cols {
serde_json::to_writer(&mut bw, col.heading)?;
bw.write_all(b":")?;
serde_json::to_writer(&mut bw, &col.data[i])?;
}
}
}
bw.write_all(b"]")?;
bw.flush()
}
bw.write_all(b"]")?;
bw.flush()
}
}
/// Type that prints the output as an ASCII, markdown-style table.
pub struct TextTablePrinter();
impl TablePrinter for TextTablePrinter {
fn print(
&self,
table: OutputTable,
out: &mut dyn std::io::Write,
) -> Result<(), std::io::Error> {
let mut bw = BufWriter::new(out);
fn print(
&self,
table: OutputTable,
out: &mut dyn std::io::Write,
) -> Result<(), std::io::Error> {
let mut bw = BufWriter::new(out);
let sizes = table.cols.iter().map(|c| c.max_width).collect::<Vec<_>>();
let sizes = table.cols.iter().map(|c| c.max_width).collect::<Vec<_>>();
// print headers
write_columns(&mut bw, table.cols.iter().map(|c| c.heading), &sizes)?;
// print --- separators
write_columns(
&mut bw,
table.cols.iter().map(|c| "-".repeat(c.max_width)),
&sizes,
)?;
// print each column
if !table.cols.is_empty() {
let data_len = table.cols[0].data.len();
for i in 0..data_len {
write_columns(&mut bw, table.cols.iter().map(|c| &c.data[i]), &sizes)?;
}
}
// print headers
write_columns(&mut bw, table.cols.iter().map(|c| c.heading), &sizes)?;
// print --- separators
write_columns(
&mut bw,
table.cols.iter().map(|c| "-".repeat(c.max_width)),
&sizes,
)?;
// print each column
if !table.cols.is_empty() {
let data_len = table.cols[0].data.len();
for i in 0..data_len {
write_columns(&mut bw, table.cols.iter().map(|c| &c.data[i]), &sizes)?;
}
}
bw.flush()
}
bw.flush()
}
}
fn write_columns<T>(
mut w: impl Write,
cols: impl Iterator<Item = T>,
sizes: &[usize],
mut w: impl Write,
cols: impl Iterator<Item = T>,
sizes: &[usize],
) -> Result<(), std::io::Error>
where
T: Display,
T: Display,
{
w.write_all(b"|")?;
for (i, col) in cols.enumerate() {
write!(w, " {:width$} |", col, width = sizes[i])?;
}
w.write_all(b"\r\n")
w.write_all(b"|")?;
for (i, col) in cols.enumerate() {
write!(w, " {:width$} |", col, width = sizes[i])?;
}
w.write_all(b"\r\n")
}

View file

@ -9,253 +9,253 @@ use async_trait::async_trait;
use tokio::sync::oneshot;
use super::{
args::{
AuthProvider, Cli, ExistingTunnelArgs, TunnelRenameArgs, TunnelServeArgs,
TunnelServiceSubCommands, TunnelUserSubCommands,
},
CommandContext,
args::{
AuthProvider, Cli, ExistingTunnelArgs, TunnelRenameArgs, TunnelServeArgs,
TunnelServiceSubCommands, TunnelUserSubCommands,
},
CommandContext,
};
use crate::{
auth::Auth,
log::{self, Logger},
state::LauncherPaths,
tunnels::{
code_server::CodeServerArgs, create_service_manager, dev_tunnels, legal,
paths::get_all_servers, ServiceContainer, ServiceManager,
},
util::{
errors::{wrap, AnyError},
prereqs::PreReqChecker,
},
auth::Auth,
log::{self, Logger},
state::LauncherPaths,
tunnels::{
code_server::CodeServerArgs, create_service_manager, dev_tunnels, legal,
paths::get_all_servers, ServiceContainer, ServiceManager,
},
util::{
errors::{wrap, AnyError},
prereqs::PreReqChecker,
},
};
impl From<AuthProvider> for crate::auth::AuthProvider {
fn from(auth_provider: AuthProvider) -> Self {
match auth_provider {
AuthProvider::Github => crate::auth::AuthProvider::Github,
AuthProvider::Microsoft => crate::auth::AuthProvider::Microsoft,
}
}
fn from(auth_provider: AuthProvider) -> Self {
match auth_provider {
AuthProvider::Github => crate::auth::AuthProvider::Github,
AuthProvider::Microsoft => crate::auth::AuthProvider::Microsoft,
}
}
}
impl From<ExistingTunnelArgs> for Option<dev_tunnels::ExistingTunnel> {
fn from(d: ExistingTunnelArgs) -> Option<dev_tunnels::ExistingTunnel> {
if let (Some(tunnel_id), Some(tunnel_name), Some(cluster), Some(host_token)) =
(d.tunnel_id, d.tunnel_name, d.cluster, d.host_token)
{
Some(dev_tunnels::ExistingTunnel {
tunnel_id,
tunnel_name,
host_token,
cluster,
})
} else {
None
}
}
fn from(d: ExistingTunnelArgs) -> Option<dev_tunnels::ExistingTunnel> {
if let (Some(tunnel_id), Some(tunnel_name), Some(cluster), Some(host_token)) =
(d.tunnel_id, d.tunnel_name, d.cluster, d.host_token)
{
Some(dev_tunnels::ExistingTunnel {
tunnel_id,
tunnel_name,
host_token,
cluster,
})
} else {
None
}
}
}
struct TunnelServiceContainer {
args: Cli,
args: Cli,
}
impl TunnelServiceContainer {
fn new(args: Cli) -> Self {
Self { args }
}
fn new(args: Cli) -> Self {
Self { args }
}
}
#[async_trait]
impl ServiceContainer for TunnelServiceContainer {
async fn run_service(
&mut self,
log: log::Logger,
launcher_paths: LauncherPaths,
shutdown_rx: oneshot::Receiver<()>,
) -> Result<(), AnyError> {
let csa = (&self.args).into();
serve_with_csa(
launcher_paths,
log,
TunnelServeArgs {
random_name: true, // avoid prompting
..Default::default()
},
csa,
Some(shutdown_rx),
)
.await?;
Ok(())
}
async fn run_service(
&mut self,
log: log::Logger,
launcher_paths: LauncherPaths,
shutdown_rx: oneshot::Receiver<()>,
) -> Result<(), AnyError> {
let csa = (&self.args).into();
serve_with_csa(
launcher_paths,
log,
TunnelServeArgs {
random_name: true, // avoid prompting
..Default::default()
},
csa,
Some(shutdown_rx),
)
.await?;
Ok(())
}
}
pub async fn service(
ctx: CommandContext,
service_args: TunnelServiceSubCommands,
ctx: CommandContext,
service_args: TunnelServiceSubCommands,
) -> Result<i32, AnyError> {
let manager = create_service_manager(ctx.log.clone());
match service_args {
TunnelServiceSubCommands::Install => {
// ensure logged in, otherwise subsequent serving will fail
Auth::new(&ctx.paths, ctx.log.clone())
.get_credential()
.await?;
let manager = create_service_manager(ctx.log.clone());
match service_args {
TunnelServiceSubCommands::Install => {
// ensure logged in, otherwise subsequent serving will fail
Auth::new(&ctx.paths, ctx.log.clone())
.get_credential()
.await?;
// likewise for license consent
legal::require_consent(&ctx.paths)?;
// likewise for license consent
legal::require_consent(&ctx.paths)?;
let current_exe =
std::env::current_exe().map_err(|e| wrap(e, "could not get current exe"))?;
let current_exe =
std::env::current_exe().map_err(|e| wrap(e, "could not get current exe"))?;
manager.register(
current_exe,
&[
"--cli-data-dir",
ctx.paths.root().as_os_str().to_string_lossy().as_ref(),
"tunnel",
"service",
"internal-run",
],
)?;
ctx.log.result("Service successfully installed! You can use `code tunnel service log` to monitor it, and `code tunnel service uninstall` to remove it.");
}
TunnelServiceSubCommands::Uninstall => {
manager.unregister()?;
}
TunnelServiceSubCommands::InternalRun => {
manager.run(ctx.paths.clone(), TunnelServiceContainer::new(ctx.args))?;
}
}
manager.register(
current_exe,
&[
"--cli-data-dir",
ctx.paths.root().as_os_str().to_string_lossy().as_ref(),
"tunnel",
"service",
"internal-run",
],
)?;
ctx.log.result("Service successfully installed! You can use `code tunnel service log` to monitor it, and `code tunnel service uninstall` to remove it.");
}
TunnelServiceSubCommands::Uninstall => {
manager.unregister()?;
}
TunnelServiceSubCommands::InternalRun => {
manager.run(ctx.paths.clone(), TunnelServiceContainer::new(ctx.args))?;
}
}
Ok(0)
Ok(0)
}
pub async fn user(ctx: CommandContext, user_args: TunnelUserSubCommands) -> Result<i32, AnyError> {
let auth = Auth::new(&ctx.paths, ctx.log.clone());
match user_args {
TunnelUserSubCommands::Login(login_args) => {
auth.login(
login_args.provider.map(|p| p.into()),
login_args.access_token.to_owned(),
)
.await?;
}
TunnelUserSubCommands::Logout => {
auth.clear_credentials()?;
}
TunnelUserSubCommands::Show => {
if let Ok(Some(_)) = auth.get_current_credential() {
ctx.log.result("logged in");
} else {
ctx.log.result("not logged in");
return Ok(1);
}
}
}
let auth = Auth::new(&ctx.paths, ctx.log.clone());
match user_args {
TunnelUserSubCommands::Login(login_args) => {
auth.login(
login_args.provider.map(|p| p.into()),
login_args.access_token.to_owned(),
)
.await?;
}
TunnelUserSubCommands::Logout => {
auth.clear_credentials()?;
}
TunnelUserSubCommands::Show => {
if let Ok(Some(_)) = auth.get_current_credential() {
ctx.log.result("logged in");
} else {
ctx.log.result("not logged in");
return Ok(1);
}
}
}
Ok(0)
Ok(0)
}
/// Remove the tunnel used by this gateway, if any.
pub async fn rename(ctx: CommandContext, rename_args: TunnelRenameArgs) -> Result<i32, AnyError> {
let auth = Auth::new(&ctx.paths, ctx.log.clone());
let mut dt = dev_tunnels::DevTunnels::new(&ctx.log, auth, &ctx.paths);
dt.rename_tunnel(&rename_args.name).await?;
ctx.log.result(&format!(
"Successfully renamed this gateway to {}",
&rename_args.name
));
let auth = Auth::new(&ctx.paths, ctx.log.clone());
let mut dt = dev_tunnels::DevTunnels::new(&ctx.log, auth, &ctx.paths);
dt.rename_tunnel(&rename_args.name).await?;
ctx.log.result(&format!(
"Successfully renamed this gateway to {}",
&rename_args.name
));
Ok(0)
Ok(0)
}
/// Remove the tunnel used by this gateway, if any.
pub async fn unregister(ctx: CommandContext) -> Result<i32, AnyError> {
let auth = Auth::new(&ctx.paths, ctx.log.clone());
let mut dt = dev_tunnels::DevTunnels::new(&ctx.log, auth, &ctx.paths);
dt.remove_tunnel().await?;
Ok(0)
let auth = Auth::new(&ctx.paths, ctx.log.clone());
let mut dt = dev_tunnels::DevTunnels::new(&ctx.log, auth, &ctx.paths);
dt.remove_tunnel().await?;
Ok(0)
}
/// Removes unused servers.
pub async fn prune(ctx: CommandContext) -> Result<i32, AnyError> {
get_all_servers(&ctx.paths)
.into_iter()
.map(|s| s.server_paths(&ctx.paths))
.filter(|s| s.get_running_pid().is_none())
.try_for_each(|s| {
ctx.log
.result(&format!("Deleted {}", s.server_dir.display()));
s.delete()
})
.map_err(AnyError::from)?;
get_all_servers(&ctx.paths)
.into_iter()
.map(|s| s.server_paths(&ctx.paths))
.filter(|s| s.get_running_pid().is_none())
.try_for_each(|s| {
ctx.log
.result(&format!("Deleted {}", s.server_dir.display()));
s.delete()
})
.map_err(AnyError::from)?;
ctx.log.result("Successfully removed all unused servers");
ctx.log.result("Successfully removed all unused servers");
Ok(0)
Ok(0)
}
/// Starts the gateway server.
pub async fn serve(ctx: CommandContext, gateway_args: TunnelServeArgs) -> Result<i32, AnyError> {
let CommandContext {
log, paths, args, ..
} = ctx;
let CommandContext {
log, paths, args, ..
} = ctx;
legal::require_consent(&paths)?;
legal::require_consent(&paths)?;
let csa = (&args).into();
serve_with_csa(paths, log, gateway_args, csa, None).await
let csa = (&args).into();
serve_with_csa(paths, log, gateway_args, csa, None).await
}
async fn serve_with_csa(
paths: LauncherPaths,
log: Logger,
gateway_args: TunnelServeArgs,
csa: CodeServerArgs,
shutdown_rx: Option<oneshot::Receiver<()>>,
paths: LauncherPaths,
log: Logger,
gateway_args: TunnelServeArgs,
csa: CodeServerArgs,
shutdown_rx: Option<oneshot::Receiver<()>>,
) -> Result<i32, AnyError> {
let platform = spanf!(log, log.span("prereq"), PreReqChecker::new().verify())?;
let platform = spanf!(log, log.span("prereq"), PreReqChecker::new().verify())?;
let auth = Auth::new(&paths, log.clone());
let mut dt = dev_tunnels::DevTunnels::new(&log, auth, &paths);
let tunnel = if let Some(d) = gateway_args.tunnel.clone().into() {
dt.start_existing_tunnel(d).await
} else {
dt.start_new_launcher_tunnel(gateway_args.random_name).await
}?;
let auth = Auth::new(&paths, log.clone());
let mut dt = dev_tunnels::DevTunnels::new(&log, auth, &paths);
let tunnel = if let Some(d) = gateway_args.tunnel.clone().into() {
dt.start_existing_tunnel(d).await
} else {
dt.start_new_launcher_tunnel(gateway_args.random_name).await
}?;
let shutdown_tx = if let Some(tx) = shutdown_rx {
tx
} else {
let (tx, rx) = oneshot::channel();
tokio::spawn(async move {
tokio::signal::ctrl_c().await.ok();
tx.send(()).ok();
});
rx
};
let shutdown_tx = if let Some(tx) = shutdown_rx {
tx
} else {
let (tx, rx) = oneshot::channel();
tokio::spawn(async move {
tokio::signal::ctrl_c().await.ok();
tx.send(()).ok();
});
rx
};
let mut r = crate::tunnels::serve(&log, tunnel, &paths, &csa, platform, shutdown_tx).await?;
r.tunnel.close().await.ok();
let mut r = crate::tunnels::serve(&log, tunnel, &paths, &csa, platform, shutdown_tx).await?;
r.tunnel.close().await.ok();
if r.respawn {
warning!(log, "respawn requested, starting new server");
// reuse current args, but specify no-forward since tunnels will
// already be running in this process, and we cannot do a login
let args = std::env::args().skip(1).collect::<Vec<String>>();
let exit = std::process::Command::new(std::env::current_exe().unwrap())
.args(args)
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.stdin(Stdio::inherit())
.spawn()
.map_err(|e| wrap(e, "error respawning after update"))?
.wait()
.map_err(|e| wrap(e, "error waiting for child"))?;
if r.respawn {
warning!(log, "respawn requested, starting new server");
// reuse current args, but specify no-forward since tunnels will
// already be running in this process, and we cannot do a login
let args = std::env::args().skip(1).collect::<Vec<String>>();
let exit = std::process::Command::new(std::env::current_exe().unwrap())
.args(args)
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.stdin(Stdio::inherit())
.spawn()
.map_err(|e| wrap(e, "error respawning after update"))?
.wait()
.map_err(|e| wrap(e, "error waiting for child"))?;
return Ok(exit.code().unwrap_or(1));
}
return Ok(exit.code().unwrap_or(1));
}
Ok(0)
Ok(0)
}

View file

@ -4,63 +4,63 @@
*--------------------------------------------------------------------------------------------*/
use crate::{
desktop::{CodeVersionManager, RequestedVersion},
log,
update_service::UpdateService,
util::{errors::AnyError, prereqs::PreReqChecker},
desktop::{CodeVersionManager, RequestedVersion},
log,
update_service::UpdateService,
util::{errors::AnyError, prereqs::PreReqChecker},
};
use super::{
args::{OutputFormatOptions, UninstallVersionArgs, UseVersionArgs},
output::{Column, OutputTable},
CommandContext,
args::{OutputFormatOptions, UninstallVersionArgs, UseVersionArgs},
output::{Column, OutputTable},
CommandContext,
};
pub async fn switch_to(ctx: CommandContext, args: UseVersionArgs) -> Result<i32, AnyError> {
let platform = PreReqChecker::new().verify().await?;
let vm = CodeVersionManager::new(&ctx.paths, platform);
let version = RequestedVersion::try_from(args.name.as_str())?;
let platform = PreReqChecker::new().verify().await?;
let vm = CodeVersionManager::new(&ctx.paths, platform);
let version = RequestedVersion::try_from(args.name.as_str())?;
if !args.reinstall && vm.try_get_entrypoint(&version).await.is_some() {
vm.set_preferred_version(&version)?;
print_now_using(&ctx.log, &version);
return Ok(0);
}
if !args.reinstall && vm.try_get_entrypoint(&version).await.is_some() {
vm.set_preferred_version(&version)?;
print_now_using(&ctx.log, &version);
return Ok(0);
}
let update_service = UpdateService::new(ctx.log.clone(), ctx.http.clone());
vm.install(&update_service, &version).await?;
vm.set_preferred_version(&version)?;
print_now_using(&ctx.log, &version);
Ok(0)
let update_service = UpdateService::new(ctx.log.clone(), ctx.http.clone());
vm.install(&update_service, &version).await?;
vm.set_preferred_version(&version)?;
print_now_using(&ctx.log, &version);
Ok(0)
}
pub async fn list(ctx: CommandContext, args: OutputFormatOptions) -> Result<i32, AnyError> {
let platform = PreReqChecker::new().verify().await?;
let vm = CodeVersionManager::new(&ctx.paths, platform);
let platform = PreReqChecker::new().verify().await?;
let vm = CodeVersionManager::new(&ctx.paths, platform);
let mut name = Column::new("Installation");
let mut command = Column::new("Command");
for version in vm.list() {
name.add_row(version.to_string());
command.add_row(version.get_command());
}
args.format
.print_table(OutputTable::new(vec![name, command]))
.ok();
let mut name = Column::new("Installation");
let mut command = Column::new("Command");
for version in vm.list() {
name.add_row(version.to_string());
command.add_row(version.get_command());
}
args.format
.print_table(OutputTable::new(vec![name, command]))
.ok();
Ok(0)
Ok(0)
}
pub async fn uninstall(ctx: CommandContext, args: UninstallVersionArgs) -> Result<i32, AnyError> {
let platform = PreReqChecker::new().verify().await?;
let vm = CodeVersionManager::new(&ctx.paths, platform);
let version = RequestedVersion::try_from(args.name.as_str())?;
vm.uninstall(&version).await?;
ctx.log
.result(&format!("VS Code {} uninstalled successfully", version));
Ok(0)
let platform = PreReqChecker::new().verify().await?;
let vm = CodeVersionManager::new(&ctx.paths, platform);
let version = RequestedVersion::try_from(args.name.as_str())?;
vm.uninstall(&version).await?;
ctx.log
.result(&format!("VS Code {} uninstalled successfully", version));
Ok(0)
}
fn print_now_using(log: &log::Logger, version: &RequestedVersion) {
log.result(&format!("Now using VS Code {}", version));
log.result(&format!("Now using VS Code {}", version));
}

View file

@ -8,37 +8,26 @@ use lazy_static::lazy_static;
pub const CONTROL_PORT: u16 = 31545;
pub const PROTOCOL_VERSION: u32 = 1;
pub const LAUNCHER_VERSION: Option<&'static str> = option_env!("LAUNCHER_VERSION");
pub const LAUNCHER_ASSET_NAME: Option<&'static str> =
if cfg!(all(target_os = "macos", target_arch = "x86_64")) {
Some("x86_64-apple-darwin-signed")
} else if cfg!(all(target_os = "macos", target_arch = "aarch64")) {
Some("aarch64-apple-darwin-signed")
} else if cfg!(all(target_os = "windows", target_arch = "x86_64")) {
Some("x86_64-pc-windows-msvc-signed")
} else if cfg!(all(target_os = "windows", target_arch = "aarch64")) {
Some("aarch64-pc-windows-msvc-signed")
} else {
option_env!("LAUNCHER_ASSET_NAME")
};
pub const LAUNCHER_AI_KEY: Option<&'static str> = option_env!("LAUNCHER_AI_KEY");
pub const LAUNCHER_AI_ENDPOINT: Option<&'static str> = option_env!("LAUNCHER_AI_ENDPOINT");
pub const VSCODE_CLI_UPDATE_ENDPOINT: Option<&'static str> = option_env!("LAUNCHER_AI_ENDPOINT");
pub const VSCODE_CLI_VERSION: Option<&'static str> = option_env!("VSCODE_CLI_VERSION");
pub const VSCODE_CLI_ASSET_NAME: Option<&'static str> = option_env!("VSCODE_CLI_ASSET_NAME");
pub const VSCODE_CLI_AI_KEY: Option<&'static str> = option_env!("VSCODE_CLI_AI_KEY");
pub const VSCODE_CLI_AI_ENDPOINT: Option<&'static str> = option_env!("VSCODE_CLI_AI_ENDPOINT");
pub const VSCODE_CLI_UPDATE_ENDPOINT: Option<&'static str> =
option_env!("VSCODE_CLI_UPDATE_ENDPOINT");
pub const TUNNEL_SERVICE_USER_AGENT_ENV_VAR: &str = "TUNNEL_SERVICE_USER_AGENT";
pub fn get_default_user_agent() -> String {
format!(
"vscode-server-launcher/{}",
LAUNCHER_VERSION.unwrap_or("dev")
)
format!(
"vscode-server-launcher/{}",
VSCODE_CLI_VERSION.unwrap_or("dev")
)
}
lazy_static! {
pub static ref TUNNEL_SERVICE_USER_AGENT: String =
match std::env::var(TUNNEL_SERVICE_USER_AGENT_ENV_VAR) {
Ok(ua) if !ua.is_empty() => format!("{} {}", ua, get_default_user_agent()),
_ => get_default_user_agent(),
};
pub static ref TUNNEL_SERVICE_USER_AGENT: String =
match std::env::var(TUNNEL_SERVICE_USER_AGENT_ENV_VAR) {
Ok(ua) if !ua.is_empty() => format!("{} {}", ua, get_default_user_agent()),
_ => get_default_user_agent(),
};
}

View file

@ -4,8 +4,8 @@
*--------------------------------------------------------------------------------------------*/
use std::{
fmt,
path::{Path, PathBuf},
fmt,
path::{Path, PathBuf},
};
use indicatif::ProgressBar;
@ -15,478 +15,478 @@ use serde::{Deserialize, Serialize};
use tokio::fs::remove_dir_all;
use crate::{
options,
state::{LauncherPaths, PersistedState},
update_service::{unzip_downloaded_release, Platform, Release, TargetKind, UpdateService},
util::{
errors::{
wrap, AnyError, InvalidRequestedVersion, MissingEntrypointError,
NoInstallInUserProvidedPath, UserCancelledInstallation, WrappedError,
},
http,
input::{prompt_yn, ProgressBarReporter},
},
options,
state::{LauncherPaths, PersistedState},
update_service::{unzip_downloaded_release, Platform, Release, TargetKind, UpdateService},
util::{
errors::{
wrap, AnyError, InvalidRequestedVersion, MissingEntrypointError,
NoInstallInUserProvidedPath, UserCancelledInstallation, WrappedError,
},
http,
input::{prompt_yn, ProgressBarReporter},
},
};
/// Parsed instance that a user can request.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(tag = "t", content = "c")]
pub enum RequestedVersion {
Quality(options::Quality),
Version {
version: String,
quality: options::Quality,
},
Commit {
commit: String,
quality: options::Quality,
},
Path(String),
Quality(options::Quality),
Version {
version: String,
quality: options::Quality,
},
Commit {
commit: String,
quality: options::Quality,
},
Path(String),
}
lazy_static! {
static ref SEMVER_RE: Regex = Regex::new(r"^\d+\.\d+\.\d+(-insider)?$").unwrap();
static ref COMMIT_RE: Regex = Regex::new(r"^[a-z]+/[a-e0-f]{40}$").unwrap();
static ref SEMVER_RE: Regex = Regex::new(r"^\d+\.\d+\.\d+(-insider)?$").unwrap();
static ref COMMIT_RE: Regex = Regex::new(r"^[a-z]+/[a-e0-f]{40}$").unwrap();
}
impl RequestedVersion {
pub fn get_command(&self) -> String {
match self {
RequestedVersion::Quality(quality) => {
format!("code version use {}", quality.get_machine_name())
}
RequestedVersion::Version { version, .. } => {
format!("code version use {}", version)
}
RequestedVersion::Commit { commit, quality } => {
format!("code version use {}/{}", quality.get_machine_name(), commit)
}
RequestedVersion::Path(path) => {
format!("code version use {}", path)
}
}
}
pub fn get_command(&self) -> String {
match self {
RequestedVersion::Quality(quality) => {
format!("code version use {}", quality.get_machine_name())
}
RequestedVersion::Version { version, .. } => {
format!("code version use {}", version)
}
RequestedVersion::Commit { commit, quality } => {
format!("code version use {}/{}", quality.get_machine_name(), commit)
}
RequestedVersion::Path(path) => {
format!("code version use {}", path)
}
}
}
}
impl std::fmt::Display for RequestedVersion {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
RequestedVersion::Quality(quality) => write!(f, "{}", quality.get_capitalized_name()),
RequestedVersion::Version { version, .. } => {
write!(f, "{}", version)
}
RequestedVersion::Commit { commit, quality } => {
write!(f, "{}/{}", quality, commit)
}
RequestedVersion::Path(path) => write!(f, "{}", path),
}
}
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
RequestedVersion::Quality(quality) => write!(f, "{}", quality.get_capitalized_name()),
RequestedVersion::Version { version, .. } => {
write!(f, "{}", version)
}
RequestedVersion::Commit { commit, quality } => {
write!(f, "{}/{}", quality, commit)
}
RequestedVersion::Path(path) => write!(f, "{}", path),
}
}
}
impl TryFrom<&str> for RequestedVersion {
type Error = InvalidRequestedVersion;
type Error = InvalidRequestedVersion;
fn try_from(s: &str) -> Result<Self, Self::Error> {
if let Ok(quality) = options::Quality::try_from(s) {
return Ok(RequestedVersion::Quality(quality));
}
fn try_from(s: &str) -> Result<Self, Self::Error> {
if let Ok(quality) = options::Quality::try_from(s) {
return Ok(RequestedVersion::Quality(quality));
}
if SEMVER_RE.is_match(s) {
return Ok(RequestedVersion::Version {
quality: if s.ends_with("-insider") {
options::Quality::Insiders
} else {
options::Quality::Stable
},
version: s.to_string(),
});
}
if SEMVER_RE.is_match(s) {
return Ok(RequestedVersion::Version {
quality: if s.ends_with("-insider") {
options::Quality::Insiders
} else {
options::Quality::Stable
},
version: s.to_string(),
});
}
if Path::is_absolute(&PathBuf::from(s)) {
return Ok(RequestedVersion::Path(s.to_string()));
}
if Path::is_absolute(&PathBuf::from(s)) {
return Ok(RequestedVersion::Path(s.to_string()));
}
if COMMIT_RE.is_match(s) {
let idx = s.find('/').expect("expected a /");
if let Ok(quality) = options::Quality::try_from(&s[0..idx]) {
return Ok(RequestedVersion::Commit {
commit: s[idx + 1..].to_string(),
quality,
});
}
}
if COMMIT_RE.is_match(s) {
let idx = s.find('/').expect("expected a /");
if let Ok(quality) = options::Quality::try_from(&s[0..idx]) {
return Ok(RequestedVersion::Commit {
commit: s[idx + 1..].to_string(),
quality,
});
}
}
Err(InvalidRequestedVersion())
}
Err(InvalidRequestedVersion())
}
}
#[derive(Serialize, Deserialize, Clone, Default)]
struct Stored {
versions: Vec<RequestedVersion>,
current: usize,
versions: Vec<RequestedVersion>,
current: usize,
}
pub struct CodeVersionManager {
state: PersistedState<Stored>,
platform: Platform,
storage_dir: PathBuf,
state: PersistedState<Stored>,
platform: Platform,
storage_dir: PathBuf,
}
impl CodeVersionManager {
pub fn new(lp: &LauncherPaths, platform: Platform) -> Self {
CodeVersionManager {
state: PersistedState::new(lp.root().join("versions.json")),
storage_dir: lp.root().join("desktop"),
platform,
}
}
pub fn new(lp: &LauncherPaths, platform: Platform) -> Self {
CodeVersionManager {
state: PersistedState::new(lp.root().join("versions.json")),
storage_dir: lp.root().join("desktop"),
platform,
}
}
/// Sets the "version" as the persisted one for the user.
pub fn set_preferred_version(&self, version: &RequestedVersion) -> Result<(), AnyError> {
let mut stored = self.state.load();
if let Some(i) = stored.versions.iter().position(|v| v == version) {
stored.current = i;
} else {
stored.current = stored.versions.len();
stored.versions.push(version.clone());
}
/// Sets the "version" as the persisted one for the user.
pub fn set_preferred_version(&self, version: &RequestedVersion) -> Result<(), AnyError> {
let mut stored = self.state.load();
if let Some(i) = stored.versions.iter().position(|v| v == version) {
stored.current = i;
} else {
stored.current = stored.versions.len();
stored.versions.push(version.clone());
}
self.state.save(stored)?;
self.state.save(stored)?;
Ok(())
}
Ok(())
}
/// Lists installed versions.
pub fn list(&self) -> Vec<RequestedVersion> {
self.state.load().versions
}
/// Lists installed versions.
pub fn list(&self) -> Vec<RequestedVersion> {
self.state.load().versions
}
/// Uninstalls a previously installed version.
pub async fn uninstall(&self, version: &RequestedVersion) -> Result<(), AnyError> {
let mut stored = self.state.load();
if let Some(i) = stored.versions.iter().position(|v| v == version) {
if i > stored.current && i > 0 {
stored.current -= 1;
}
stored.versions.remove(i);
self.state.save(stored)?;
}
/// Uninstalls a previously installed version.
pub async fn uninstall(&self, version: &RequestedVersion) -> Result<(), AnyError> {
let mut stored = self.state.load();
if let Some(i) = stored.versions.iter().position(|v| v == version) {
if i > stored.current && i > 0 {
stored.current -= 1;
}
stored.versions.remove(i);
self.state.save(stored)?;
}
remove_dir_all(self.get_install_dir(version))
.await
.map_err(|e| wrap(e, "error deleting vscode directory"))?;
remove_dir_all(self.get_install_dir(version))
.await
.map_err(|e| wrap(e, "error deleting vscode directory"))?;
Ok(())
}
Ok(())
}
pub fn get_preferred_version(&self) -> RequestedVersion {
let stored = self.state.load();
stored
.versions
.get(stored.current)
.unwrap_or(&RequestedVersion::Quality(options::Quality::Stable))
.clone()
}
pub fn get_preferred_version(&self) -> RequestedVersion {
let stored = self.state.load();
stored
.versions
.get(stored.current)
.unwrap_or(&RequestedVersion::Quality(options::Quality::Stable))
.clone()
}
/// Installs the release for the given request. This always runs and does not
/// prompt, so you may want to use `try_get_entrypoint` first.
pub async fn install(
&self,
update_service: &UpdateService,
version: &RequestedVersion,
) -> Result<PathBuf, AnyError> {
let target_dir = self.get_install_dir(version);
let release = get_release_for_request(update_service, version, self.platform).await?;
install_release_into(update_service, &target_dir, &release).await?;
/// Installs the release for the given request. This always runs and does not
/// prompt, so you may want to use `try_get_entrypoint` first.
pub async fn install(
&self,
update_service: &UpdateService,
version: &RequestedVersion,
) -> Result<PathBuf, AnyError> {
let target_dir = self.get_install_dir(version);
let release = get_release_for_request(update_service, version, self.platform).await?;
install_release_into(update_service, &target_dir, &release).await?;
if let Some(p) = try_get_entrypoint(&target_dir).await {
return Ok(p);
}
if let Some(p) = try_get_entrypoint(&target_dir).await {
return Ok(p);
}
Err(MissingEntrypointError().into())
}
Err(MissingEntrypointError().into())
}
/// Tries to get the entrypoint in the installed version, if one exists.
pub async fn try_get_entrypoint(&self, version: &RequestedVersion) -> Option<PathBuf> {
try_get_entrypoint(&self.get_install_dir(version)).await
}
/// Tries to get the entrypoint in the installed version, if one exists.
pub async fn try_get_entrypoint(&self, version: &RequestedVersion) -> Option<PathBuf> {
try_get_entrypoint(&self.get_install_dir(version)).await
}
fn get_install_dir(&self, version: &RequestedVersion) -> PathBuf {
let (name, quality) = match version {
RequestedVersion::Path(path) => return PathBuf::from(path),
RequestedVersion::Quality(quality) => (quality.get_machine_name(), quality),
RequestedVersion::Version {
quality,
version: number,
} => (number.as_str(), quality),
RequestedVersion::Commit { commit, quality } => (commit.as_str(), quality),
};
fn get_install_dir(&self, version: &RequestedVersion) -> PathBuf {
let (name, quality) = match version {
RequestedVersion::Path(path) => return PathBuf::from(path),
RequestedVersion::Quality(quality) => (quality.get_machine_name(), quality),
RequestedVersion::Version {
quality,
version: number,
} => (number.as_str(), quality),
RequestedVersion::Commit { commit, quality } => (commit.as_str(), quality),
};
let mut dir = self.storage_dir.join(name);
if cfg!(target_os = "macos") {
dir.push(format!("{}.app", quality.get_app_name()))
}
let mut dir = self.storage_dir.join(name);
if cfg!(target_os = "macos") {
dir.push(format!("{}.app", quality.get_app_name()))
}
dir
}
dir
}
}
/// Shows a nice UI prompt to users asking them if they want to install the
/// requested version.
pub fn prompt_to_install(version: &RequestedVersion) -> Result<(), AnyError> {
if let RequestedVersion::Path(path) = version {
return Err(NoInstallInUserProvidedPath(path.clone()).into());
}
if let RequestedVersion::Path(path) = version {
return Err(NoInstallInUserProvidedPath(path.clone()).into());
}
if !prompt_yn(&format!(
"VS Code {} is not installed yet, install it now?",
version
))? {
return Err(UserCancelledInstallation().into());
}
if !prompt_yn(&format!(
"VS Code {} is not installed yet, install it now?",
version
))? {
return Err(UserCancelledInstallation().into());
}
Ok(())
Ok(())
}
async fn get_release_for_request(
update_service: &UpdateService,
request: &RequestedVersion,
platform: Platform,
update_service: &UpdateService,
request: &RequestedVersion,
platform: Platform,
) -> Result<Release, WrappedError> {
match request {
RequestedVersion::Version {
quality,
version: number,
} => update_service
.get_release_by_semver_version(platform, TargetKind::Archive, *quality, number)
.await
.map_err(|e| wrap(e, "Could not get release")),
RequestedVersion::Commit { commit, quality } => Ok(Release {
platform,
commit: commit.clone(),
quality: *quality,
target: TargetKind::Archive,
}),
RequestedVersion::Quality(quality) => update_service
.get_latest_commit(platform, TargetKind::Archive, *quality)
.await
.map_err(|e| wrap(e, "Could not get release")),
_ => panic!("cannot get release info for a path"),
}
match request {
RequestedVersion::Version {
quality,
version: number,
} => update_service
.get_release_by_semver_version(platform, TargetKind::Archive, *quality, number)
.await
.map_err(|e| wrap(e, "Could not get release")),
RequestedVersion::Commit { commit, quality } => Ok(Release {
platform,
commit: commit.clone(),
quality: *quality,
target: TargetKind::Archive,
}),
RequestedVersion::Quality(quality) => update_service
.get_latest_commit(platform, TargetKind::Archive, *quality)
.await
.map_err(|e| wrap(e, "Could not get release")),
_ => panic!("cannot get release info for a path"),
}
}
async fn install_release_into(
update_service: &UpdateService,
path: &Path,
release: &Release,
update_service: &UpdateService,
path: &Path,
release: &Release,
) -> Result<(), AnyError> {
let tempdir =
tempfile::tempdir().map_err(|e| wrap(e, "error creating temporary download dir"))?;
let save_path = tempdir.path().join("vscode");
let tempdir =
tempfile::tempdir().map_err(|e| wrap(e, "error creating temporary download dir"))?;
let save_path = tempdir.path().join("vscode");
let stream = update_service.get_download_stream(release).await?;
let pb = ProgressBar::new(1);
pb.set_message("Downloading...");
let progress = ProgressBarReporter::from(pb);
http::download_into_file(&save_path, progress, stream).await?;
let stream = update_service.get_download_stream(release).await?;
let pb = ProgressBar::new(1);
pb.set_message("Downloading...");
let progress = ProgressBarReporter::from(pb);
http::download_into_file(&save_path, progress, stream).await?;
let pb = ProgressBar::new(1);
pb.set_message("Unzipping...");
let progress = ProgressBarReporter::from(pb);
unzip_downloaded_release(&save_path, path, progress)?;
let pb = ProgressBar::new(1);
pb.set_message("Unzipping...");
let progress = ProgressBarReporter::from(pb);
unzip_downloaded_release(&save_path, path, progress)?;
drop(tempdir);
drop(tempdir);
Ok(())
Ok(())
}
/// Tries to find the binary entrypoint for VS Code installed in the path.
async fn try_get_entrypoint(path: &Path) -> Option<PathBuf> {
use tokio::sync::mpsc;
use tokio::sync::mpsc;
let (tx, mut rx) = mpsc::channel(1);
let (tx, mut rx) = mpsc::channel(1);
// Look for all the possible paths in parallel
for entry in DESKTOP_CLI_RELATIVE_PATH.split(',') {
let my_path = path.join(entry);
let my_tx = tx.clone();
tokio::spawn(async move {
if tokio::fs::metadata(&my_path).await.is_ok() {
my_tx.send(my_path).await.ok();
}
});
}
// Look for all the possible paths in parallel
for entry in DESKTOP_CLI_RELATIVE_PATH.split(',') {
let my_path = path.join(entry);
let my_tx = tx.clone();
tokio::spawn(async move {
if tokio::fs::metadata(&my_path).await.is_ok() {
my_tx.send(my_path).await.ok();
}
});
}
drop(tx); // drop so rx gets None if no sender emits
drop(tx); // drop so rx gets None if no sender emits
rx.recv().await
rx.recv().await
}
const DESKTOP_CLI_RELATIVE_PATH: &str = if cfg!(target_os = "macos") {
"Contents/Resources/app/bin/code"
"Contents/Resources/app/bin/code"
} else if cfg!(target_os = "windows") {
"bin/code.cmd,bin/code-insiders.cmd,bin/code-exploration.cmd"
"bin/code.cmd,bin/code-insiders.cmd,bin/code-exploration.cmd"
} else {
"bin/code,bin/code-insiders,bin/code-exploration"
"bin/code,bin/code-insiders,bin/code-exploration"
};
#[cfg(test)]
mod tests {
use std::{
fs::{create_dir_all, File},
io::Write,
};
use std::{
fs::{create_dir_all, File},
io::Write,
};
use super::*;
use super::*;
fn make_fake_vscode_install(path: &Path, quality: options::Quality) {
let bin = DESKTOP_CLI_RELATIVE_PATH
.split(',')
.next()
.expect("expected exe path");
fn make_fake_vscode_install(path: &Path, quality: options::Quality) {
let bin = DESKTOP_CLI_RELATIVE_PATH
.split(',')
.next()
.expect("expected exe path");
let binary_file_path = if cfg!(target_os = "macos") {
path.join(format!("{}.app/{}", quality.get_app_name(), bin))
} else {
path.join(bin)
};
let binary_file_path = if cfg!(target_os = "macos") {
path.join(format!("{}.app/{}", quality.get_app_name(), bin))
} else {
path.join(bin)
};
let parent_dir_path = binary_file_path.parent().expect("expected parent path");
let parent_dir_path = binary_file_path.parent().expect("expected parent path");
create_dir_all(parent_dir_path).expect("expected to create parent dir");
create_dir_all(parent_dir_path).expect("expected to create parent dir");
let mut binary_file = File::create(binary_file_path).expect("expected to make file");
binary_file
.write_all(b"")
.expect("expected to write binary");
}
let mut binary_file = File::create(binary_file_path).expect("expected to make file");
binary_file
.write_all(b"")
.expect("expected to write binary");
}
fn make_multiple_vscode_install() -> tempfile::TempDir {
let dir = tempfile::tempdir().expect("expected to make temp dir");
make_fake_vscode_install(&dir.path().join("desktop/stable"), options::Quality::Stable);
make_fake_vscode_install(&dir.path().join("desktop/1.68.2"), options::Quality::Stable);
dir
}
fn make_multiple_vscode_install() -> tempfile::TempDir {
let dir = tempfile::tempdir().expect("expected to make temp dir");
make_fake_vscode_install(&dir.path().join("desktop/stable"), options::Quality::Stable);
make_fake_vscode_install(&dir.path().join("desktop/1.68.2"), options::Quality::Stable);
dir
}
#[test]
fn test_requested_version_parses() {
assert_eq!(
RequestedVersion::try_from("1.2.3").unwrap(),
RequestedVersion::Version {
quality: options::Quality::Stable,
version: "1.2.3".to_string(),
}
);
#[test]
fn test_requested_version_parses() {
assert_eq!(
RequestedVersion::try_from("1.2.3").unwrap(),
RequestedVersion::Version {
quality: options::Quality::Stable,
version: "1.2.3".to_string(),
}
);
assert_eq!(
RequestedVersion::try_from("1.2.3-insider").unwrap(),
RequestedVersion::Version {
quality: options::Quality::Insiders,
version: "1.2.3-insider".to_string(),
}
);
assert_eq!(
RequestedVersion::try_from("1.2.3-insider").unwrap(),
RequestedVersion::Version {
quality: options::Quality::Insiders,
version: "1.2.3-insider".to_string(),
}
);
assert_eq!(
RequestedVersion::try_from("stable").unwrap(),
RequestedVersion::Quality(options::Quality::Stable)
);
assert_eq!(
RequestedVersion::try_from("stable").unwrap(),
RequestedVersion::Quality(options::Quality::Stable)
);
assert_eq!(
RequestedVersion::try_from("insiders").unwrap(),
RequestedVersion::Quality(options::Quality::Insiders)
);
assert_eq!(
RequestedVersion::try_from("insiders").unwrap(),
RequestedVersion::Quality(options::Quality::Insiders)
);
assert_eq!(
RequestedVersion::try_from("insiders/92fd228156aafeb326b23f6604028d342152313b")
.unwrap(),
RequestedVersion::Commit {
commit: "92fd228156aafeb326b23f6604028d342152313b".to_string(),
quality: options::Quality::Insiders
}
);
assert_eq!(
RequestedVersion::try_from("insiders/92fd228156aafeb326b23f6604028d342152313b")
.unwrap(),
RequestedVersion::Commit {
commit: "92fd228156aafeb326b23f6604028d342152313b".to_string(),
quality: options::Quality::Insiders
}
);
assert_eq!(
RequestedVersion::try_from("stable/92fd228156aafeb326b23f6604028d342152313b").unwrap(),
RequestedVersion::Commit {
commit: "92fd228156aafeb326b23f6604028d342152313b".to_string(),
quality: options::Quality::Stable
}
);
assert_eq!(
RequestedVersion::try_from("stable/92fd228156aafeb326b23f6604028d342152313b").unwrap(),
RequestedVersion::Commit {
commit: "92fd228156aafeb326b23f6604028d342152313b".to_string(),
quality: options::Quality::Stable
}
);
let exe = std::env::current_exe()
.expect("expected to get exe")
.to_string_lossy()
.to_string();
assert_eq!(
RequestedVersion::try_from((&exe).as_str()).unwrap(),
RequestedVersion::Path(exe),
);
}
let exe = std::env::current_exe()
.expect("expected to get exe")
.to_string_lossy()
.to_string();
assert_eq!(
RequestedVersion::try_from((&exe).as_str()).unwrap(),
RequestedVersion::Path(exe),
);
}
#[test]
fn test_set_preferred_version() {
let dir = make_multiple_vscode_install();
let lp = LauncherPaths::new_without_replacements(dir.path().to_owned());
let vm1 = CodeVersionManager::new(&lp, Platform::LinuxARM64);
#[test]
fn test_set_preferred_version() {
let dir = make_multiple_vscode_install();
let lp = LauncherPaths::new_without_replacements(dir.path().to_owned());
let vm1 = CodeVersionManager::new(&lp, Platform::LinuxARM64);
assert_eq!(
vm1.get_preferred_version(),
RequestedVersion::Quality(options::Quality::Stable)
);
vm1.set_preferred_version(&RequestedVersion::Quality(options::Quality::Exploration))
.expect("expected to store");
vm1.set_preferred_version(&RequestedVersion::Quality(options::Quality::Insiders))
.expect("expected to store");
assert_eq!(
vm1.get_preferred_version(),
RequestedVersion::Quality(options::Quality::Insiders)
);
assert_eq!(
vm1.get_preferred_version(),
RequestedVersion::Quality(options::Quality::Stable)
);
vm1.set_preferred_version(&RequestedVersion::Quality(options::Quality::Exploration))
.expect("expected to store");
vm1.set_preferred_version(&RequestedVersion::Quality(options::Quality::Insiders))
.expect("expected to store");
assert_eq!(
vm1.get_preferred_version(),
RequestedVersion::Quality(options::Quality::Insiders)
);
let vm2 = CodeVersionManager::new(&lp, Platform::LinuxARM64);
assert_eq!(
vm2.get_preferred_version(),
RequestedVersion::Quality(options::Quality::Insiders)
);
let vm2 = CodeVersionManager::new(&lp, Platform::LinuxARM64);
assert_eq!(
vm2.get_preferred_version(),
RequestedVersion::Quality(options::Quality::Insiders)
);
assert_eq!(
vm2.list(),
vec![
RequestedVersion::Quality(options::Quality::Exploration),
RequestedVersion::Quality(options::Quality::Insiders)
]
);
}
assert_eq!(
vm2.list(),
vec![
RequestedVersion::Quality(options::Quality::Exploration),
RequestedVersion::Quality(options::Quality::Insiders)
]
);
}
#[tokio::test]
async fn test_gets_entrypoint() {
let dir = make_multiple_vscode_install();
let lp = LauncherPaths::new_without_replacements(dir.path().to_owned());
let vm = CodeVersionManager::new(&lp, Platform::LinuxARM64);
#[tokio::test]
async fn test_gets_entrypoint() {
let dir = make_multiple_vscode_install();
let lp = LauncherPaths::new_without_replacements(dir.path().to_owned());
let vm = CodeVersionManager::new(&lp, Platform::LinuxARM64);
assert!(vm
.try_get_entrypoint(&RequestedVersion::Quality(options::Quality::Stable))
.await
.is_some());
assert!(vm
.try_get_entrypoint(&RequestedVersion::Quality(options::Quality::Stable))
.await
.is_some());
assert!(vm
.try_get_entrypoint(&RequestedVersion::Quality(options::Quality::Exploration))
.await
.is_none());
}
assert!(vm
.try_get_entrypoint(&RequestedVersion::Quality(options::Quality::Exploration))
.await
.is_none());
}
#[tokio::test]
async fn test_uninstall() {
let dir = make_multiple_vscode_install();
let lp = LauncherPaths::new_without_replacements(dir.path().to_owned());
let vm = CodeVersionManager::new(&lp, Platform::LinuxARM64);
#[tokio::test]
async fn test_uninstall() {
let dir = make_multiple_vscode_install();
let lp = LauncherPaths::new_without_replacements(dir.path().to_owned());
let vm = CodeVersionManager::new(&lp, Platform::LinuxARM64);
vm.uninstall(&RequestedVersion::Quality(options::Quality::Stable))
.await
.expect("expected to uninsetall");
vm.uninstall(&RequestedVersion::Quality(options::Quality::Stable))
.await
.expect("expected to uninsetall");
assert!(vm
.try_get_entrypoint(&RequestedVersion::Quality(options::Quality::Stable))
.await
.is_none());
}
assert!(vm
.try_get_entrypoint(&RequestedVersion::Quality(options::Quality::Stable))
.await
.is_none());
}
}

View file

@ -12,8 +12,8 @@ pub mod log;
pub mod commands;
pub mod desktop;
pub mod options;
pub mod tunnels;
pub mod state;
pub mod tunnels;
pub mod update;
pub mod update_service;
pub mod util;

View file

@ -5,14 +5,14 @@
use chrono::Local;
use opentelemetry::{
sdk::trace::Tracer,
trace::{SpanBuilder, Tracer as TraitTracer},
sdk::trace::Tracer,
trace::{SpanBuilder, Tracer as TraitTracer},
};
use std::fmt;
use std::{env, path::Path, sync::Arc};
use std::{
io::Write,
sync::atomic::{AtomicU32, Ordering},
io::Write,
sync::atomic::{AtomicU32, Ordering},
};
const NO_COLOR_ENV: &str = "NO_COLOR";
@ -21,282 +21,282 @@ static INSTANCE_COUNTER: AtomicU32 = AtomicU32::new(0);
// Gets a next incrementing number that can be used in logs
pub fn next_counter() -> u32 {
INSTANCE_COUNTER.fetch_add(1, Ordering::SeqCst)
INSTANCE_COUNTER.fetch_add(1, Ordering::SeqCst)
}
// Log level
#[derive(clap::ArgEnum, PartialEq, Eq, PartialOrd, Clone, Copy, Debug)]
pub enum Level {
Trace = 0,
Debug,
Info,
Warn,
Error,
Critical,
Off,
Trace = 0,
Debug,
Info,
Warn,
Error,
Critical,
Off,
}
impl Default for Level {
fn default() -> Self {
Level::Info
}
fn default() -> Self {
Level::Info
}
}
impl fmt::Display for Level {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Level::Critical => write!(f, "critical"),
Level::Debug => write!(f, "debug"),
Level::Error => write!(f, "error"),
Level::Info => write!(f, "info"),
Level::Off => write!(f, "off"),
Level::Trace => write!(f, "trace"),
Level::Warn => write!(f, "warn"),
}
}
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Level::Critical => write!(f, "critical"),
Level::Debug => write!(f, "debug"),
Level::Error => write!(f, "error"),
Level::Info => write!(f, "info"),
Level::Off => write!(f, "off"),
Level::Trace => write!(f, "trace"),
Level::Warn => write!(f, "warn"),
}
}
}
impl Level {
pub fn name(&self) -> Option<&str> {
match self {
Level::Trace => Some("trace"),
Level::Debug => Some("debug"),
Level::Info => Some("info"),
Level::Warn => Some("warn"),
Level::Error => Some("error"),
Level::Critical => Some("critical"),
Level::Off => None,
}
}
pub fn name(&self) -> Option<&str> {
match self {
Level::Trace => Some("trace"),
Level::Debug => Some("debug"),
Level::Info => Some("info"),
Level::Warn => Some("warn"),
Level::Error => Some("error"),
Level::Critical => Some("critical"),
Level::Off => None,
}
}
pub fn color_code(&self) -> Option<&str> {
if env::var(NO_COLOR_ENV).is_ok() || !atty::is(atty::Stream::Stdout) {
return None;
}
pub fn color_code(&self) -> Option<&str> {
if env::var(NO_COLOR_ENV).is_ok() || !atty::is(atty::Stream::Stdout) {
return None;
}
match self {
Level::Trace => None,
Level::Debug => Some("\x1b[36m"),
Level::Info => Some("\x1b[35m"),
Level::Warn => Some("\x1b[33m"),
Level::Error => Some("\x1b[31m"),
Level::Critical => Some("\x1b[31m"),
Level::Off => None,
}
}
match self {
Level::Trace => None,
Level::Debug => Some("\x1b[36m"),
Level::Info => Some("\x1b[35m"),
Level::Warn => Some("\x1b[33m"),
Level::Error => Some("\x1b[31m"),
Level::Critical => Some("\x1b[31m"),
Level::Off => None,
}
}
pub fn to_u8(self) -> u8 {
self as u8
}
pub fn to_u8(self) -> u8 {
self as u8
}
}
pub fn new_tunnel_prefix() -> String {
format!("[tunnel.{}]", next_counter())
format!("[tunnel.{}]", next_counter())
}
pub fn new_code_server_prefix() -> String {
format!("[codeserver.{}]", next_counter())
format!("[codeserver.{}]", next_counter())
}
pub fn new_rpc_prefix() -> String {
format!("[rpc.{}]", next_counter())
format!("[rpc.{}]", next_counter())
}
// Base logger implementation
#[derive(Clone)]
pub struct Logger {
tracer: Tracer,
sink: Vec<Box<dyn LogSink>>,
prefix: Option<String>,
tracer: Tracer,
sink: Vec<Box<dyn LogSink>>,
prefix: Option<String>,
}
// Copy trick from https://stackoverflow.com/a/30353928
pub trait LogSinkClone {
fn clone_box(&self) -> Box<dyn LogSink>;
fn clone_box(&self) -> Box<dyn LogSink>;
}
// Blanket impl: any cloneable LogSink can be boxed-cloned.
impl<T> LogSinkClone for T
where
	T: 'static + LogSink + Clone,
{
	fn clone_box(&self) -> Box<dyn LogSink> {
		Box::new(self.clone())
	}
}
/// Destination for log output (stdout, file, remote, ...).
pub trait LogSink: LogSinkClone + Sync + Send {
	/// Writes a leveled, prefixed log line.
	fn write_log(&self, level: Level, prefix: &str, message: &str);
	/// Writes a "result" line (machine-readable output, no formatting).
	fn write_result(&self, message: &str);
}
impl Clone for Box<dyn LogSink> {
fn clone(&self) -> Box<dyn LogSink> {
self.clone_box()
}
fn clone(&self) -> Box<dyn LogSink> {
self.clone_box()
}
}
#[derive(Clone)]
pub struct StdioLogSink {
level: Level,
level: Level,
}
impl LogSink for StdioLogSink {
fn write_log(&self, level: Level, prefix: &str, message: &str) {
if level < self.level {
return;
}
fn write_log(&self, level: Level, prefix: &str, message: &str) {
if level < self.level {
return;
}
emit(level, prefix, message);
}
emit(level, prefix, message);
}
fn write_result(&self, message: &str) {
println!("{}", message);
}
fn write_result(&self, message: &str) {
println!("{}", message);
}
}
#[derive(Clone)]
pub struct FileLogSink {
level: Level,
file: Arc<std::sync::Mutex<std::fs::File>>,
level: Level,
file: Arc<std::sync::Mutex<std::fs::File>>,
}
impl FileLogSink {
	/// Creates (truncating) the log file at `path`.
	pub fn new(level: Level, path: &Path) -> std::io::Result<Self> {
		let file = std::fs::File::create(path)?;
		Ok(Self {
			level,
			file: Arc::new(std::sync::Mutex::new(file)),
		})
	}
}
impl LogSink for FileLogSink {
fn write_log(&self, level: Level, prefix: &str, message: &str) {
if level < self.level {
return;
}
fn write_log(&self, level: Level, prefix: &str, message: &str) {
if level < self.level {
return;
}
let line = format(level, prefix, message);
let line = format(level, prefix, message);
// ignore any errors, not much we can do if logging fails...
self.file.lock().unwrap().write_all(line.as_bytes()).ok();
}
// ignore any errors, not much we can do if logging fails...
self.file.lock().unwrap().write_all(line.as_bytes()).ok();
}
fn write_result(&self, _message: &str) {}
fn write_result(&self, _message: &str) {}
}
impl Logger {
pub fn new(tracer: Tracer, level: Level) -> Self {
Self {
tracer,
sink: vec![Box::new(StdioLogSink { level })],
prefix: None,
}
}
pub fn new(tracer: Tracer, level: Level) -> Self {
Self {
tracer,
sink: vec![Box::new(StdioLogSink { level })],
prefix: None,
}
}
pub fn span(&self, name: &str) -> SpanBuilder {
self.tracer.span_builder(format!("serverlauncher/{}", name))
}
pub fn span(&self, name: &str) -> SpanBuilder {
self.tracer.span_builder(format!("serverlauncher/{}", name))
}
pub fn tracer(&self) -> &Tracer {
&self.tracer
}
pub fn tracer(&self) -> &Tracer {
&self.tracer
}
pub fn emit(&self, level: Level, message: &str) {
let prefix = self.prefix.as_deref().unwrap_or("");
for sink in &self.sink {
sink.write_log(level, prefix, message);
}
}
pub fn emit(&self, level: Level, message: &str) {
let prefix = self.prefix.as_deref().unwrap_or("");
for sink in &self.sink {
sink.write_log(level, prefix, message);
}
}
pub fn result(&self, message: &str) {
for sink in &self.sink {
sink.write_result(message);
}
}
pub fn result(&self, message: &str) {
for sink in &self.sink {
sink.write_result(message);
}
}
pub fn prefixed(&self, prefix: &str) -> Logger {
Logger {
prefix: Some(match &self.prefix {
Some(p) => format!("{}{} ", p, prefix),
None => format!("{} ", prefix),
}),
..self.clone()
}
}
pub fn prefixed(&self, prefix: &str) -> Logger {
Logger {
prefix: Some(match &self.prefix {
Some(p) => format!("{}{} ", p, prefix),
None => format!("{} ", prefix),
}),
..self.clone()
}
}
/// Creates a new logger with the additional log sink added.
pub fn tee<T>(&self, sink: T) -> Logger
where
T: LogSink + 'static,
{
let mut new_sinks = self.sink.clone();
new_sinks.push(Box::new(sink));
/// Creates a new logger with the additional log sink added.
pub fn tee<T>(&self, sink: T) -> Logger
where
T: LogSink + 'static,
{
let mut new_sinks = self.sink.clone();
new_sinks.push(Box::new(sink));
Logger {
sink: new_sinks,
..self.clone()
}
}
Logger {
sink: new_sinks,
..self.clone()
}
}
pub fn get_download_logger<'a>(&'a self, prefix: &'static str) -> DownloadLogger<'a> {
DownloadLogger {
prefix,
logger: self,
}
}
pub fn get_download_logger<'a>(&'a self, prefix: &'static str) -> DownloadLogger<'a> {
DownloadLogger {
prefix,
logger: self,
}
}
}
pub struct DownloadLogger<'a> {
prefix: &'static str,
logger: &'a Logger,
prefix: &'static str,
logger: &'a Logger,
}
impl<'a> crate::util::io::ReportCopyProgress for DownloadLogger<'a> {
	fn report_progress(&mut self, bytes_so_far: u64, total_bytes: u64) {
		// Include a percentage only when the total size is known.
		if total_bytes > 0 {
			self.logger.emit(
				Level::Trace,
				&format!(
					"{} {}/{} ({:.0}%)",
					self.prefix,
					bytes_so_far,
					total_bytes,
					(bytes_so_far as f64 / total_bytes as f64) * 100.0,
				),
			);
		} else {
			self.logger.emit(
				Level::Trace,
				&format!("{} {}/{}", self.prefix, bytes_so_far, total_bytes,),
			);
		}
	}
}
pub fn format(level: Level, prefix: &str, message: &str) -> String {
let current = Local::now();
let timestamp = current.format("%Y-%m-%d %H:%M:%S").to_string();
let current = Local::now();
let timestamp = current.format("%Y-%m-%d %H:%M:%S").to_string();
let name = level.name().unwrap();
let name = level.name().unwrap();
if let Some(c) = level.color_code() {
format!(
"\x1b[2m[{}]\x1b[0m {}{}\x1b[0m {}{}\n",
timestamp, c, name, prefix, message
)
} else {
format!("[{}] {} {}{}\n", timestamp, name, prefix, message)
}
if let Some(c) = level.color_code() {
format!(
"\x1b[2m[{}]\x1b[0m {}{}\x1b[0m {}{}\n",
timestamp, c, name, prefix, message
)
} else {
format!("[{}] {} {}{}\n", timestamp, name, prefix, message)
}
}
/// Prints a formatted log line to stdout; Trace lines are dimmed.
pub fn emit(level: Level, prefix: &str, message: &str) {
	let line = format(level, prefix, message);
	if level == Level::Trace {
		print!("\x1b[2m{}\x1b[0m", line);
	} else {
		print!("{}", line);
	}
}
#[macro_export]
@ -351,39 +351,39 @@ macro_rules! warning {
/// Runs `$func` inside a tracing span started from `$span`, recording any
/// `Err` into the span, and returns the result.
#[macro_export]
macro_rules! span {
	($logger:expr, $span:expr, $func:expr) => {{
		use opentelemetry::trace::TraceContextExt;

		let span = $span.start($logger.tracer());
		let cx = opentelemetry::Context::current_with_span(span);
		let guard = cx.clone().attach();
		let t = $func;

		if let Err(e) = &t {
			cx.span().record_error(e);
		}

		std::mem::drop(guard);

		t
	}};
}
/// Awaits the future `$func` inside a tracing span started from `$span`,
/// recording any `Err` into the span before ending it, and returns the result.
#[macro_export]
macro_rules! spanf {
	($logger:expr, $span:expr, $func:expr) => {{
		use opentelemetry::trace::{FutureExt, TraceContextExt};

		let span = $span.start($logger.tracer());
		let cx = opentelemetry::Context::current_with_span(span);
		let t = $func.with_context(cx.clone()).await;

		if let Err(e) = &t {
			cx.span().record_error(e);
		}

		cx.span().end();

		t
	}};
}

View file

@ -9,96 +9,96 @@ use serde::{Deserialize, Serialize};
#[derive(clap::ArgEnum, Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum Quality {
#[serde(rename = "stable")]
Stable,
#[serde(rename = "exploration")]
Exploration,
#[serde(other)]
Insiders,
#[serde(rename = "stable")]
Stable,
#[serde(rename = "exploration")]
Exploration,
#[serde(other)]
Insiders,
}
impl Quality {
	/// Lowercased name in paths and protocol
	pub fn get_machine_name(&self) -> &'static str {
		match self {
			Quality::Insiders => "insiders",
			Quality::Exploration => "exploration",
			Quality::Stable => "stable",
		}
	}

	/// Uppercased display name for humans
	pub fn get_capitalized_name(&self) -> &'static str {
		match self {
			Quality::Insiders => "Insiders",
			Quality::Exploration => "Exploration",
			Quality::Stable => "Stable",
		}
	}

	/// Full product name for this quality.
	pub fn get_app_name(&self) -> &'static str {
		match self {
			Quality::Insiders => "Visual Studio Code Insiders",
			Quality::Exploration => "Visual Studio Code Exploration",
			Quality::Stable => "Visual Studio Code",
		}
	}

	/// Server launch script name (Windows uses .cmd wrappers).
	#[cfg(target_os = "windows")]
	pub fn server_entrypoint(&self) -> &'static str {
		match self {
			Quality::Insiders => "code-server-insiders.cmd",
			Quality::Exploration => "code-server-exploration.cmd",
			Quality::Stable => "code-server.cmd",
		}
	}

	/// Server launch script name on non-Windows platforms.
	#[cfg(not(target_os = "windows"))]
	pub fn server_entrypoint(&self) -> &'static str {
		match self {
			Quality::Insiders => "code-server-insiders",
			Quality::Exploration => "code-server-exploration",
			Quality::Stable => "code-server",
		}
	}
}
impl fmt::Display for Quality {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.get_capitalized_name())
}
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.get_capitalized_name())
}
}
impl TryFrom<&str> for Quality {
type Error = String;
type Error = String;
fn try_from(s: &str) -> Result<Self, Self::Error> {
match s {
"stable" => Ok(Quality::Stable),
"insiders" => Ok(Quality::Insiders),
"exploration" => Ok(Quality::Exploration),
_ => Err(format!(
"Unknown quality: {}. Must be one of stable, insiders, or exploration.",
s
)),
}
}
fn try_from(s: &str) -> Result<Self, Self::Error> {
match s {
"stable" => Ok(Quality::Stable),
"insiders" => Ok(Quality::Insiders),
"exploration" => Ok(Quality::Exploration),
_ => Err(format!(
"Unknown quality: {}. Must be one of stable, insiders, or exploration.",
s
)),
}
}
}
#[derive(clap::ArgEnum, Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum TelemetryLevel {
Off,
Crash,
Error,
All,
Off,
Crash,
Error,
All,
}
impl fmt::Display for TelemetryLevel {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
TelemetryLevel::Off => write!(f, "off"),
TelemetryLevel::Crash => write!(f, "crash"),
TelemetryLevel::Error => write!(f, "error"),
TelemetryLevel::All => write!(f, "all"),
}
}
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
TelemetryLevel::Off => write!(f, "off"),
TelemetryLevel::Crash => write!(f, "crash"),
TelemetryLevel::Error => write!(f, "error"),
TelemetryLevel::All => write!(f, "all"),
}
}
}

View file

@ -6,9 +6,9 @@
extern crate dirs;
use std::{
	fs::{create_dir, read_to_string, remove_dir_all, write},
	path::{Path, PathBuf},
	sync::{Arc, Mutex},
};
use serde::{de::DeserializeOwned, Serialize};
@ -19,134 +19,134 @@ const HOME_DIR_ALTS: [&str; 2] = ["$HOME", "~"];
/// Filesystem locations used by the launcher; all paths live under `root`.
#[derive(Clone)]
pub struct LauncherPaths {
	root: PathBuf,
}
struct PersistedStateContainer<T>
where
T: Clone + Serialize + DeserializeOwned + Default,
T: Clone + Serialize + DeserializeOwned + Default,
{
path: PathBuf,
state: Option<T>,
path: PathBuf,
state: Option<T>,
}
impl<T> PersistedStateContainer<T>
where
T: Clone + Serialize + DeserializeOwned + Default,
T: Clone + Serialize + DeserializeOwned + Default,
{
fn load_or_get(&mut self) -> T {
if let Some(state) = &self.state {
return state.clone();
}
fn load_or_get(&mut self) -> T {
if let Some(state) = &self.state {
return state.clone();
}
let state = if let Ok(s) = read_to_string(&self.path) {
serde_json::from_str::<T>(&s).unwrap_or_default()
} else {
T::default()
};
let state = if let Ok(s) = read_to_string(&self.path) {
serde_json::from_str::<T>(&s).unwrap_or_default()
} else {
T::default()
};
self.state = Some(state.clone());
state
}
self.state = Some(state.clone());
state
}
fn save(&mut self, state: T) -> Result<(), WrappedError> {
let s = serde_json::to_string(&state).unwrap();
self.state = Some(state);
write(&self.path, s).map_err(|e| {
wrap(
e,
format!("error saving launcher state into {}", self.path.display()),
)
})
}
fn save(&mut self, state: T) -> Result<(), WrappedError> {
let s = serde_json::to_string(&state).unwrap();
self.state = Some(state);
write(&self.path, s).map_err(|e| {
wrap(
e,
format!("error saving launcher state into {}", self.path.display()),
)
})
}
}
/// Container that holds some state value that is persisted to disk.
#[derive(Clone)]
pub struct PersistedState<T>
where
T: Clone + Serialize + DeserializeOwned + Default,
T: Clone + Serialize + DeserializeOwned + Default,
{
container: Arc<Mutex<PersistedStateContainer<T>>>,
container: Arc<Mutex<PersistedStateContainer<T>>>,
}
impl<T> PersistedState<T>
where
	T: Clone + Serialize + DeserializeOwned + Default,
{
	/// Creates a new state container that persists to the given path.
	pub fn new(path: PathBuf) -> PersistedState<T> {
		PersistedState {
			container: Arc::new(Mutex::new(PersistedStateContainer { path, state: None })),
		}
	}

	/// Loads persisted state.
	pub fn load(&self) -> T {
		self.container.lock().unwrap().load_or_get()
	}

	/// Saves persisted state.
	pub fn save(&self, state: T) -> Result<(), WrappedError> {
		self.container.lock().unwrap().save(state)
	}

	/// Mutates persisted state under the lock: loads, applies `mutator`,
	/// saves, and returns the mutator's result.
	pub fn update_with<V, R>(
		&self,
		v: V,
		mutator: fn(v: V, state: &mut T) -> R,
	) -> Result<R, WrappedError> {
		let mut container = self.container.lock().unwrap();
		let mut state = container.load_or_get();
		let r = mutator(v, &mut state);
		container.save(state).map(|_| r)
	}
}
impl LauncherPaths {
	/// Resolves `root` (default `~/.vscode-cli`), expanding `$HOME`/`~`
	/// via the home directory, and creates the directory if missing.
	pub fn new(root: &Option<String>) -> Result<LauncherPaths, AnyError> {
		let root = root.as_deref().unwrap_or("~/.vscode-cli");
		let mut replaced = root.to_owned();
		for token in HOME_DIR_ALTS {
			if root.contains(token) {
				if let Some(home) = dirs::home_dir() {
					replaced = root.replace(token, &home.to_string_lossy())
				} else {
					return Err(AnyError::from(NoHomeForLauncherError()));
				}
			}
		}

		if !Path::new(&replaced).exists() {
			create_dir(&replaced)
				.map_err(|e| wrap(e, format!("error creating directory {}", &replaced)))?;
		}

		Ok(LauncherPaths::new_without_replacements(PathBuf::from(
			replaced,
		)))
	}

	/// Wraps an already-resolved root path without expansion or creation.
	pub fn new_without_replacements(root: PathBuf) -> LauncherPaths {
		LauncherPaths { root }
	}

	/// Root directory for the server launcher
	pub fn root(&self) -> &Path {
		&self.root
	}

	/// Removes the launcher data directory.
	pub fn remove(&self) -> Result<(), WrappedError> {
		remove_dir_all(&self.root).map_err(|e| {
			wrap(
				e,
				format!(
					"error removing launcher data directory {}",
					self.root.display()
				),
			)
		})
	}
}

View file

@ -21,5 +21,5 @@ mod service_windows;
pub use control_server::serve;
pub use service::{
	create_service_manager, ServiceContainer, ServiceManager, SERVICE_LOG_FILE_NAME,
};

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -7,50 +7,50 @@ use crate::util::errors::{AnyError, MissingLegalConsent};
use crate::util::input::prompt_yn;
use serde::{Deserialize, Serialize};
// Baked in at build time from the CI environment (see build pipeline),
// renamed from the old LAUNCHER_REMOTE_* variables.
const LICENSE_TEXT: Option<&'static str> = option_env!("VSCODE_CLI_REMOTE_LICENSE_TEXT");
const LICENSE_PROMPT: Option<&'static str> = option_env!("VSCODE_CLI_REMOTE_LICENSE_PROMPT");
/// On-disk record of whether the user accepted the license terms.
#[derive(Clone, Default, Serialize, Deserialize)]
struct PersistedConsent {
	pub consented: Option<bool>,
}
/// Prints the license text (when the build embeds one) and, unless consent
/// was previously persisted, prompts the user. Errors when consent is refused
/// or the prompt fails; persists an affirmative answer.
pub fn require_consent(paths: &LauncherPaths) -> Result<(), AnyError> {
	// Builds without embedded license text require no consent.
	match LICENSE_TEXT {
		Some(t) => println!("{}", t),
		None => return Ok(()),
	}

	let prompt = match LICENSE_PROMPT {
		Some(p) => p,
		None => return Ok(()),
	};

	let license: PersistedState<PersistedConsent> =
		PersistedState::new(paths.root().join("license_consent.json"));

	let mut save = false;
	let mut load = license.load();

	if !load.consented.unwrap_or(false) {
		match prompt_yn(prompt) {
			Ok(true) => {
				save = true;
				load.consented = Some(true);
			}
			Ok(false) => {
				return Err(AnyError::from(MissingLegalConsent(
					"Sorry you cannot use VS Code Server CLI without accepting the terms."
						.to_string(),
				)))
			}
			Err(e) => return Err(AnyError::from(MissingLegalConsent(e.to_string()))),
		}
	}

	if save {
		license.save(load)?;
	}

	Ok(())
}

View file

@ -6,213 +6,213 @@ use rand::prelude::*;
// Adjectives in LEFT from Moby :
static LEFT: &[&str] = &[
	"admiring", "adoring", "affectionate", "agitated", "amazing", "angry", "awesome",
	"beautiful", "blissful", "bold", "boring", "brave", "busy", "charming", "clever",
	"cool", "compassionate", "competent", "condescending", "confident", "cranky",
	"crazy", "dazzling", "determined", "distracted", "dreamy", "eager", "ecstatic",
	"elastic", "elated", "elegant", "eloquent", "epic", "exciting", "fervent",
	"festive", "flamboyant", "focused", "friendly", "frosty", "funny", "gallant",
	"gifted", "goofy", "gracious", "great", "happy", "hardcore", "heuristic",
	"hopeful", "hungry", "infallible", "inspiring", "interesting", "intelligent",
	"jolly", "jovial", "keen", "kind", "laughing", "loving", "lucid", "magical",
	"mystifying", "modest", "musing", "naughty", "nervous", "nice", "nifty",
	"nostalgic", "objective", "optimistic", "peaceful", "pedantic", "pensive",
	"practical", "priceless", "quirky", "quizzical", "recursing", "relaxed",
	"reverent", "romantic", "sad", "serene", "sharp", "silly", "sleepy", "stoic",
	"strange", "stupefied", "suspicious", "sweet", "tender", "thirsty", "trusting",
	"unruffled", "upbeat", "vibrant", "vigilant", "vigorous", "wizardly",
	"wonderful", "xenodochial", "youthful", "zealous", "zen",
];
// Bird names used as the second component of generated names.
static RIGHT: &[&str] = &[
	"albatross", "antbird", "antpitta", "antshrike", "antwren", "babbler", "barbet",
	"blackbird", "brushfinch", "bulbul", "bunting", "cisticola", "cormorant", "crow",
	"cuckoo", "dove", "drongo", "duck", "eagle", "falcon", "fantail", "finch",
	"flowerpecker", "flycatcher", "goose", "goshawk", "greenbul", "grosbeak", "gull",
	"hawk", "heron", "honeyeater", "hornbill", "hummingbird", "ibis", "jay",
	"kestrel", "kingfisher", "kite", "lark", "lorikeet", "magpie", "mockingbird",
	"monarch", "nightjar", "oriole", "owl", "parakeet", "parrot", "partridge",
	"penguin", "petrel", "pheasant", "piculet", "pigeon", "pitta", "prinia",
	"puffin", "quail", "robin", "sandpiper", "seedeater", "shearwater", "sparrow",
	"spinetail", "starling", "sunbird", "swallow", "swift", "swiftlet", "tanager",
	"tapaculo", "tern", "thornbill", "tinamou", "trogon", "tyrannulet", "vireo",
	"warbler", "waxbill", "weaver", "whistler", "woodpecker", "wren",
];
/// Generates a random avian name, with the optional extra_random_length added
/// to reduce chance of in-flight collisions.
pub fn generate_name(max_length: usize) -> String {
let mut rng = rand::thread_rng();
loop {
let left = LEFT[rng.gen_range(0..LEFT.len())];
let right = RIGHT[rng.gen_range(0..RIGHT.len())];
let s = format!("{}-{}", left, right);
if s.len() < max_length {
return s;
}
}
let mut rng = rand::thread_rng();
loop {
let left = LEFT[rng.gen_range(0..LEFT.len())];
let right = RIGHT[rng.gen_range(0..RIGHT.len())];
let s = format!("{}-{}", left, right);
if s.len() < max_length {
return s;
}
}
}

View file

@ -4,19 +4,19 @@
*--------------------------------------------------------------------------------------------*/
use std::{
	fs::{read_dir, read_to_string, remove_dir_all, write},
	path::PathBuf,
};
use serde::{Deserialize, Serialize};
use crate::{
log, options,
state::{LauncherPaths, PersistedState},
util::{
errors::{wrap, AnyError, WrappedError},
machine,
},
log, options,
state::{LauncherPaths, PersistedState},
util::{
errors::{wrap, AnyError, WrappedError},
machine,
},
};
const INSIDERS_INSTALL_FOLDER: &str = "server-insiders";
@ -26,191 +26,191 @@ const PIDFILE_SUFFIX: &str = ".pid";
const LOGFILE_SUFFIX: &str = ".log";
pub struct ServerPaths {
	// Directory into which the server is downloaded
	pub server_dir: PathBuf,
	// Executable path, within the server_id
	pub executable: PathBuf,
	// File where logs for the server should be written.
	pub logfile: PathBuf,
	// File where the process ID for the server should be written.
	pub pidfile: PathBuf,
}
impl ServerPaths {
// Queries the system to determine the process ID of the running server.
// Returns the process ID, if the server is running.
pub fn get_running_pid(&self) -> Option<u32> {
if let Some(pid) = self.read_pid() {
return match machine::process_at_path_exists(pid, &self.executable) {
true => Some(pid),
false => None,
};
}
// Queries the system to determine the process ID of the running server.
// Returns the process ID, if the server is running.
pub fn get_running_pid(&self) -> Option<u32> {
if let Some(pid) = self.read_pid() {
return match machine::process_at_path_exists(pid, &self.executable) {
true => Some(pid),
false => None,
};
}
if let Some(pid) = machine::find_running_process(&self.executable) {
// attempt to backfill process ID:
self.write_pid(pid).ok();
return Some(pid);
}
if let Some(pid) = machine::find_running_process(&self.executable) {
// attempt to backfill process ID:
self.write_pid(pid).ok();
return Some(pid);
}
None
}
None
}
/// Delete the server directory
pub fn delete(&self) -> Result<(), WrappedError> {
remove_dir_all(&self.server_dir).map_err(|e| {
wrap(
e,
format!("error deleting server dir {}", self.server_dir.display()),
)
})
}
/// Delete the server directory
pub fn delete(&self) -> Result<(), WrappedError> {
remove_dir_all(&self.server_dir).map_err(|e| {
wrap(
e,
format!("error deleting server dir {}", self.server_dir.display()),
)
})
}
// VS Code Server pid
pub fn write_pid(&self, pid: u32) -> Result<(), WrappedError> {
write(&self.pidfile, &format!("{}", pid)).map_err(|e| {
wrap(
e,
format!("error writing process id into {}", self.pidfile.display()),
)
})
}
// VS Code Server pid
pub fn write_pid(&self, pid: u32) -> Result<(), WrappedError> {
write(&self.pidfile, &format!("{}", pid)).map_err(|e| {
wrap(
e,
format!("error writing process id into {}", self.pidfile.display()),
)
})
}
fn read_pid(&self) -> Option<u32> {
read_to_string(&self.pidfile)
.ok()
.and_then(|s| s.parse::<u32>().ok())
}
fn read_pid(&self) -> Option<u32> {
read_to_string(&self.pidfile)
.ok()
.and_then(|s| s.parse::<u32>().ok())
}
}
#[derive(Serialize, Deserialize, Clone, PartialEq, Eq)]
pub struct InstalledServer {
pub quality: options::Quality,
pub commit: String,
pub headless: bool,
pub quality: options::Quality,
pub commit: String,
pub headless: bool,
}
impl InstalledServer {
/// Gets path information about where a specific server should be stored.
pub fn server_paths(&self, p: &LauncherPaths) -> ServerPaths {
let base_folder = self.get_install_folder(p);
let server_dir = base_folder.join("bin").join(&self.commit);
ServerPaths {
executable: server_dir
.join("bin")
.join(self.quality.server_entrypoint()),
server_dir,
logfile: base_folder.join(format!(".{}{}", self.commit, LOGFILE_SUFFIX)),
pidfile: base_folder.join(format!(".{}{}", self.commit, PIDFILE_SUFFIX)),
}
}
/// Gets path information about where a specific server should be stored.
pub fn server_paths(&self, p: &LauncherPaths) -> ServerPaths {
let base_folder = self.get_install_folder(p);
let server_dir = base_folder.join("bin").join(&self.commit);
ServerPaths {
executable: server_dir
.join("bin")
.join(self.quality.server_entrypoint()),
server_dir,
logfile: base_folder.join(format!(".{}{}", self.commit, LOGFILE_SUFFIX)),
pidfile: base_folder.join(format!(".{}{}", self.commit, PIDFILE_SUFFIX)),
}
}
fn get_install_folder(&self, p: &LauncherPaths) -> PathBuf {
let name = match self.quality {
options::Quality::Insiders => INSIDERS_INSTALL_FOLDER,
options::Quality::Exploration => EXPLORATION_INSTALL_FOLDER,
options::Quality::Stable => STABLE_INSTALL_FOLDER,
};
fn get_install_folder(&self, p: &LauncherPaths) -> PathBuf {
let name = match self.quality {
options::Quality::Insiders => INSIDERS_INSTALL_FOLDER,
options::Quality::Exploration => EXPLORATION_INSTALL_FOLDER,
options::Quality::Stable => STABLE_INSTALL_FOLDER,
};
p.root().join(if !self.headless {
format!("{}-web", name)
} else {
name.to_string()
})
}
p.root().join(if !self.headless {
format!("{}-web", name)
} else {
name.to_string()
})
}
}
pub struct LastUsedServers<'a> {
state: PersistedState<Vec<InstalledServer>>,
paths: &'a LauncherPaths,
state: PersistedState<Vec<InstalledServer>>,
paths: &'a LauncherPaths,
}
impl<'a> LastUsedServers<'a> {
	/// Opens the MRU list stored under the launcher root.
	pub fn new(paths: &'a LauncherPaths) -> LastUsedServers {
		LastUsedServers {
			state: PersistedState::new(paths.root().join("last-used-servers.json")),
			paths,
		}
	}

	/// Adds a server as having been used most recently. Returns the number of retained server.
	pub fn add(&self, server: InstalledServer) -> Result<usize, WrappedError> {
		self.state.update_with(server, |server, l| {
			// Move an existing entry to the front rather than duplicating it.
			if let Some(index) = l.iter().position(|s| s == &server) {
				l.remove(index);
			}
			l.insert(0, server);
			l.len()
		})
	}

	/// Trims so that at most `max_servers` are saved on disk.
	pub fn trim(&self, log: &log::Logger, max_servers: usize) -> Result<(), WrappedError> {
		let mut servers = self.state.load();
		while servers.len() > max_servers {
			// Least-recently-used entries are at the tail.
			let server = servers.pop().unwrap();
			debug!(
				log,
				"Removing old server {}/{}",
				server.quality.get_machine_name(),
				server.commit
			);
			let server_paths = server.server_paths(self.paths);
			server_paths.delete()?;
		}
		self.state.save(servers)?;
		Ok(())
	}
}
/// Prunes servers not currently running, and returns the deleted servers.
pub fn prune_stopped_servers(launcher_paths: &LauncherPaths) -> Result<Vec<ServerPaths>, AnyError> {
get_all_servers(launcher_paths)
.into_iter()
.map(|s| s.server_paths(launcher_paths))
.filter(|s| s.get_running_pid().is_none())
.map(|s| s.delete().map(|_| s))
.collect::<Result<_, _>>()
.map_err(AnyError::from)
get_all_servers(launcher_paths)
.into_iter()
.map(|s| s.server_paths(launcher_paths))
.filter(|s| s.get_running_pid().is_none())
.map(|s| s.delete().map(|_| s))
.collect::<Result<_, _>>()
.map_err(AnyError::from)
}
// Gets a list of all servers which look like they might be running.
pub fn get_all_servers(lp: &LauncherPaths) -> Vec<InstalledServer> {
let mut servers: Vec<InstalledServer> = vec![];
let mut server = InstalledServer {
commit: "".to_owned(),
headless: false,
quality: options::Quality::Stable,
};
let mut servers: Vec<InstalledServer> = vec![];
let mut server = InstalledServer {
commit: "".to_owned(),
headless: false,
quality: options::Quality::Stable,
};
add_server_paths_in_folder(lp, &server, &mut servers);
add_server_paths_in_folder(lp, &server, &mut servers);
server.headless = true;
add_server_paths_in_folder(lp, &server, &mut servers);
server.headless = true;
add_server_paths_in_folder(lp, &server, &mut servers);
server.headless = false;
server.quality = options::Quality::Insiders;
add_server_paths_in_folder(lp, &server, &mut servers);
server.headless = false;
server.quality = options::Quality::Insiders;
add_server_paths_in_folder(lp, &server, &mut servers);
server.headless = true;
add_server_paths_in_folder(lp, &server, &mut servers);
server.headless = true;
add_server_paths_in_folder(lp, &server, &mut servers);
servers
servers
}
fn add_server_paths_in_folder(
lp: &LauncherPaths,
server: &InstalledServer,
servers: &mut Vec<InstalledServer>,
lp: &LauncherPaths,
server: &InstalledServer,
servers: &mut Vec<InstalledServer>,
) {
let dir = server.get_install_folder(lp).join("bin");
if let Ok(children) = read_dir(dir) {
for bin in children.flatten() {
servers.push(InstalledServer {
quality: server.quality,
headless: server.headless,
commit: bin.file_name().to_string_lossy().into(),
});
}
}
let dir = server.get_install_folder(lp).join("bin");
if let Ok(children) = read_dir(dir) {
for bin in children.flatten() {
servers.push(InstalledServer {
quality: server.quality,
headless: server.headless,
commit: bin.file_name().to_string_lossy().into(),
});
}
}
}

View file

@ -8,123 +8,123 @@ use std::collections::HashSet;
use tokio::sync::{mpsc, oneshot};
use crate::{
constants::CONTROL_PORT,
util::errors::{AnyError, CannotForwardControlPort, ServerHasClosed},
constants::CONTROL_PORT,
util::errors::{AnyError, CannotForwardControlPort, ServerHasClosed},
};
use super::dev_tunnels::ActiveTunnel;
pub enum PortForwardingRec {
Forward(u16, oneshot::Sender<Result<String, AnyError>>),
Unforward(u16, oneshot::Sender<Result<(), AnyError>>),
Forward(u16, oneshot::Sender<Result<String, AnyError>>),
Unforward(u16, oneshot::Sender<Result<(), AnyError>>),
}
/// Provides a port forwarding service for connected clients. Clients can make
/// requests on it, which are (and *must be*) processed by calling the `.process()`
/// method on the forwarder.
pub struct PortForwardingProcessor {
tx: mpsc::Sender<PortForwardingRec>,
rx: mpsc::Receiver<PortForwardingRec>,
forwarded: HashSet<u16>,
tx: mpsc::Sender<PortForwardingRec>,
rx: mpsc::Receiver<PortForwardingRec>,
forwarded: HashSet<u16>,
}
impl PortForwardingProcessor {
pub fn new() -> Self {
let (tx, rx) = mpsc::channel(8);
Self {
tx,
rx,
forwarded: HashSet::new(),
}
}
pub fn new() -> Self {
let (tx, rx) = mpsc::channel(8);
Self {
tx,
rx,
forwarded: HashSet::new(),
}
}
/// Gets a handle that can be passed off to consumers of port forwarding.
pub fn handle(&self) -> PortForwarding {
PortForwarding {
tx: self.tx.clone(),
}
}
/// Gets a handle that can be passed off to consumers of port forwarding.
pub fn handle(&self) -> PortForwarding {
PortForwarding {
tx: self.tx.clone(),
}
}
/// Receives port forwarding requests. Consumers MUST call `process()`
/// with the received requests.
pub async fn recv(&mut self) -> Option<PortForwardingRec> {
self.rx.recv().await
}
/// Receives port forwarding requests. Consumers MUST call `process()`
/// with the received requests.
pub async fn recv(&mut self) -> Option<PortForwardingRec> {
self.rx.recv().await
}
/// Processes the incoming forwarding request.
pub async fn process(&mut self, req: PortForwardingRec, tunnel: &mut ActiveTunnel) {
match req {
PortForwardingRec::Forward(port, tx) => {
tx.send(self.process_forward(port, tunnel).await).ok();
}
PortForwardingRec::Unforward(port, tx) => {
tx.send(self.process_unforward(port, tunnel).await).ok();
}
}
}
/// Processes the incoming forwarding request.
pub async fn process(&mut self, req: PortForwardingRec, tunnel: &mut ActiveTunnel) {
match req {
PortForwardingRec::Forward(port, tx) => {
tx.send(self.process_forward(port, tunnel).await).ok();
}
PortForwardingRec::Unforward(port, tx) => {
tx.send(self.process_unforward(port, tunnel).await).ok();
}
}
}
async fn process_unforward(
&mut self,
port: u16,
tunnel: &mut ActiveTunnel,
) -> Result<(), AnyError> {
if port == CONTROL_PORT {
return Err(CannotForwardControlPort().into());
}
async fn process_unforward(
&mut self,
port: u16,
tunnel: &mut ActiveTunnel,
) -> Result<(), AnyError> {
if port == CONTROL_PORT {
return Err(CannotForwardControlPort().into());
}
tunnel.remove_port(port).await?;
self.forwarded.remove(&port);
Ok(())
}
tunnel.remove_port(port).await?;
self.forwarded.remove(&port);
Ok(())
}
async fn process_forward(
&mut self,
port: u16,
tunnel: &mut ActiveTunnel,
) -> Result<String, AnyError> {
if port == CONTROL_PORT {
return Err(CannotForwardControlPort().into());
}
async fn process_forward(
&mut self,
port: u16,
tunnel: &mut ActiveTunnel,
) -> Result<String, AnyError> {
if port == CONTROL_PORT {
return Err(CannotForwardControlPort().into());
}
if !self.forwarded.contains(&port) {
tunnel.add_port_tcp(port).await?;
self.forwarded.insert(port);
}
if !self.forwarded.contains(&port) {
tunnel.add_port_tcp(port).await?;
self.forwarded.insert(port);
}
tunnel.get_port_uri(port).await
}
tunnel.get_port_uri(port).await
}
}
pub struct PortForwarding {
tx: mpsc::Sender<PortForwardingRec>,
tx: mpsc::Sender<PortForwardingRec>,
}
impl PortForwarding {
pub async fn forward(&self, port: u16) -> Result<String, AnyError> {
let (tx, rx) = oneshot::channel();
let req = PortForwardingRec::Forward(port, tx);
pub async fn forward(&self, port: u16) -> Result<String, AnyError> {
let (tx, rx) = oneshot::channel();
let req = PortForwardingRec::Forward(port, tx);
if self.tx.send(req).await.is_err() {
return Err(ServerHasClosed().into());
}
if self.tx.send(req).await.is_err() {
return Err(ServerHasClosed().into());
}
match rx.await {
Ok(r) => r,
Err(_) => Err(ServerHasClosed().into()),
}
}
match rx.await {
Ok(r) => r,
Err(_) => Err(ServerHasClosed().into()),
}
}
pub async fn unforward(&self, port: u16) -> Result<(), AnyError> {
let (tx, rx) = oneshot::channel();
let req = PortForwardingRec::Unforward(port, tx);
pub async fn unforward(&self, port: u16) -> Result<(), AnyError> {
let (tx, rx) = oneshot::channel();
let req = PortForwardingRec::Unforward(port, tx);
if self.tx.send(req).await.is_err() {
return Err(ServerHasClosed().into());
}
if self.tx.send(req).await.is_err() {
return Err(ServerHasClosed().into());
}
match rx.await {
Ok(r) => r,
Err(_) => Err(ServerHasClosed().into()),
}
}
match rx.await {
Ok(r) => r,
Err(_) => Err(ServerHasClosed().into()),
}
}
}

View file

@ -11,47 +11,47 @@ use serde::{Deserialize, Serialize};
#[serde(tag = "method", content = "params")]
#[allow(non_camel_case_types)]
pub enum ServerRequestMethod {
serve(ServeParams),
prune,
ping(EmptyResult),
forward(ForwardParams),
unforward(UnforwardParams),
gethostname(EmptyResult),
update(UpdateParams),
servermsg(ServerMessageParams),
callserverhttp(CallServerHttpParams),
serve(ServeParams),
prune,
ping(EmptyResult),
forward(ForwardParams),
unforward(UnforwardParams),
gethostname(EmptyResult),
update(UpdateParams),
servermsg(ServerMessageParams),
callserverhttp(CallServerHttpParams),
}
#[derive(Serialize, Debug)]
#[serde(tag = "method", content = "params", rename_all = "camelCase")]
#[allow(non_camel_case_types)]
pub enum ClientRequestMethod<'a> {
servermsg(RefServerMessageParams<'a>),
serverlog(ServerLog<'a>),
version(VersionParams),
servermsg(RefServerMessageParams<'a>),
serverlog(ServerLog<'a>),
version(VersionParams),
}
#[derive(Deserialize, Debug)]
pub struct ForwardParams {
pub port: u16,
pub port: u16,
}
#[derive(Deserialize, Debug)]
pub struct UnforwardParams {
pub port: u16,
pub port: u16,
}
#[derive(Serialize)]
pub struct ForwardResult {
pub uri: String,
pub uri: String,
}
#[derive(Deserialize, Debug)]
pub struct ServeParams {
pub socket_id: u16,
pub commit_id: Option<String>,
pub quality: Quality,
pub extensions: Vec<String>,
pub socket_id: u16,
pub commit_id: Option<String>,
pub quality: Quality,
pub extensions: Vec<String>,
}
#[derive(Deserialize, Serialize, Debug)]
@ -59,93 +59,93 @@ pub struct EmptyResult {}
#[derive(Serialize, Deserialize, Debug)]
pub struct UpdateParams {
pub do_update: bool,
pub do_update: bool,
}
#[derive(Deserialize, Debug)]
pub struct ServerMessageParams {
pub i: u16,
#[serde(with = "serde_bytes")]
pub body: Vec<u8>,
pub i: u16,
#[serde(with = "serde_bytes")]
pub body: Vec<u8>,
}
#[derive(Serialize, Debug)]
pub struct RefServerMessageParams<'a> {
pub i: u16,
#[serde(with = "serde_bytes")]
pub body: &'a [u8],
pub i: u16,
#[serde(with = "serde_bytes")]
pub body: &'a [u8],
}
#[derive(Serialize)]
pub struct UpdateResult {
pub up_to_date: bool,
pub did_update: bool,
pub up_to_date: bool,
pub did_update: bool,
}
#[derive(Deserialize, Debug)]
pub struct ToServerRequest {
pub id: Option<u8>,
#[serde(flatten)]
pub params: ServerRequestMethod,
pub id: Option<u8>,
#[serde(flatten)]
pub params: ServerRequestMethod,
}
#[derive(Serialize, Debug)]
pub struct ToClientRequest<'a> {
pub id: Option<u8>,
#[serde(flatten)]
pub params: ClientRequestMethod<'a>,
pub id: Option<u8>,
#[serde(flatten)]
pub params: ClientRequestMethod<'a>,
}
#[derive(Serialize, Deserialize)]
pub struct SuccessResponse<T>
where
T: Serialize,
T: Serialize,
{
pub id: u8,
pub result: T,
pub id: u8,
pub result: T,
}
#[derive(Serialize, Deserialize)]
pub struct ErrorResponse {
pub id: u8,
pub error: ResponseError,
pub id: u8,
pub error: ResponseError,
}
#[derive(Serialize, Deserialize)]
pub struct ResponseError {
pub code: i32,
pub message: String,
pub code: i32,
pub message: String,
}
#[derive(Debug, Default, Serialize)]
pub struct ServerLog<'a> {
pub line: &'a str,
pub level: u8,
pub line: &'a str,
pub level: u8,
}
#[derive(Serialize)]
pub struct GetHostnameResponse {
pub value: String,
pub value: String,
}
#[derive(Deserialize, Debug)]
pub struct CallServerHttpParams {
pub path: String,
pub method: String,
pub headers: HashMap<String, String>,
pub body: Option<Vec<u8>>,
pub path: String,
pub method: String,
pub headers: HashMap<String, String>,
pub body: Option<Vec<u8>>,
}
#[derive(Serialize)]
pub struct CallServerHttpResult {
pub status: u16,
#[serde(with = "serde_bytes")]
pub body: Vec<u8>,
pub headers: HashMap<String, String>,
pub status: u16,
#[serde(with = "serde_bytes")]
pub body: Vec<u8>,
pub headers: HashMap<String, String>,
}
#[derive(Serialize, Debug)]
pub struct VersionParams {
pub version: &'static str,
pub protocol_version: u32,
pub version: &'static str,
pub protocol_version: u32,
}

View file

@ -5,76 +5,76 @@
use std::path::Path;
use tokio::{
io::{AsyncReadExt, AsyncWriteExt},
net::{unix::OwnedWriteHalf, UnixStream},
sync::mpsc::Sender,
io::{AsyncReadExt, AsyncWriteExt},
net::{unix::OwnedWriteHalf, UnixStream},
sync::mpsc::Sender,
};
use crate::util::errors::{wrap, AnyError};
pub struct ServerBridge {
write: OwnedWriteHalf,
write: OwnedWriteHalf,
}
pub trait FromServerMessage {
fn from_server_message(index: u16, message: &[u8]) -> Self;
fn from_closed_server_bridge(i: u16) -> Self;
fn from_server_message(index: u16, message: &[u8]) -> Self;
fn from_closed_server_bridge(i: u16) -> Self;
}
pub async fn get_socket_rw_stream(path: &Path) -> Result<UnixStream, AnyError> {
let s = UnixStream::connect(path).await.map_err(|e| {
wrap(
e,
format!(
"error connecting to vscode server socket in {}",
path.display()
),
)
})?;
let s = UnixStream::connect(path).await.map_err(|e| {
wrap(
e,
format!(
"error connecting to vscode server socket in {}",
path.display()
),
)
})?;
Ok(s)
Ok(s)
}
const BUFFER_SIZE: usize = 65536;
impl ServerBridge {
pub async fn new<T>(path: &Path, index: u16, target: &Sender<T>) -> Result<Self, AnyError>
where
T: 'static + FromServerMessage + Send,
{
let stream = get_socket_rw_stream(path).await?;
let (mut read, write) = stream.into_split();
pub async fn new<T>(path: &Path, index: u16, target: &Sender<T>) -> Result<Self, AnyError>
where
T: 'static + FromServerMessage + Send,
{
let stream = get_socket_rw_stream(path).await?;
let (mut read, write) = stream.into_split();
let tx = target.clone();
tokio::spawn(async move {
let mut read_buf = vec![0; BUFFER_SIZE];
loop {
match read.read(&mut read_buf).await {
Err(_) => return,
Ok(0) => {
let _ = tx.send(T::from_closed_server_bridge(index)).await;
return; // EOF
}
Ok(s) => {
let send = tx.send(T::from_server_message(index, &read_buf[..s])).await;
if send.is_err() {
return;
}
}
}
}
});
let tx = target.clone();
tokio::spawn(async move {
let mut read_buf = vec![0; BUFFER_SIZE];
loop {
match read.read(&mut read_buf).await {
Err(_) => return,
Ok(0) => {
let _ = tx.send(T::from_closed_server_bridge(index)).await;
return; // EOF
}
Ok(s) => {
let send = tx.send(T::from_server_message(index, &read_buf[..s])).await;
if send.is_err() {
return;
}
}
}
}
});
Ok(ServerBridge { write })
}
Ok(ServerBridge { write })
}
pub async fn write(&mut self, b: Vec<u8>) -> std::io::Result<()> {
self.write.write_all(&b).await?;
Ok(())
}
pub async fn write(&mut self, b: Vec<u8>) -> std::io::Result<()> {
self.write.write_all(&b).await?;
Ok(())
}
pub async fn close(mut self) -> std::io::Result<()> {
self.write.shutdown().await?;
Ok(())
}
pub async fn close(mut self) -> std::io::Result<()> {
self.write.shutdown().await?;
Ok(())
}
}

View file

@ -6,128 +6,128 @@
use std::{path::Path, time::Duration};
use tokio::{
io::{self, Interest},
net::windows::named_pipe::{ClientOptions, NamedPipeClient},
sync::mpsc,
time::sleep,
io::{self, Interest},
net::windows::named_pipe::{ClientOptions, NamedPipeClient},
sync::mpsc,
time::sleep,
};
use crate::util::errors::{wrap, AnyError};
pub struct ServerBridge {
write_tx: mpsc::Sender<Vec<u8>>,
write_tx: mpsc::Sender<Vec<u8>>,
}
pub trait FromServerMessage {
fn from_server_message(index: u16, message: &[u8]) -> Self;
fn from_closed_server_bridge(i: u16) -> Self;
fn from_server_message(index: u16, message: &[u8]) -> Self;
fn from_closed_server_bridge(i: u16) -> Self;
}
const BUFFER_SIZE: usize = 65536;
pub async fn get_socket_rw_stream(path: &Path) -> Result<NamedPipeClient, AnyError> {
// Tokio says we can need to try in a loop. Do so.
// https://docs.rs/tokio/latest/tokio/net/windows/named_pipe/struct.NamedPipeClient.html
let client = loop {
match ClientOptions::new().open(path) {
Ok(client) => break client,
// ERROR_PIPE_BUSY https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499-
Err(e) if e.raw_os_error() == Some(231) => sleep(Duration::from_millis(100)).await,
Err(e) => {
return Err(AnyError::WrappedError(wrap(
e,
format!(
"error connecting to vscode server socket in {}",
path.display()
),
)))
}
}
};
// Tokio says we can need to try in a loop. Do so.
// https://docs.rs/tokio/latest/tokio/net/windows/named_pipe/struct.NamedPipeClient.html
let client = loop {
match ClientOptions::new().open(path) {
Ok(client) => break client,
// ERROR_PIPE_BUSY https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499-
Err(e) if e.raw_os_error() == Some(231) => sleep(Duration::from_millis(100)).await,
Err(e) => {
return Err(AnyError::WrappedError(wrap(
e,
format!(
"error connecting to vscode server socket in {}",
path.display()
),
)))
}
}
};
Ok(client)
Ok(client)
}
impl ServerBridge {
pub async fn new<T>(path: &Path, index: u16, target: &mpsc::Sender<T>) -> Result<Self, AnyError>
where
T: 'static + FromServerMessage + Send,
{
let client = get_socket_rw_stream(path).await?;
let (write_tx, mut write_rx) = mpsc::channel(4);
let read_tx = target.clone();
tokio::spawn(async move {
let mut read_buf = vec![0; BUFFER_SIZE];
let mut pending_recv: Option<Vec<u8>> = None;
pub async fn new<T>(path: &Path, index: u16, target: &mpsc::Sender<T>) -> Result<Self, AnyError>
where
T: 'static + FromServerMessage + Send,
{
let client = get_socket_rw_stream(path).await?;
let (write_tx, mut write_rx) = mpsc::channel(4);
let read_tx = target.clone();
tokio::spawn(async move {
let mut read_buf = vec![0; BUFFER_SIZE];
let mut pending_recv: Option<Vec<u8>> = None;
// See https://docs.rs/tokio/1.17.0/tokio/net/windows/named_pipe/struct.NamedPipeClient.html#method.ready
// With additional complications. If there's nothing queued to write, we wait for the
// pipe to be readable, or for something to come in. If there is something to
// write, wait until the pipe is either readable or writable.
loop {
let ready_result = if pending_recv.is_none() {
tokio::select! {
msg = write_rx.recv() => match msg {
Some(msg) => {
pending_recv = Some(msg);
client.ready(Interest::READABLE | Interest::WRITABLE).await
},
None => return
},
r = client.ready(Interest::READABLE) => r,
}
} else {
client.ready(Interest::READABLE | Interest::WRITABLE).await
};
// See https://docs.rs/tokio/1.17.0/tokio/net/windows/named_pipe/struct.NamedPipeClient.html#method.ready
// With additional complications. If there's nothing queued to write, we wait for the
// pipe to be readable, or for something to come in. If there is something to
// write, wait until the pipe is either readable or writable.
loop {
let ready_result = if pending_recv.is_none() {
tokio::select! {
msg = write_rx.recv() => match msg {
Some(msg) => {
pending_recv = Some(msg);
client.ready(Interest::READABLE | Interest::WRITABLE).await
},
None => return
},
r = client.ready(Interest::READABLE) => r,
}
} else {
client.ready(Interest::READABLE | Interest::WRITABLE).await
};
let ready = match ready_result {
Ok(r) => r,
Err(_) => return,
};
let ready = match ready_result {
Ok(r) => r,
Err(_) => return,
};
if ready.is_readable() {
match client.try_read(&mut read_buf) {
Ok(0) => return, // EOF
Ok(s) => {
let send = read_tx
.send(T::from_server_message(index, &read_buf[..s]))
.await;
if send.is_err() {
return;
}
}
Err(e) if e.kind() == std::io::ErrorKind::WouldBlock => {
continue;
}
Err(_) => return,
}
}
if ready.is_readable() {
match client.try_read(&mut read_buf) {
Ok(0) => return, // EOF
Ok(s) => {
let send = read_tx
.send(T::from_server_message(index, &read_buf[..s]))
.await;
if send.is_err() {
return;
}
}
Err(e) if e.kind() == std::io::ErrorKind::WouldBlock => {
continue;
}
Err(_) => return,
}
}
if let Some(msg) = &pending_recv {
if ready.is_writable() {
match client.try_write(msg) {
Ok(n) if n == msg.len() => pending_recv = None,
Ok(n) => pending_recv = Some(msg[n..].to_vec()),
Err(e) if e.kind() == io::ErrorKind::WouldBlock => {
continue;
}
Err(_) => return,
}
}
}
}
});
if let Some(msg) = &pending_recv {
if ready.is_writable() {
match client.try_write(msg) {
Ok(n) if n == msg.len() => pending_recv = None,
Ok(n) => pending_recv = Some(msg[n..].to_vec()),
Err(e) if e.kind() == io::ErrorKind::WouldBlock => {
continue;
}
Err(_) => return,
}
}
}
}
});
Ok(ServerBridge { write_tx })
}
Ok(ServerBridge { write_tx })
}
pub async fn write(&self, b: Vec<u8>) -> std::io::Result<()> {
self.write_tx.send(b).await.ok();
Ok(())
}
pub async fn write(&self, b: Vec<u8>) -> std::io::Result<()> {
self.write_tx.send(b).await.ok();
Ok(())
}
pub async fn close(self) -> std::io::Result<()> {
drop(self.write_tx);
Ok(())
}
pub async fn close(self) -> std::io::Result<()> {
drop(self.write_tx);
Ok(())
}
}

View file

@ -16,30 +16,30 @@ pub const SERVICE_LOG_FILE_NAME: &str = "tunnel-service.log";
#[async_trait]
pub trait ServiceContainer: Send {
async fn run_service(
&mut self,
log: log::Logger,
launcher_paths: LauncherPaths,
shutdown_rx: oneshot::Receiver<()>,
) -> Result<(), AnyError>;
async fn run_service(
&mut self,
log: log::Logger,
launcher_paths: LauncherPaths,
shutdown_rx: oneshot::Receiver<()>,
) -> Result<(), AnyError>;
}
pub trait ServiceManager {
/// Registers the current executable as a service to run with the given set
/// of arguments.
fn register(&self, exe: PathBuf, args: &[&str]) -> Result<(), AnyError>;
/// Registers the current executable as a service to run with the given set
/// of arguments.
fn register(&self, exe: PathBuf, args: &[&str]) -> Result<(), AnyError>;
/// Runs the service using the given handle. The executable *must not* take
/// any action which may fail prior to calling this to ensure service
/// states may update.
fn run(
&self,
launcher_paths: LauncherPaths,
handle: impl 'static + ServiceContainer,
) -> Result<(), AnyError>;
/// Runs the service using the given handle. The executable *must not* take
/// any action which may fail prior to calling this to ensure service
/// states may update.
fn run(
&self,
launcher_paths: LauncherPaths,
handle: impl 'static + ServiceContainer,
) -> Result<(), AnyError>;
/// Unregisters the current executable as a service.
fn unregister(&self) -> Result<(), AnyError>;
/// Unregisters the current executable as a service.
fn unregister(&self) -> Result<(), AnyError>;
}
#[cfg(target_os = "windows")]
@ -50,32 +50,32 @@ pub type ServiceManagerImpl = UnimplementedServiceManager;
#[allow(unreachable_code)]
pub fn create_service_manager(log: log::Logger) -> ServiceManagerImpl {
ServiceManagerImpl::new(log)
ServiceManagerImpl::new(log)
}
pub struct UnimplementedServiceManager();
#[allow(dead_code)]
impl UnimplementedServiceManager {
fn new(_log: log::Logger) -> Self {
Self()
}
fn new(_log: log::Logger) -> Self {
Self()
}
}
impl ServiceManager for UnimplementedServiceManager {
fn register(&self, _exe: PathBuf, _args: &[&str]) -> Result<(), AnyError> {
unimplemented!("Service management is not supported on this platform");
}
fn register(&self, _exe: PathBuf, _args: &[&str]) -> Result<(), AnyError> {
unimplemented!("Service management is not supported on this platform");
}
fn run(
&self,
_launcher_paths: LauncherPaths,
_handle: impl 'static + ServiceContainer,
) -> Result<(), AnyError> {
unimplemented!("Service management is not supported on this platform");
}
fn run(
&self,
_launcher_paths: LauncherPaths,
_handle: impl 'static + ServiceContainer,
) -> Result<(), AnyError> {
unimplemented!("Service management is not supported on this platform");
}
fn unregister(&self) -> Result<(), AnyError> {
unimplemented!("Service management is not supported on this platform");
}
fn unregister(&self) -> Result<(), AnyError> {
unimplemented!("Service management is not supported on this platform");
}
}

View file

@ -8,271 +8,271 @@ use lazy_static::lazy_static;
use std::{ffi::OsString, sync::Mutex, thread, time::Duration};
use tokio::sync::oneshot;
use windows_service::{
define_windows_service,
service::{
ServiceAccess, ServiceControl, ServiceControlAccept, ServiceErrorControl, ServiceExitCode,
ServiceInfo, ServiceStartType, ServiceState, ServiceStatus, ServiceType,
},
service_control_handler::{self, ServiceControlHandlerResult},
service_dispatcher,
service_manager::{ServiceManager, ServiceManagerAccess},
define_windows_service,
service::{
ServiceAccess, ServiceControl, ServiceControlAccept, ServiceErrorControl, ServiceExitCode,
ServiceInfo, ServiceStartType, ServiceState, ServiceStatus, ServiceType,
},
service_control_handler::{self, ServiceControlHandlerResult},
service_dispatcher,
service_manager::{ServiceManager, ServiceManagerAccess},
};
use crate::util::errors::{wrap, AnyError, WindowsNeedsElevation};
use crate::{
log::{self, FileLogSink},
state::LauncherPaths,
log::{self, FileLogSink},
state::LauncherPaths,
};
use super::service::{
ServiceContainer, ServiceManager as CliServiceManager, SERVICE_LOG_FILE_NAME,
ServiceContainer, ServiceManager as CliServiceManager, SERVICE_LOG_FILE_NAME,
};
pub struct WindowsService {
log: log::Logger,
log: log::Logger,
}
const SERVICE_NAME: &str = "code_tunnel";
const SERVICE_TYPE: ServiceType = ServiceType::OWN_PROCESS;
impl WindowsService {
pub fn new(log: log::Logger) -> Self {
Self { log }
}
pub fn new(log: log::Logger) -> Self {
Self { log }
}
}
impl CliServiceManager for WindowsService {
fn register(&self, exe: std::path::PathBuf, args: &[&str]) -> Result<(), AnyError> {
let service_manager = ServiceManager::local_computer(
None::<&str>,
ServiceManagerAccess::CONNECT | ServiceManagerAccess::CREATE_SERVICE,
)
.map_err(|e| WindowsNeedsElevation(format!("error getting service manager: {}", e)))?;
fn register(&self, exe: std::path::PathBuf, args: &[&str]) -> Result<(), AnyError> {
let service_manager = ServiceManager::local_computer(
None::<&str>,
ServiceManagerAccess::CONNECT | ServiceManagerAccess::CREATE_SERVICE,
)
.map_err(|e| WindowsNeedsElevation(format!("error getting service manager: {}", e)))?;
let mut service_info = ServiceInfo {
name: OsString::from(SERVICE_NAME),
display_name: OsString::from("VS Code Tunnel"),
service_type: SERVICE_TYPE,
start_type: ServiceStartType::AutoStart,
error_control: ServiceErrorControl::Normal,
executable_path: exe,
launch_arguments: args.iter().map(OsString::from).collect(),
dependencies: vec![],
account_name: None,
account_password: None,
};
let mut service_info = ServiceInfo {
name: OsString::from(SERVICE_NAME),
display_name: OsString::from("VS Code Tunnel"),
service_type: SERVICE_TYPE,
start_type: ServiceStartType::AutoStart,
error_control: ServiceErrorControl::Normal,
executable_path: exe,
launch_arguments: args.iter().map(OsString::from).collect(),
dependencies: vec![],
account_name: None,
account_password: None,
};
let existing_service = service_manager.open_service(
SERVICE_NAME,
ServiceAccess::QUERY_STATUS | ServiceAccess::START | ServiceAccess::CHANGE_CONFIG,
);
let service = if let Ok(service) = existing_service {
service
.change_config(&service_info)
.map_err(|e| wrap(e, "error updating existing service"))?;
service
} else {
loop {
let (username, password) = prompt_credentials()?;
service_info.account_name = Some(format!(".\\{}", username).into());
service_info.account_password = Some(password.into());
let existing_service = service_manager.open_service(
SERVICE_NAME,
ServiceAccess::QUERY_STATUS | ServiceAccess::START | ServiceAccess::CHANGE_CONFIG,
);
let service = if let Ok(service) = existing_service {
service
.change_config(&service_info)
.map_err(|e| wrap(e, "error updating existing service"))?;
service
} else {
loop {
let (username, password) = prompt_credentials()?;
service_info.account_name = Some(format!(".\\{}", username).into());
service_info.account_password = Some(password.into());
match service_manager.create_service(
&service_info,
ServiceAccess::CHANGE_CONFIG | ServiceAccess::START,
) {
Ok(service) => break service,
Err(windows_service::Error::Winapi(e)) if Some(1057) == e.raw_os_error() => {
error!(
self.log,
"Invalid username or password, please try again..."
);
}
Err(e) => return Err(wrap(e, "error registering service").into()),
}
}
};
match service_manager.create_service(
&service_info,
ServiceAccess::CHANGE_CONFIG | ServiceAccess::START,
) {
Ok(service) => break service,
Err(windows_service::Error::Winapi(e)) if Some(1057) == e.raw_os_error() => {
error!(
self.log,
"Invalid username or password, please try again..."
);
}
Err(e) => return Err(wrap(e, "error registering service").into()),
}
}
};
service
.set_description("Service that runs `code tunnel` for access on vscode.dev")
.ok();
service
.set_description("Service that runs `code tunnel` for access on vscode.dev")
.ok();
info!(self.log, "Successfully registered service...");
info!(self.log, "Successfully registered service...");
let status = service
.query_status()
.map(|s| s.current_state)
.unwrap_or(ServiceState::Stopped);
let status = service
.query_status()
.map(|s| s.current_state)
.unwrap_or(ServiceState::Stopped);
if status == ServiceState::Stopped {
service
.start::<&str>(&[])
.map_err(|e| wrap(e, "error starting service"))?;
}
if status == ServiceState::Stopped {
service
.start::<&str>(&[])
.map_err(|e| wrap(e, "error starting service"))?;
}
info!(self.log, "Tunnel service successfully started");
Ok(())
}
info!(self.log, "Tunnel service successfully started");
Ok(())
}
#[allow(unused_must_use)] // triggers incorrectly on `define_windows_service!`
fn run(
&self,
launcher_paths: LauncherPaths,
handle: impl 'static + ServiceContainer,
) -> Result<(), AnyError> {
let log = match FileLogSink::new(
log::Level::Debug,
&launcher_paths.root().join(SERVICE_LOG_FILE_NAME),
) {
Ok(sink) => self.log.tee(sink),
Err(e) => {
warning!(self.log, "Failed to create service log file: {}", e);
self.log.clone()
}
};
#[allow(unused_must_use)] // triggers incorrectly on `define_windows_service!`
fn run(
&self,
launcher_paths: LauncherPaths,
handle: impl 'static + ServiceContainer,
) -> Result<(), AnyError> {
let log = match FileLogSink::new(
log::Level::Debug,
&launcher_paths.root().join(SERVICE_LOG_FILE_NAME),
) {
Ok(sink) => self.log.tee(sink),
Err(e) => {
warning!(self.log, "Failed to create service log file: {}", e);
self.log.clone()
}
};
// We put the handle into the global "impl" type and then take it out in
// my_service_main. This is needed just since we have to have that
// function at the root level, but need to pass in data later here...
SERVICE_IMPL.lock().unwrap().replace(ServiceImpl {
container: Box::new(handle),
launcher_paths,
log,
});
// We put the handle into the global "impl" type and then take it out in
// my_service_main. This is needed just since we have to have that
// function at the root level, but need to pass in data later here...
SERVICE_IMPL.lock().unwrap().replace(ServiceImpl {
container: Box::new(handle),
launcher_paths,
log,
});
define_windows_service!(ffi_service_main, service_main);
define_windows_service!(ffi_service_main, service_main);
service_dispatcher::start(SERVICE_NAME, ffi_service_main)
.map_err(|e| wrap(e, "error starting service dispatcher").into())
}
service_dispatcher::start(SERVICE_NAME, ffi_service_main)
.map_err(|e| wrap(e, "error starting service dispatcher").into())
}
fn unregister(&self) -> Result<(), AnyError> {
let service_manager =
ServiceManager::local_computer(None::<&str>, ServiceManagerAccess::CONNECT)
.map_err(|e| wrap(e, "error getting service manager"))?;
fn unregister(&self) -> Result<(), AnyError> {
let service_manager =
ServiceManager::local_computer(None::<&str>, ServiceManagerAccess::CONNECT)
.map_err(|e| wrap(e, "error getting service manager"))?;
let service = service_manager.open_service(
SERVICE_NAME,
ServiceAccess::QUERY_STATUS | ServiceAccess::STOP | ServiceAccess::DELETE,
);
let service = service_manager.open_service(
SERVICE_NAME,
ServiceAccess::QUERY_STATUS | ServiceAccess::STOP | ServiceAccess::DELETE,
);
let service = match service {
Ok(service) => service,
// Service does not exist:
Err(windows_service::Error::Winapi(e)) if Some(1060) == e.raw_os_error() => {
return Ok(())
}
Err(e) => return Err(wrap(e, "error getting service handle").into()),
};
let service = match service {
Ok(service) => service,
// Service does not exist:
Err(windows_service::Error::Winapi(e)) if Some(1060) == e.raw_os_error() => {
return Ok(())
}
Err(e) => return Err(wrap(e, "error getting service handle").into()),
};
let service_status = service
.query_status()
.map_err(|e| wrap(e, "error getting service status"))?;
let service_status = service
.query_status()
.map_err(|e| wrap(e, "error getting service status"))?;
if service_status.current_state != ServiceState::Stopped {
service
.stop()
.map_err(|e| wrap(e, "error getting stopping service"))?;
if service_status.current_state != ServiceState::Stopped {
service
.stop()
.map_err(|e| wrap(e, "error getting stopping service"))?;
while let Ok(ServiceState::Stopped) = service.query_status().map(|s| s.current_state) {
info!(self.log, "Polling for service to stop...");
thread::sleep(Duration::from_secs(1));
}
}
while let Ok(ServiceState::Stopped) = service.query_status().map(|s| s.current_state) {
info!(self.log, "Polling for service to stop...");
thread::sleep(Duration::from_secs(1));
}
}
service
.delete()
.map_err(|e| wrap(e, "error deleting service"))?;
service
.delete()
.map_err(|e| wrap(e, "error deleting service"))?;
Ok(())
}
Ok(())
}
}
struct ServiceImpl {
container: Box<dyn ServiceContainer>,
launcher_paths: LauncherPaths,
log: log::Logger,
container: Box<dyn ServiceContainer>,
launcher_paths: LauncherPaths,
log: log::Logger,
}
lazy_static! {
static ref SERVICE_IMPL: Mutex<Option<ServiceImpl>> = Mutex::new(None);
static ref SERVICE_IMPL: Mutex<Option<ServiceImpl>> = Mutex::new(None);
}
/// "main" function that the service calls in its own thread.
fn service_main(_arguments: Vec<OsString>) -> Result<(), AnyError> {
let mut service = SERVICE_IMPL.lock().unwrap().take().unwrap();
let mut service = SERVICE_IMPL.lock().unwrap().take().unwrap();
// Create a channel to be able to poll a stop event from the service worker loop.
let (shutdown_tx, shutdown_rx) = oneshot::channel();
let mut shutdown_tx = Some(shutdown_tx);
// Create a channel to be able to poll a stop event from the service worker loop.
let (shutdown_tx, shutdown_rx) = oneshot::channel();
let mut shutdown_tx = Some(shutdown_tx);
// Define system service event handler that will be receiving service events.
let event_handler = move |control_event| -> ServiceControlHandlerResult {
match control_event {
ServiceControl::Interrogate => ServiceControlHandlerResult::NoError,
ServiceControl::Stop => {
shutdown_tx.take().and_then(|tx| tx.send(()).ok());
ServiceControlHandlerResult::NoError
}
// Define system service event handler that will be receiving service events.
let event_handler = move |control_event| -> ServiceControlHandlerResult {
match control_event {
ServiceControl::Interrogate => ServiceControlHandlerResult::NoError,
ServiceControl::Stop => {
shutdown_tx.take().and_then(|tx| tx.send(()).ok());
ServiceControlHandlerResult::NoError
}
_ => ServiceControlHandlerResult::NotImplemented,
}
};
_ => ServiceControlHandlerResult::NotImplemented,
}
};
let status_handle = service_control_handler::register(SERVICE_NAME, event_handler)
.map_err(|e| wrap(e, "error registering service event handler"))?;
let status_handle = service_control_handler::register(SERVICE_NAME, event_handler)
.map_err(|e| wrap(e, "error registering service event handler"))?;
// Tell the system that service is running
status_handle
.set_service_status(ServiceStatus {
service_type: SERVICE_TYPE,
current_state: ServiceState::Running,
controls_accepted: ServiceControlAccept::STOP,
exit_code: ServiceExitCode::Win32(0),
checkpoint: 0,
wait_hint: Duration::default(),
process_id: None,
})
.map_err(|e| wrap(e, "error marking service as running"))?;
// Tell the system that service is running
status_handle
.set_service_status(ServiceStatus {
service_type: SERVICE_TYPE,
current_state: ServiceState::Running,
controls_accepted: ServiceControlAccept::STOP,
exit_code: ServiceExitCode::Win32(0),
checkpoint: 0,
wait_hint: Duration::default(),
process_id: None,
})
.map_err(|e| wrap(e, "error marking service as running"))?;
let result = tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.unwrap()
.block_on(
service
.container
.run_service(service.log, service.launcher_paths, shutdown_rx),
);
let result = tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.unwrap()
.block_on(
service
.container
.run_service(service.log, service.launcher_paths, shutdown_rx),
);
status_handle
.set_service_status(ServiceStatus {
service_type: SERVICE_TYPE,
current_state: ServiceState::Stopped,
controls_accepted: ServiceControlAccept::empty(),
exit_code: ServiceExitCode::Win32(0),
checkpoint: 0,
wait_hint: Duration::default(),
process_id: None,
})
.map_err(|e| wrap(e, "error marking service as stopped"))?;
status_handle
.set_service_status(ServiceStatus {
service_type: SERVICE_TYPE,
current_state: ServiceState::Stopped,
controls_accepted: ServiceControlAccept::empty(),
exit_code: ServiceExitCode::Win32(0),
checkpoint: 0,
wait_hint: Duration::default(),
process_id: None,
})
.map_err(|e| wrap(e, "error marking service as stopped"))?;
result
result
}
fn prompt_credentials() -> Result<(String, String), AnyError> {
println!("Running a Windows service under your user requires your username and password.");
println!("These are sent to the Windows Service Manager and are not stored by VS Code.");
println!("Running a Windows service under your user requires your username and password.");
println!("These are sent to the Windows Service Manager and are not stored by VS Code.");
let username: String = Input::with_theme(&ColorfulTheme::default())
.with_prompt("Windows username:")
.interact_text()
.map_err(|e| wrap(e, "Failed to read username"))?;
let username: String = Input::with_theme(&ColorfulTheme::default())
.with_prompt("Windows username:")
.interact_text()
.map_err(|e| wrap(e, "Failed to read username"))?;
let password = Password::with_theme(&ColorfulTheme::default())
.with_prompt("Windows password:")
.interact()
.map_err(|e| wrap(e, "Failed to read password"))?;
let password = Password::with_theme(&ColorfulTheme::default())
.with_prompt("Windows password:")
.interact()
.map_err(|e| wrap(e, "Failed to read password"))?;
Ok((username, password))
Ok((username, password))
}

View file

@ -3,118 +3,118 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
use crate::constants::{LAUNCHER_ASSET_NAME, LAUNCHER_VERSION};
use crate::constants::{VSCODE_CLI_ASSET_NAME, VSCODE_CLI_VERSION};
use crate::util::{errors, http, io::SilentCopyProgress};
use serde::Deserialize;
use std::{
fs::{rename, set_permissions},
path::Path,
fs::{rename, set_permissions},
path::Path,
};
pub struct Update {
client: reqwest::Client,
client: reqwest::Client,
}
const LATEST_URL: &str = "https://aka.ms/vscode-server-launcher/update";
impl Default for Update {
fn default() -> Self {
Self::new()
}
fn default() -> Self {
Self::new()
}
}
impl Update {
// Creates a new Update instance without authentication
pub fn new() -> Update {
Update {
client: reqwest::Client::new(),
}
}
// Creates a new Update instance without authentication
pub fn new() -> Update {
Update {
client: reqwest::Client::new(),
}
}
// Gets the asset to update to, or None if the current launcher is up to date.
pub async fn get_latest_release(&self) -> Result<LauncherRelease, errors::AnyError> {
let res = self
.client
.get(LATEST_URL)
.header(
"User-Agent",
format!(
"vscode-server-launcher/{}",
LAUNCHER_VERSION.unwrap_or("dev")
),
)
.send()
.await?;
// Gets the asset to update to, or None if the current launcher is up to date.
pub async fn get_latest_release(&self) -> Result<LauncherRelease, errors::AnyError> {
let res = self
.client
.get(LATEST_URL)
.header(
"User-Agent",
format!(
"vscode-server-launcher/{}",
VSCODE_CLI_VERSION.unwrap_or("dev")
),
)
.send()
.await?;
if !res.status().is_success() {
return Err(errors::StatusError::from_res(res).await?.into());
}
if !res.status().is_success() {
return Err(errors::StatusError::from_res(res).await?.into());
}
Ok(res.json::<LauncherRelease>().await?)
}
Ok(res.json::<LauncherRelease>().await?)
}
pub async fn switch_to_release(
&self,
update: &LauncherRelease,
target_path: &Path,
) -> Result<(), errors::AnyError> {
let mut staging_path = target_path.to_owned();
staging_path.set_file_name(format!(
"{}.next",
target_path.file_name().unwrap().to_string_lossy()
));
pub async fn switch_to_release(
&self,
update: &LauncherRelease,
target_path: &Path,
) -> Result<(), errors::AnyError> {
let mut staging_path = target_path.to_owned();
staging_path.set_file_name(format!(
"{}.next",
target_path.file_name().unwrap().to_string_lossy()
));
let an = LAUNCHER_ASSET_NAME.unwrap();
let mut url = format!("{}/{}/{}", update.url, an, an);
if cfg!(target_os = "windows") {
url += ".exe";
}
let an = VSCODE_CLI_ASSET_NAME.unwrap();
let mut url = format!("{}/{}/{}", update.url, an, an);
if cfg!(target_os = "windows") {
url += ".exe";
}
let res = self.client.get(url).send().await?;
let res = self.client.get(url).send().await?;
if !res.status().is_success() {
return Err(errors::StatusError::from_res(res).await?.into());
}
if !res.status().is_success() {
return Err(errors::StatusError::from_res(res).await?.into());
}
http::download_into_file(&staging_path, SilentCopyProgress(), res).await?;
http::download_into_file(&staging_path, SilentCopyProgress(), res).await?;
copy_file_metadata(target_path, &staging_path)
.map_err(|e| errors::wrap(e, "failed to set file permissions"))?;
copy_file_metadata(target_path, &staging_path)
.map_err(|e| errors::wrap(e, "failed to set file permissions"))?;
rename(&staging_path, &target_path)
.map_err(|e| errors::wrap(e, "failed to copy new launcher version"))?;
rename(&staging_path, &target_path)
.map_err(|e| errors::wrap(e, "failed to copy new launcher version"))?;
Ok(())
}
Ok(())
}
}
#[derive(Deserialize, Clone)]
pub struct LauncherRelease {
pub version: String,
pub url: String,
pub released_at: u64,
pub version: String,
pub url: String,
pub released_at: u64,
}
#[cfg(target_os = "windows")]
fn copy_file_metadata(from: &Path, to: &Path) -> Result<(), std::io::Error> {
let permissions = from.metadata()?.permissions();
set_permissions(&to, permissions)?;
Ok(())
let permissions = from.metadata()?.permissions();
set_permissions(&to, permissions)?;
Ok(())
}
#[cfg(not(target_os = "windows"))]
fn copy_file_metadata(from: &Path, to: &Path) -> Result<(), std::io::Error> {
use std::os::unix::ffi::OsStrExt;
use std::os::unix::fs::MetadataExt;
use std::os::unix::ffi::OsStrExt;
use std::os::unix::fs::MetadataExt;
let metadata = from.metadata()?;
set_permissions(&to, metadata.permissions())?;
let metadata = from.metadata()?;
set_permissions(&to, metadata.permissions())?;
// based on coreutils' chown https://github.com/uutils/coreutils/blob/72b4629916abe0852ad27286f4e307fbca546b6e/src/chown/chown.rs#L266-L281
let s = std::ffi::CString::new(to.as_os_str().as_bytes()).unwrap();
let ret = unsafe { libc::chown(s.as_ptr(), metadata.uid(), metadata.gid()) };
if ret != 0 {
return Err(std::io::Error::last_os_error());
}
// based on coreutils' chown https://github.com/uutils/coreutils/blob/72b4629916abe0852ad27286f4e307fbca546b6e/src/chown/chown.rs#L266-L281
let s = std::ffi::CString::new(to.as_os_str().as_bytes()).unwrap();
let ret = unsafe { libc::chown(s.as_ptr(), metadata.uid(), metadata.gid()) };
if ret != 0 {
return Err(std::io::Error::last_os_error());
}
Ok(())
Ok(())
}

View file

@ -8,266 +8,266 @@ use std::path::Path;
use serde::Deserialize;
use crate::{
constants::VSCODE_CLI_UPDATE_ENDPOINT,
debug, log, options, spanf,
util::{
errors::{
AnyError, StatusError, UnsupportedPlatformError, UpdatesNotConfigured, WrappedError,
},
io::ReportCopyProgress,
},
constants::VSCODE_CLI_UPDATE_ENDPOINT,
debug, log, options, spanf,
util::{
errors::{
AnyError, StatusError, UnsupportedPlatformError, UpdatesNotConfigured, WrappedError,
},
io::ReportCopyProgress,
},
};
/// Implementation of the VS Code Update service for use in the CLI.
pub struct UpdateService {
client: reqwest::Client,
log: log::Logger,
client: reqwest::Client,
log: log::Logger,
}
/// Describes a specific release, can be created manually or returned from the update service.
pub struct Release {
pub platform: Platform,
pub target: TargetKind,
pub quality: options::Quality,
pub commit: String,
pub platform: Platform,
pub target: TargetKind,
pub quality: options::Quality,
pub commit: String,
}
#[derive(Deserialize)]
struct UpdateServerVersion {
pub version: String,
pub version: String,
}
fn quality_download_segment(quality: options::Quality) -> &'static str {
match quality {
options::Quality::Stable => "stable",
options::Quality::Insiders => "insider",
options::Quality::Exploration => "exploration",
}
match quality {
options::Quality::Stable => "stable",
options::Quality::Insiders => "insider",
options::Quality::Exploration => "exploration",
}
}
impl UpdateService {
pub fn new(log: log::Logger, client: reqwest::Client) -> Self {
UpdateService { client, log }
}
pub fn new(log: log::Logger, client: reqwest::Client) -> Self {
UpdateService { client, log }
}
pub async fn get_release_by_semver_version(
&self,
platform: Platform,
target: TargetKind,
quality: options::Quality,
version: &str,
) -> Result<Release, AnyError> {
let update_endpoint = VSCODE_CLI_UPDATE_ENDPOINT.ok_or(UpdatesNotConfigured())?;
let download_segment = target
.download_segment(platform)
.ok_or(UnsupportedPlatformError())?;
let download_url = format!(
"{}/api/versions/{}/{}/{}",
update_endpoint,
version,
download_segment,
quality_download_segment(quality),
);
pub async fn get_release_by_semver_version(
&self,
platform: Platform,
target: TargetKind,
quality: options::Quality,
version: &str,
) -> Result<Release, AnyError> {
let update_endpoint = VSCODE_CLI_UPDATE_ENDPOINT.ok_or(UpdatesNotConfigured())?;
let download_segment = target
.download_segment(platform)
.ok_or(UnsupportedPlatformError())?;
let download_url = format!(
"{}/api/versions/{}/{}/{}",
update_endpoint,
version,
download_segment,
quality_download_segment(quality),
);
let response = spanf!(
self.log,
self.log.span("server.version.resolve"),
self.client.get(download_url).send()
)?;
let response = spanf!(
self.log,
self.log.span("server.version.resolve"),
self.client.get(download_url).send()
)?;
if !response.status().is_success() {
return Err(StatusError::from_res(response).await?.into());
}
if !response.status().is_success() {
return Err(StatusError::from_res(response).await?.into());
}
let res = response.json::<UpdateServerVersion>().await?;
debug!(self.log, "Resolved version {} to {}", version, res.version);
let res = response.json::<UpdateServerVersion>().await?;
debug!(self.log, "Resolved version {} to {}", version, res.version);
Ok(Release {
target,
platform,
quality,
commit: res.version,
})
}
Ok(Release {
target,
platform,
quality,
commit: res.version,
})
}
/// Gets the latest commit for the target of the given quality.
pub async fn get_latest_commit(
&self,
platform: Platform,
target: TargetKind,
quality: options::Quality,
) -> Result<Release, AnyError> {
let update_endpoint = VSCODE_CLI_UPDATE_ENDPOINT.ok_or(UpdatesNotConfigured())?;
let download_segment = target
.download_segment(platform)
.ok_or(UnsupportedPlatformError())?;
let download_url = format!(
"{}/api/latest/{}/{}",
update_endpoint,
download_segment,
quality_download_segment(quality),
);
/// Gets the latest commit for the target of the given quality.
pub async fn get_latest_commit(
&self,
platform: Platform,
target: TargetKind,
quality: options::Quality,
) -> Result<Release, AnyError> {
let update_endpoint = VSCODE_CLI_UPDATE_ENDPOINT.ok_or(UpdatesNotConfigured())?;
let download_segment = target
.download_segment(platform)
.ok_or(UnsupportedPlatformError())?;
let download_url = format!(
"{}/api/latest/{}/{}",
update_endpoint,
download_segment,
quality_download_segment(quality),
);
let response = spanf!(
self.log,
self.log.span("server.version.resolve"),
self.client.get(download_url).send()
)?;
let response = spanf!(
self.log,
self.log.span("server.version.resolve"),
self.client.get(download_url).send()
)?;
if !response.status().is_success() {
return Err(StatusError::from_res(response).await?.into());
}
if !response.status().is_success() {
return Err(StatusError::from_res(response).await?.into());
}
let res = response.json::<UpdateServerVersion>().await?;
debug!(self.log, "Resolved quality {} to {}", quality, res.version);
let res = response.json::<UpdateServerVersion>().await?;
debug!(self.log, "Resolved quality {} to {}", quality, res.version);
Ok(Release {
target,
platform,
quality,
commit: res.version,
})
}
Ok(Release {
target,
platform,
quality,
commit: res.version,
})
}
/// Gets the download stream for the release.
pub async fn get_download_stream(
&self,
release: &Release,
) -> Result<reqwest::Response, AnyError> {
let update_endpoint = VSCODE_CLI_UPDATE_ENDPOINT.ok_or(UpdatesNotConfigured())?;
let download_segment = release
.target
.download_segment(release.platform)
.ok_or(UnsupportedPlatformError())?;
/// Gets the download stream for the release.
pub async fn get_download_stream(
&self,
release: &Release,
) -> Result<reqwest::Response, AnyError> {
let update_endpoint = VSCODE_CLI_UPDATE_ENDPOINT.ok_or(UpdatesNotConfigured())?;
let download_segment = release
.target
.download_segment(release.platform)
.ok_or(UnsupportedPlatformError())?;
let download_url = format!(
"{}/commit:{}/{}/{}",
update_endpoint,
release.commit,
download_segment,
quality_download_segment(release.quality),
);
let download_url = format!(
"{}/commit:{}/{}/{}",
update_endpoint,
release.commit,
download_segment,
quality_download_segment(release.quality),
);
let response = reqwest::get(&download_url).await?;
if !response.status().is_success() {
return Err(StatusError::from_res(response).await?.into());
}
let response = reqwest::get(&download_url).await?;
if !response.status().is_success() {
return Err(StatusError::from_res(response).await?.into());
}
Ok(response)
}
Ok(response)
}
}
pub fn unzip_downloaded_release<T>(
compressed_file: &Path,
target_dir: &Path,
reporter: T,
compressed_file: &Path,
target_dir: &Path,
reporter: T,
) -> Result<(), WrappedError>
where
T: ReportCopyProgress,
T: ReportCopyProgress,
{
#[cfg(any(target_os = "windows", target_os = "macos"))]
{
use crate::util::zipper;
zipper::unzip_file(compressed_file, target_dir, reporter)
}
#[cfg(target_os = "linux")]
{
use crate::util::tar;
tar::decompress_tarball(compressed_file, target_dir, reporter)
}
#[cfg(any(target_os = "windows", target_os = "macos"))]
{
use crate::util::zipper;
zipper::unzip_file(compressed_file, target_dir, reporter)
}
#[cfg(target_os = "linux")]
{
use crate::util::tar;
tar::decompress_tarball(compressed_file, target_dir, reporter)
}
}
#[derive(Eq, PartialEq, Copy, Clone)]
pub enum TargetKind {
Server,
Archive,
Web,
Server,
Archive,
Web,
}
impl TargetKind {
fn download_segment(&self, platform: Platform) -> Option<String> {
match *self {
TargetKind::Server => Some(platform.headless()),
TargetKind::Archive => platform.archive(),
TargetKind::Web => Some(platform.web()),
}
}
fn download_segment(&self, platform: Platform) -> Option<String> {
match *self {
TargetKind::Server => Some(platform.headless()),
TargetKind::Archive => platform.archive(),
TargetKind::Web => Some(platform.web()),
}
}
}
#[derive(Debug, Copy, Clone)]
pub enum Platform {
LinuxAlpineX64,
LinuxAlpineARM64,
LinuxX64,
LinuxARM64,
LinuxARM32,
DarwinX64,
DarwinARM64,
WindowsX64,
WindowsX86,
LinuxAlpineX64,
LinuxAlpineARM64,
LinuxX64,
LinuxARM64,
LinuxARM32,
DarwinX64,
DarwinARM64,
WindowsX64,
WindowsX86,
}
impl Platform {
pub fn archive(&self) -> Option<String> {
match self {
Platform::LinuxX64 => Some("linux-x64".to_owned()),
Platform::LinuxARM64 => Some("linux-arm64".to_owned()),
Platform::LinuxARM32 => Some("linux-armhf".to_owned()),
Platform::DarwinX64 => Some("darwin".to_owned()),
Platform::DarwinARM64 => Some("darwin-arm64".to_owned()),
Platform::WindowsX64 => Some("win32-x64-archive".to_owned()),
Platform::WindowsX86 => Some("win32-archive".to_owned()),
_ => None,
}
}
pub fn headless(&self) -> String {
match self {
Platform::LinuxAlpineARM64 => "server-alpine-arm64",
Platform::LinuxAlpineX64 => "server-linux-alpine",
Platform::LinuxX64 => "server-linux-x64",
Platform::LinuxARM64 => "server-linux-arm64",
Platform::LinuxARM32 => "server-linux-armhf",
Platform::DarwinX64 => "server-darwin",
Platform::DarwinARM64 => "server-darwin-arm64",
Platform::WindowsX64 => "server-win32-x64",
Platform::WindowsX86 => "server-win32",
}
.to_owned()
}
pub fn archive(&self) -> Option<String> {
match self {
Platform::LinuxX64 => Some("linux-x64".to_owned()),
Platform::LinuxARM64 => Some("linux-arm64".to_owned()),
Platform::LinuxARM32 => Some("linux-armhf".to_owned()),
Platform::DarwinX64 => Some("darwin".to_owned()),
Platform::DarwinARM64 => Some("darwin-arm64".to_owned()),
Platform::WindowsX64 => Some("win32-x64-archive".to_owned()),
Platform::WindowsX86 => Some("win32-archive".to_owned()),
_ => None,
}
}
pub fn headless(&self) -> String {
match self {
Platform::LinuxAlpineARM64 => "server-alpine-arm64",
Platform::LinuxAlpineX64 => "server-linux-alpine",
Platform::LinuxX64 => "server-linux-x64",
Platform::LinuxARM64 => "server-linux-arm64",
Platform::LinuxARM32 => "server-linux-armhf",
Platform::DarwinX64 => "server-darwin",
Platform::DarwinARM64 => "server-darwin-arm64",
Platform::WindowsX64 => "server-win32-x64",
Platform::WindowsX86 => "server-win32",
}
.to_owned()
}
pub fn web(&self) -> String {
format!("{}-web", self.headless())
}
pub fn web(&self) -> String {
format!("{}-web", self.headless())
}
pub fn env_default() -> Option<Platform> {
if cfg!(all(
target_os = "linux",
target_arch = "x86_64",
target_env = "musl"
)) {
Some(Platform::LinuxAlpineX64)
} else if cfg!(all(
target_os = "linux",
target_arch = "aarch64",
target_env = "musl"
)) {
Some(Platform::LinuxAlpineARM64)
} else if cfg!(all(target_os = "linux", target_arch = "x86_64")) {
Some(Platform::LinuxX64)
} else if cfg!(all(target_os = "linux", target_arch = "armhf")) {
Some(Platform::LinuxARM32)
} else if cfg!(all(target_os = "linux", target_arch = "aarch64")) {
Some(Platform::LinuxARM64)
} else if cfg!(all(target_os = "macos", target_arch = "x86_64")) {
Some(Platform::DarwinX64)
} else if cfg!(all(target_os = "macos", target_arch = "aarch64")) {
Some(Platform::DarwinARM64)
} else if cfg!(all(target_os = "windows", target_arch = "x86_64")) {
Some(Platform::WindowsX64)
} else if cfg!(all(target_os = "windows", target_arch = "x86")) {
Some(Platform::WindowsX86)
} else {
None
}
}
pub fn env_default() -> Option<Platform> {
if cfg!(all(
target_os = "linux",
target_arch = "x86_64",
target_env = "musl"
)) {
Some(Platform::LinuxAlpineX64)
} else if cfg!(all(
target_os = "linux",
target_arch = "aarch64",
target_env = "musl"
)) {
Some(Platform::LinuxAlpineARM64)
} else if cfg!(all(target_os = "linux", target_arch = "x86_64")) {
Some(Platform::LinuxX64)
} else if cfg!(all(target_os = "linux", target_arch = "armhf")) {
Some(Platform::LinuxARM32)
} else if cfg!(all(target_os = "linux", target_arch = "aarch64")) {
Some(Platform::LinuxARM64)
} else if cfg!(all(target_os = "macos", target_arch = "x86_64")) {
Some(Platform::DarwinX64)
} else if cfg!(all(target_os = "macos", target_arch = "aarch64")) {
Some(Platform::DarwinARM64)
} else if cfg!(all(target_os = "windows", target_arch = "x86_64")) {
Some(Platform::WindowsX64)
} else if cfg!(all(target_os = "windows", target_arch = "x86")) {
Some(Platform::WindowsX86)
} else {
None
}
}
}

View file

@ -7,71 +7,71 @@ use std::{ffi::OsStr, process::Stdio};
use tokio::process::Command;
pub async fn capture_command<A, I, S>(
command_str: A,
args: I,
command_str: A,
args: I,
) -> Result<std::process::Output, WrappedError>
where
A: AsRef<OsStr>,
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
A: AsRef<OsStr>,
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
Command::new(&command_str)
.args(args)
.stdin(Stdio::null())
.stdout(Stdio::piped())
.output()
.await
.map_err(|e| {
wrap(
e,
format!(
"failed to execute command '{}'",
(&command_str).as_ref().to_string_lossy()
),
)
})
Command::new(&command_str)
.args(args)
.stdin(Stdio::null())
.stdout(Stdio::piped())
.output()
.await
.map_err(|e| {
wrap(
e,
format!(
"failed to execute command '{}'",
(&command_str).as_ref().to_string_lossy()
),
)
})
}
/// Kills and processes and all of its children.
#[cfg(target_os = "windows")]
pub async fn kill_tree(process_id: u32) -> Result<(), WrappedError> {
capture_command("taskkill", &["/t", "/pid", &process_id.to_string()]).await?;
Ok(())
capture_command("taskkill", &["/t", "/pid", &process_id.to_string()]).await?;
Ok(())
}
/// Kills and processes and all of its children.
#[cfg(not(target_os = "windows"))]
pub async fn kill_tree(process_id: u32) -> Result<(), WrappedError> {
use futures::future::join_all;
use tokio::io::{AsyncBufReadExt, BufReader};
use futures::future::join_all;
use tokio::io::{AsyncBufReadExt, BufReader};
async fn kill_single_pid(process_id_str: String) {
capture_command("kill", &[&process_id_str]).await.ok();
}
async fn kill_single_pid(process_id_str: String) {
capture_command("kill", &[&process_id_str]).await.ok();
}
// Rusty version of https://github.com/microsoft/vscode-js-debug/blob/main/src/targets/node/terminateProcess.sh
// Rusty version of https://github.com/microsoft/vscode-js-debug/blob/main/src/targets/node/terminateProcess.sh
let parent_id = process_id.to_string();
let mut prgrep_cmd = Command::new("pgrep")
.arg("-P")
.arg(&parent_id)
.stdin(Stdio::null())
.stdout(Stdio::piped())
.spawn()
.map_err(|e| wrap(e, "error enumerating process tree"))?;
let parent_id = process_id.to_string();
let mut prgrep_cmd = Command::new("pgrep")
.arg("-P")
.arg(&parent_id)
.stdin(Stdio::null())
.stdout(Stdio::piped())
.spawn()
.map_err(|e| wrap(e, "error enumerating process tree"))?;
let mut kill_futures = vec![tokio::spawn(
async move { kill_single_pid(parent_id).await },
)];
let mut kill_futures = vec![tokio::spawn(
async move { kill_single_pid(parent_id).await },
)];
if let Some(stdout) = prgrep_cmd.stdout.take() {
let mut reader = BufReader::new(stdout).lines();
while let Some(line) = reader.next_line().await.unwrap_or(None) {
kill_futures.push(tokio::spawn(async move { kill_single_pid(line).await }))
}
}
if let Some(stdout) = prgrep_cmd.stdout.take() {
let mut reader = BufReader::new(stdout).lines();
while let Some(line) = reader.next_line().await.unwrap_or(None) {
kill_futures.push(tokio::spawn(async move { kill_single_pid(line).await }))
}
}
join_all(kill_futures).await;
prgrep_cmd.kill().await.ok();
Ok(())
join_all(kill_futures).await;
prgrep_cmd.kill().await.ok();
Ok(())
}

View file

@ -9,89 +9,89 @@ use crate::constants::CONTROL_PORT;
// Wraps another error with additional info.
#[derive(Debug, Clone)]
pub struct WrappedError {
message: String,
original: String,
message: String,
original: String,
}
impl std::fmt::Display for WrappedError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}: {}", self.message, self.original)
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}: {}", self.message, self.original)
}
}
impl std::error::Error for WrappedError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
}
impl WrappedError {
// fn new(original: Box<dyn std::error::Error>, message: String) -> WrappedError {
// WrappedError { message, original }
// }
// fn new(original: Box<dyn std::error::Error>, message: String) -> WrappedError {
// WrappedError { message, original }
// }
}
impl From<reqwest::Error> for WrappedError {
fn from(e: reqwest::Error) -> WrappedError {
WrappedError {
message: format!(
"error requesting {}",
e.url().map_or("<unknown>", |u| u.as_str())
),
original: format!("{}", e),
}
}
fn from(e: reqwest::Error) -> WrappedError {
WrappedError {
message: format!(
"error requesting {}",
e.url().map_or("<unknown>", |u| u.as_str())
),
original: format!("{}", e),
}
}
}
pub fn wrap<T, S>(original: T, message: S) -> WrappedError
where
T: Display,
S: Into<String>,
T: Display,
S: Into<String>,
{
WrappedError {
message: message.into(),
original: format!("{}", original),
}
WrappedError {
message: message.into(),
original: format!("{}", original),
}
}
// Error generated by an unsuccessful HTTP response
#[derive(Debug)]
pub struct StatusError {
url: String,
status_code: u16,
body: String,
url: String,
status_code: u16,
body: String,
}
impl std::fmt::Display for StatusError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
f,
"error requesting {}: {} {}",
self.url, self.status_code, self.body
)
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
f,
"error requesting {}: {} {}",
self.url, self.status_code, self.body
)
}
}
impl StatusError {
pub async fn from_res(res: reqwest::Response) -> Result<StatusError, AnyError> {
let status_code = res.status().as_u16();
let url = res.url().to_string();
let body = res.text().await.map_err(|e| {
wrap(
e,
format!(
"failed to read response body on {} code from {}",
status_code, url
),
)
})?;
pub async fn from_res(res: reqwest::Response) -> Result<StatusError, AnyError> {
let status_code = res.status().as_u16();
let url = res.url().to_string();
let body = res.text().await.map_err(|e| {
wrap(
e,
format!(
"failed to read response body on {} code from {}",
status_code, url
),
)
})?;
Ok(StatusError {
url,
status_code,
body,
})
}
Ok(StatusError {
url,
status_code,
body,
})
}
}
// When the user has not consented to the licensing terms in using the Launcher
@ -99,9 +99,9 @@ impl StatusError {
pub struct MissingLegalConsent(pub String);
impl std::fmt::Display for MissingLegalConsent {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", self.0)
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", self.0)
}
}
// When the provided connection token doesn't match the one used to set up the original VS Code Server
@ -110,9 +110,9 @@ impl std::fmt::Display for MissingLegalConsent {
pub struct MismatchConnectionToken(pub String);
impl std::fmt::Display for MismatchConnectionToken {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", self.0)
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", self.0)
}
}
// When the VS Code server has an unrecognized extension (rather than zip or gz)
@ -120,9 +120,9 @@ impl std::fmt::Display for MismatchConnectionToken {
pub struct InvalidServerExtensionError(pub String);
impl std::fmt::Display for InvalidServerExtensionError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "invalid server extension '{}'", self.0)
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "invalid server extension '{}'", self.0)
}
}
// When the tunnel fails to open
@ -130,15 +130,15 @@ impl std::fmt::Display for InvalidServerExtensionError {
pub struct DevTunnelError(pub String);
impl std::fmt::Display for DevTunnelError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "could not open tunnel: {}", self.0)
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "could not open tunnel: {}", self.0)
}
}
impl std::error::Error for DevTunnelError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
}
// When the server was downloaded, but the entrypoint scripts don't exist.
@ -146,214 +146,214 @@ impl std::error::Error for DevTunnelError {
pub struct MissingEntrypointError();
impl std::fmt::Display for MissingEntrypointError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Missing entrypoints in server download. Most likely this is a corrupted download. Please retry")
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Missing entrypoints in server download. Most likely this is a corrupted download. Please retry")
}
}
#[derive(Debug)]
pub struct SetupError(pub String);
impl std::fmt::Display for SetupError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
f,
"{}\r\n\r\nMore info at https://code.visualstudio.com/docs/remote/linux",
self.0
)
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
f,
"{}\r\n\r\nMore info at https://code.visualstudio.com/docs/remote/linux",
self.0
)
}
}
#[derive(Debug)]
pub struct NoHomeForLauncherError();
impl std::fmt::Display for NoHomeForLauncherError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
f,
"No $HOME variable was found in your environment. Either set it, or specify a `--data-dir` manually when invoking the launcher.",
)
}
}
}
#[derive(Debug)]
pub struct InvalidTunnelName(pub String);
impl std::fmt::Display for InvalidTunnelName {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", &self.0)
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", &self.0)
}
}
#[derive(Debug)]
pub struct TunnelCreationFailed(pub String, pub String);
impl std::fmt::Display for TunnelCreationFailed {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
f,
"Could not create tunnel with name: {}\nReason: {}",
&self.0, &self.1
)
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
f,
"Could not create tunnel with name: {}\nReason: {}",
&self.0, &self.1
)
}
}
#[derive(Debug)]
pub struct TunnelHostFailed(pub String);
impl std::fmt::Display for TunnelHostFailed {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", &self.0)
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", &self.0)
}
}
#[derive(Debug)]
pub struct ExtensionInstallFailed(pub String);
impl std::fmt::Display for ExtensionInstallFailed {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Extension install failed: {}", &self.0)
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Extension install failed: {}", &self.0)
}
}
#[derive(Debug)]
pub struct MismatchedLaunchModeError();
impl std::fmt::Display for MismatchedLaunchModeError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "A server is already running, but it was not launched in the same listening mode (port vs. socket) as this request")
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "A server is already running, but it was not launched in the same listening mode (port vs. socket) as this request")
}
}
#[derive(Debug)]
pub struct NoAttachedServerError();
impl std::fmt::Display for NoAttachedServerError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "No server is running")
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "No server is running")
}
}
#[derive(Debug)]
pub struct ServerWriteError();
impl std::fmt::Display for ServerWriteError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Error writing to the server, it should be restarted")
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Error writing to the server, it should be restarted")
}
}
#[derive(Debug)]
pub struct RefreshTokenNotAvailableError();
impl std::fmt::Display for RefreshTokenNotAvailableError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Refresh token not available, authentication is required")
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Refresh token not available, authentication is required")
}
}
#[derive(Debug)]
pub struct UnsupportedPlatformError();
impl std::fmt::Display for UnsupportedPlatformError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
f,
"This operation is not supported on your current platform"
)
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
f,
"This operation is not supported on your current platform"
)
}
}
#[derive(Debug)]
pub struct NoInstallInUserProvidedPath(pub String);
impl std::fmt::Display for NoInstallInUserProvidedPath {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
f,
"No VS Code installation could be found in {}. You can run `code --use-quality=stable` to switch to the latest stable version of VS Code.",
self.0
)
}
}
}
#[derive(Debug)]
pub struct InvalidRequestedVersion();
impl std::fmt::Display for InvalidRequestedVersion {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
f,
"The reqested version is invalid, expected one of 'stable', 'insiders', version number (x.y.z), or absolute path.",
)
}
}
}
#[derive(Debug)]
pub struct UserCancelledInstallation();
impl std::fmt::Display for UserCancelledInstallation {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Installation aborted.")
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Installation aborted.")
}
}
#[derive(Debug)]
pub struct CannotForwardControlPort();
impl std::fmt::Display for CannotForwardControlPort {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Cannot forward or unforward port {}.", CONTROL_PORT)
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Cannot forward or unforward port {}.", CONTROL_PORT)
}
}
#[derive(Debug)]
pub struct ServerHasClosed();
impl std::fmt::Display for ServerHasClosed {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Request cancelled because the server has closed")
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Request cancelled because the server has closed")
}
}
#[derive(Debug)]
pub struct UpdatesNotConfigured();
impl std::fmt::Display for UpdatesNotConfigured {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Update service is not configured")
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Update service is not configured")
}
}
#[derive(Debug)]
pub struct ServiceAlreadyRegistered();
impl std::fmt::Display for ServiceAlreadyRegistered {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Already registered the service. Run `code tunnel service uninstall` to unregister it first")
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "Already registered the service. Run `code tunnel service uninstall` to unregister it first")
}
}
#[derive(Debug)]
pub struct WindowsNeedsElevation(pub String);
impl std::fmt::Display for WindowsNeedsElevation {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
writeln!(f, "{}", self.0)?;
writeln!(f)?;
writeln!(f, "You may need to run this command as an administrator:")?;
writeln!(f, " 1. Open the start menu and search for Powershell")?;
writeln!(f, " 2. Right click and 'Run as administrator'")?;
if let Ok(exe) = std::env::current_exe() {
writeln!(
f,
" 3. Run &'{}' '{}'",
exe.display(),
std::env::args().skip(1).collect::<Vec<_>>().join("' '")
)
} else {
writeln!(f, " 3. Run the same command again",)
}
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
writeln!(f, "{}", self.0)?;
writeln!(f)?;
writeln!(f, "You may need to run this command as an administrator:")?;
writeln!(f, " 1. Open the start menu and search for Powershell")?;
writeln!(f, " 2. Right click and 'Run as administrator'")?;
if let Ok(exe) = std::env::current_exe() {
writeln!(
f,
" 3. Run &'{}' '{}'",
exe.display(),
std::env::args().skip(1).collect::<Vec<_>>().join("' '")
)
} else {
writeln!(f, " 3. Run the same command again",)
}
}
}
// Makes an "AnyError" enum that contains any of the given errors, in the form
@ -392,36 +392,36 @@ macro_rules! makeAnyError {
}
makeAnyError!(
MissingLegalConsent,
MismatchConnectionToken,
DevTunnelError,
StatusError,
WrappedError,
InvalidServerExtensionError,
MissingEntrypointError,
SetupError,
NoHomeForLauncherError,
TunnelCreationFailed,
TunnelHostFailed,
InvalidTunnelName,
ExtensionInstallFailed,
MismatchedLaunchModeError,
NoAttachedServerError,
ServerWriteError,
UnsupportedPlatformError,
RefreshTokenNotAvailableError,
NoInstallInUserProvidedPath,
UserCancelledInstallation,
InvalidRequestedVersion,
CannotForwardControlPort,
ServerHasClosed,
ServiceAlreadyRegistered,
WindowsNeedsElevation,
UpdatesNotConfigured
MissingLegalConsent,
MismatchConnectionToken,
DevTunnelError,
StatusError,
WrappedError,
InvalidServerExtensionError,
MissingEntrypointError,
SetupError,
NoHomeForLauncherError,
TunnelCreationFailed,
TunnelHostFailed,
InvalidTunnelName,
ExtensionInstallFailed,
MismatchedLaunchModeError,
NoAttachedServerError,
ServerWriteError,
UnsupportedPlatformError,
RefreshTokenNotAvailableError,
NoInstallInUserProvidedPath,
UserCancelledInstallation,
InvalidRequestedVersion,
CannotForwardControlPort,
ServerHasClosed,
ServiceAlreadyRegistered,
WindowsNeedsElevation,
UpdatesNotConfigured
);
impl From<reqwest::Error> for AnyError {
fn from(e: reqwest::Error) -> AnyError {
AnyError::WrappedError(WrappedError::from(e))
}
fn from(e: reqwest::Error) -> AnyError {
AnyError::WrappedError(WrappedError::from(e))
}
}

View file

@ -10,27 +10,27 @@ use tokio_util::compat::FuturesAsyncReadCompatExt;
use super::io::{copy_async_progress, ReportCopyProgress};
pub async fn download_into_file<T>(
filename: &std::path::Path,
progress: T,
res: reqwest::Response,
filename: &std::path::Path,
progress: T,
res: reqwest::Response,
) -> Result<fs::File, WrappedError>
where
T: ReportCopyProgress,
T: ReportCopyProgress,
{
let mut file = fs::File::create(filename)
.await
.map_err(|e| errors::wrap(e, "failed to create file"))?;
let mut file = fs::File::create(filename)
.await
.map_err(|e| errors::wrap(e, "failed to create file"))?;
let content_length = res.content_length().unwrap_or(0);
let mut read = res
.bytes_stream()
.map_err(|e| futures::io::Error::new(futures::io::ErrorKind::Other, e))
.into_async_read()
.compat();
let content_length = res.content_length().unwrap_or(0);
let mut read = res
.bytes_stream()
.map_err(|e| futures::io::Error::new(futures::io::ErrorKind::Other, e))
.into_async_read()
.compat();
copy_async_progress(progress, &mut read, &mut file, content_length)
.await
.map_err(|e| errors::wrap(e, "failed to download file"))?;
copy_async_progress(progress, &mut read, &mut file, content_length)
.await
.map_err(|e| errors::wrap(e, "failed to download file"))?;
Ok(file)
Ok(file)
}

View file

@ -11,59 +11,59 @@ use super::{errors::WrappedError, io::ReportCopyProgress};
/// Wrapper around indicatif::ProgressBar that implements ReportCopyProgress.
pub struct ProgressBarReporter {
bar: ProgressBar,
has_set_total: bool,
bar: ProgressBar,
has_set_total: bool,
}
impl From<ProgressBar> for ProgressBarReporter {
fn from(bar: ProgressBar) -> Self {
ProgressBarReporter {
bar,
has_set_total: false,
}
}
fn from(bar: ProgressBar) -> Self {
ProgressBarReporter {
bar,
has_set_total: false,
}
}
}
impl ReportCopyProgress for ProgressBarReporter {
fn report_progress(&mut self, bytes_so_far: u64, total_bytes: u64) {
if !self.has_set_total {
self.bar.set_length(total_bytes);
}
fn report_progress(&mut self, bytes_so_far: u64, total_bytes: u64) {
if !self.has_set_total {
self.bar.set_length(total_bytes);
}
if bytes_so_far == total_bytes {
self.bar.finish_and_clear();
} else {
self.bar.set_position(bytes_so_far);
}
}
if bytes_so_far == total_bytes {
self.bar.finish_and_clear();
} else {
self.bar.set_position(bytes_so_far);
}
}
}
pub fn prompt_yn(text: &str) -> Result<bool, WrappedError> {
Confirm::with_theme(&ColorfulTheme::default())
.with_prompt(text)
.default(true)
.interact()
.map_err(|e| wrap(e, "Failed to read confirm input"))
Confirm::with_theme(&ColorfulTheme::default())
.with_prompt(text)
.default(true)
.interact()
.map_err(|e| wrap(e, "Failed to read confirm input"))
}
pub fn prompt_options<T>(text: &str, options: &[T]) -> Result<T, WrappedError>
where
T: Display + Copy,
T: Display + Copy,
{
let chosen = Select::with_theme(&ColorfulTheme::default())
.with_prompt(text)
.items(options)
.default(0)
.interact()
.map_err(|e| wrap(e, "Failed to read select input"))?;
let chosen = Select::with_theme(&ColorfulTheme::default())
.with_prompt(text)
.items(options)
.default(0)
.interact()
.map_err(|e| wrap(e, "Failed to read select input"))?;
Ok(options[chosen])
Ok(options[chosen])
}
pub fn prompt_placeholder(question: &str, placeholder: &str) -> Result<String, WrappedError> {
Input::with_theme(&ColorfulTheme::default())
.with_prompt(question)
.default(placeholder.to_string())
.interact_text()
.map_err(|e| wrap(e, "Failed to read confirm input"))
Input::with_theme(&ColorfulTheme::default())
.with_prompt(question)
.default(placeholder.to_string())
.interact_text()
.map_err(|e| wrap(e, "Failed to read confirm input"))
}

View file

@ -7,53 +7,53 @@ use std::io;
use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};
pub trait ReportCopyProgress {
fn report_progress(&mut self, bytes_so_far: u64, total_bytes: u64);
fn report_progress(&mut self, bytes_so_far: u64, total_bytes: u64);
}
/// Type that doesn't emit anything for download progress.
pub struct SilentCopyProgress();
impl ReportCopyProgress for SilentCopyProgress {
fn report_progress(&mut self, _bytes_so_far: u64, _total_bytes: u64) {}
fn report_progress(&mut self, _bytes_so_far: u64, _total_bytes: u64) {}
}
/// Copies from the reader to the writer, reporting progress to the provided
/// reporter every so often.
pub async fn copy_async_progress<T, R, W>(
mut reporter: T,
reader: &mut R,
writer: &mut W,
total_bytes: u64,
mut reporter: T,
reader: &mut R,
writer: &mut W,
total_bytes: u64,
) -> io::Result<u64>
where
R: AsyncRead + Unpin,
W: AsyncWrite + Unpin,
T: ReportCopyProgress,
R: AsyncRead + Unpin,
W: AsyncWrite + Unpin,
T: ReportCopyProgress,
{
let mut buf = vec![0; 8 * 1024];
let mut bytes_so_far = 0;
let mut bytes_last_reported = 0;
let report_granularity = std::cmp::min(total_bytes / 10, 2 * 1024 * 1024);
let mut buf = vec![0; 8 * 1024];
let mut bytes_so_far = 0;
let mut bytes_last_reported = 0;
let report_granularity = std::cmp::min(total_bytes / 10, 2 * 1024 * 1024);
reporter.report_progress(0, total_bytes);
reporter.report_progress(0, total_bytes);
loop {
let read_buf = match reader.read(&mut buf).await {
Ok(0) => break,
Ok(n) => &buf[..n],
Err(e) => return Err(e),
};
loop {
let read_buf = match reader.read(&mut buf).await {
Ok(0) => break,
Ok(n) => &buf[..n],
Err(e) => return Err(e),
};
writer.write_all(read_buf).await?;
writer.write_all(read_buf).await?;
bytes_so_far += read_buf.len() as u64;
if bytes_so_far - bytes_last_reported > report_granularity {
bytes_last_reported = bytes_so_far;
reporter.report_progress(bytes_so_far, total_bytes);
}
}
bytes_so_far += read_buf.len() as u64;
if bytes_so_far - bytes_last_reported > report_granularity {
bytes_last_reported = bytes_so_far;
reporter.report_progress(bytes_so_far, total_bytes);
}
}
reporter.report_progress(bytes_so_far, total_bytes);
reporter.report_progress(bytes_so_far, total_bytes);
Ok(bytes_so_far)
Ok(bytes_so_far)
}

View file

@ -7,72 +7,72 @@ use std::path::Path;
use sysinfo::{Pid, PidExt, ProcessExt, System, SystemExt};
pub fn process_at_path_exists(pid: u32, name: &Path) -> bool {
// TODO https://docs.rs/sysinfo/latest/sysinfo/index.html#usage
let mut sys = System::new_all();
sys.refresh_processes();
// TODO https://docs.rs/sysinfo/latest/sysinfo/index.html#usage
let mut sys = System::new_all();
sys.refresh_processes();
let name_str = format!("{}", name.display());
match sys.process(Pid::from_u32(pid)) {
Some(process) => {
for cmd in process.cmd() {
if cmd.contains(&name_str) {
return true;
}
}
}
None => {
return false;
}
}
let name_str = format!("{}", name.display());
match sys.process(Pid::from_u32(pid)) {
Some(process) => {
for cmd in process.cmd() {
if cmd.contains(&name_str) {
return true;
}
}
}
None => {
return false;
}
}
false
false
}
pub fn process_exists(pid: u32) -> bool {
let mut sys = System::new_all();
sys.refresh_processes();
sys.process(Pid::from_u32(pid)).is_some()
let mut sys = System::new_all();
sys.refresh_processes();
sys.process(Pid::from_u32(pid)).is_some()
}
pub fn find_running_process(name: &Path) -> Option<u32> {
// TODO https://docs.rs/sysinfo/latest/sysinfo/index.html#usage
let mut sys = System::new_all();
sys.refresh_processes();
// TODO https://docs.rs/sysinfo/latest/sysinfo/index.html#usage
let mut sys = System::new_all();
sys.refresh_processes();
let name_str = format!("{}", name.display());
let name_str = format!("{}", name.display());
for (pid, process) in sys.processes() {
for cmd in process.cmd() {
if cmd.contains(&name_str) {
return Some(pid.as_u32());
}
}
}
None
for (pid, process) in sys.processes() {
for cmd in process.cmd() {
if cmd.contains(&name_str) {
return Some(pid.as_u32());
}
}
}
None
}
#[cfg(not(target_family = "unix"))]
pub async fn set_executable_permission<P: AsRef<std::path::Path>>(
_file: P,
_file: P,
) -> Result<(), errors::WrappedError> {
Ok(())
Ok(())
}
#[cfg(target_family = "unix")]
pub async fn set_executable_permission<P: AsRef<std::path::Path>>(
file: P,
file: P,
) -> Result<(), errors::WrappedError> {
use std::os::unix::prelude::PermissionsExt;
use std::os::unix::prelude::PermissionsExt;
let mut permissions = tokio::fs::metadata(&file)
.await
.map_err(|e| errors::wrap(e, "failed to read executable file metadata"))?
.permissions();
let mut permissions = tokio::fs::metadata(&file)
.await
.map_err(|e| errors::wrap(e, "failed to read executable file metadata"))?
.permissions();
permissions.set_mode(0o750);
permissions.set_mode(0o750);
tokio::fs::set_permissions(&file, permissions)
.await
.map_err(|e| errors::wrap(e, "failed to set executable permissions"))?;
tokio::fs::set_permissions(&file, permissions)
.await
.map_err(|e| errors::wrap(e, "failed to set executable permissions"))?;
Ok(())
Ok(())
}

View file

@ -15,245 +15,245 @@ use tokio::fs;
use super::errors::AnyError;
lazy_static! {
static ref LDCONFIG_STDC_RE: Regex = Regex::new(r"libstdc\+\+.* => (.+)").unwrap();
static ref LDD_VERSION_RE: BinRegex = BinRegex::new(r"^ldd.*(.+)\.(.+)\s").unwrap();
static ref LIBSTD_CXX_VERSION_RE: BinRegex =
BinRegex::new(r"GLIBCXX_([0-9]+)\.([0-9]+)(?:\.([0-9]+))?").unwrap();
static ref MIN_CXX_VERSION: SimpleSemver = SimpleSemver::new(3, 4, 18);
static ref MIN_LDD_VERSION: SimpleSemver = SimpleSemver::new(2, 17, 0);
static ref LDCONFIG_STDC_RE: Regex = Regex::new(r"libstdc\+\+.* => (.+)").unwrap();
static ref LDD_VERSION_RE: BinRegex = BinRegex::new(r"^ldd.*(.+)\.(.+)\s").unwrap();
static ref LIBSTD_CXX_VERSION_RE: BinRegex =
BinRegex::new(r"GLIBCXX_([0-9]+)\.([0-9]+)(?:\.([0-9]+))?").unwrap();
static ref MIN_CXX_VERSION: SimpleSemver = SimpleSemver::new(3, 4, 18);
static ref MIN_LDD_VERSION: SimpleSemver = SimpleSemver::new(2, 17, 0);
}
pub struct PreReqChecker {}
impl Default for PreReqChecker {
fn default() -> Self {
Self::new()
}
fn default() -> Self {
Self::new()
}
}
impl PreReqChecker {
pub fn new() -> PreReqChecker {
PreReqChecker {}
}
pub fn new() -> PreReqChecker {
PreReqChecker {}
}
#[cfg(not(target_os = "linux"))]
pub async fn verify(&self) -> Result<Platform, AnyError> {
Platform::env_default().ok_or_else(|| {
SetupError("VS Code it not supported on this platform".to_owned()).into()
})
}
#[cfg(not(target_os = "linux"))]
pub async fn verify(&self) -> Result<Platform, AnyError> {
Platform::env_default().ok_or_else(|| {
SetupError("VS Code it not supported on this platform".to_owned()).into()
})
}
#[cfg(target_os = "linux")]
pub async fn verify(&self) -> Result<Platform, AnyError> {
let (gnu_a, gnu_b, or_musl) = tokio::join!(
check_glibc_version(),
check_glibcxx_version(),
check_musl_interpreter()
);
#[cfg(target_os = "linux")]
pub async fn verify(&self) -> Result<Platform, AnyError> {
let (gnu_a, gnu_b, or_musl) = tokio::join!(
check_glibc_version(),
check_glibcxx_version(),
check_musl_interpreter()
);
if gnu_a.is_ok() && gnu_b.is_ok() {
return Ok(if cfg!(target_arch = "x86_64") {
Platform::LinuxX64
} else if cfg!(target_arch = "armhf") {
Platform::LinuxARM32
} else {
Platform::LinuxARM64
});
}
if gnu_a.is_ok() && gnu_b.is_ok() {
return Ok(if cfg!(target_arch = "x86_64") {
Platform::LinuxX64
} else if cfg!(target_arch = "armhf") {
Platform::LinuxARM32
} else {
Platform::LinuxARM64
});
}
if or_musl.is_ok() {
return Ok(if cfg!(target_arch = "x86_64") {
Platform::LinuxAlpineX64
} else {
Platform::LinuxAlpineARM64
});
}
if or_musl.is_ok() {
return Ok(if cfg!(target_arch = "x86_64") {
Platform::LinuxAlpineX64
} else {
Platform::LinuxAlpineARM64
});
}
let mut errors: Vec<String> = vec![];
if let Err(e) = gnu_a {
errors.push(e);
} else if let Err(e) = gnu_b {
errors.push(e);
}
let mut errors: Vec<String> = vec![];
if let Err(e) = gnu_a {
errors.push(e);
} else if let Err(e) = gnu_b {
errors.push(e);
}
if let Err(e) = or_musl {
errors.push(e);
}
if let Err(e) = or_musl {
errors.push(e);
}
let bullets = errors
.iter()
.map(|e| format!(" - {}", e))
.collect::<Vec<String>>()
.join("\n");
let bullets = errors
.iter()
.map(|e| format!(" - {}", e))
.collect::<Vec<String>>()
.join("\n");
Err(AnyError::from(SetupError(format!(
"This machine not meet VS Code Server's prerequisites, expected either...\n{}",
bullets,
))))
}
Err(AnyError::from(SetupError(format!(
"This machine not meet VS Code Server's prerequisites, expected either...\n{}",
bullets,
))))
}
}
#[allow(dead_code)]
async fn check_musl_interpreter() -> Result<(), String> {
const MUSL_PATH: &str = if cfg!(target_platform = "aarch64") {
"/lib/ld-musl-aarch64.so.1"
} else {
"/lib/ld-musl-x86_64.so.1"
};
const MUSL_PATH: &str = if cfg!(target_platform = "aarch64") {
"/lib/ld-musl-aarch64.so.1"
} else {
"/lib/ld-musl-x86_64.so.1"
};
if fs::metadata(MUSL_PATH).await.is_err() {
return Err(format!(
"find {}, which is required to run the VS Code Server in musl environments",
MUSL_PATH
));
}
if fs::metadata(MUSL_PATH).await.is_err() {
return Err(format!(
"find {}, which is required to run the VS Code Server in musl environments",
MUSL_PATH
));
}
Ok(())
Ok(())
}
#[allow(dead_code)]
async fn check_glibc_version() -> Result<(), String> {
let ldd_version = capture_command("ldd", ["--version"])
.await
.ok()
.and_then(|o| extract_ldd_version(&o.stdout));
let ldd_version = capture_command("ldd", ["--version"])
.await
.ok()
.and_then(|o| extract_ldd_version(&o.stdout));
if let Some(v) = ldd_version {
return if v.gte(&MIN_LDD_VERSION) {
Ok(())
} else {
Err(format!(
"find GLIBC >= 2.17 (but found {} instead) for GNU environments",
v
))
};
}
if let Some(v) = ldd_version {
return if v.gte(&MIN_LDD_VERSION) {
Ok(())
} else {
Err(format!(
"find GLIBC >= 2.17 (but found {} instead) for GNU environments",
v
))
};
}
Ok(())
Ok(())
}
#[allow(dead_code)]
async fn check_glibcxx_version() -> Result<(), String> {
let mut libstdc_path: Option<String> = None;
let mut libstdc_path: Option<String> = None;
const DEFAULT_LIB_PATH: &str = "/usr/lib64/libstdc++.so.6";
const LDCONFIG_PATH: &str = "/sbin/ldconfig";
const DEFAULT_LIB_PATH: &str = "/usr/lib64/libstdc++.so.6";
const LDCONFIG_PATH: &str = "/sbin/ldconfig";
if fs::metadata(DEFAULT_LIB_PATH).await.is_ok() {
libstdc_path = Some(DEFAULT_LIB_PATH.to_owned());
} else if fs::metadata(LDCONFIG_PATH).await.is_ok() {
libstdc_path = capture_command(LDCONFIG_PATH, ["-p"])
.await
.ok()
.and_then(|o| extract_libstd_from_ldconfig(&o.stdout));
}
if fs::metadata(DEFAULT_LIB_PATH).await.is_ok() {
libstdc_path = Some(DEFAULT_LIB_PATH.to_owned());
} else if fs::metadata(LDCONFIG_PATH).await.is_ok() {
libstdc_path = capture_command(LDCONFIG_PATH, ["-p"])
.await
.ok()
.and_then(|o| extract_libstd_from_ldconfig(&o.stdout));
}
match libstdc_path {
Some(path) => match fs::read(&path).await {
Ok(contents) => check_for_sufficient_glibcxx_versions(contents),
Err(e) => Err(format!(
"validate GLIBCXX version for GNU environments, but could not: {}",
e
)),
},
None => Err("find libstdc++.so or ldconfig for GNU environments".to_owned()),
}
match libstdc_path {
Some(path) => match fs::read(&path).await {
Ok(contents) => check_for_sufficient_glibcxx_versions(contents),
Err(e) => Err(format!(
"validate GLIBCXX version for GNU environments, but could not: {}",
e
)),
},
None => Err("find libstdc++.so or ldconfig for GNU environments".to_owned()),
}
}
#[allow(dead_code)]
fn check_for_sufficient_glibcxx_versions(contents: Vec<u8>) -> Result<(), String> {
let all_versions: Vec<SimpleSemver> = LIBSTD_CXX_VERSION_RE
.captures_iter(&contents)
.map(|m| SimpleSemver {
major: m.get(1).map_or(0, |s| u32_from_bytes(s.as_bytes())),
minor: m.get(2).map_or(0, |s| u32_from_bytes(s.as_bytes())),
patch: m.get(3).map_or(0, |s| u32_from_bytes(s.as_bytes())),
})
.collect();
let all_versions: Vec<SimpleSemver> = LIBSTD_CXX_VERSION_RE
.captures_iter(&contents)
.map(|m| SimpleSemver {
major: m.get(1).map_or(0, |s| u32_from_bytes(s.as_bytes())),
minor: m.get(2).map_or(0, |s| u32_from_bytes(s.as_bytes())),
patch: m.get(3).map_or(0, |s| u32_from_bytes(s.as_bytes())),
})
.collect();
if !all_versions.iter().any(|v| MIN_CXX_VERSION.gte(v)) {
return Err(format!(
"find GLIBCXX >= 3.4.18 (but found {} instead) for GNU environments",
all_versions
.iter()
.map(String::from)
.collect::<Vec<String>>()
.join(", ")
));
}
if !all_versions.iter().any(|v| MIN_CXX_VERSION.gte(v)) {
return Err(format!(
"find GLIBCXX >= 3.4.18 (but found {} instead) for GNU environments",
all_versions
.iter()
.map(String::from)
.collect::<Vec<String>>()
.join(", ")
));
}
Ok(())
Ok(())
}
fn extract_ldd_version(output: &[u8]) -> Option<SimpleSemver> {
LDD_VERSION_RE.captures(output).map(|m| SimpleSemver {
major: m.get(1).map_or(0, |s| u32_from_bytes(s.as_bytes())),
minor: m.get(2).map_or(0, |s| u32_from_bytes(s.as_bytes())),
patch: 0,
})
LDD_VERSION_RE.captures(output).map(|m| SimpleSemver {
major: m.get(1).map_or(0, |s| u32_from_bytes(s.as_bytes())),
minor: m.get(2).map_or(0, |s| u32_from_bytes(s.as_bytes())),
patch: 0,
})
}
fn extract_libstd_from_ldconfig(output: &[u8]) -> Option<String> {
String::from_utf8_lossy(output)
.lines()
.find_map(|l| LDCONFIG_STDC_RE.captures(l))
.and_then(|cap| cap.get(1))
.map(|cap| cap.as_str().to_owned())
String::from_utf8_lossy(output)
.lines()
.find_map(|l| LDCONFIG_STDC_RE.captures(l))
.and_then(|cap| cap.get(1))
.map(|cap| cap.as_str().to_owned())
}
fn u32_from_bytes(b: &[u8]) -> u32 {
String::from_utf8_lossy(b).parse::<u32>().unwrap_or(0)
String::from_utf8_lossy(b).parse::<u32>().unwrap_or(0)
}
#[derive(Debug, PartialEq)]
struct SimpleSemver {
major: u32,
minor: u32,
patch: u32,
major: u32,
minor: u32,
patch: u32,
}
impl From<&SimpleSemver> for String {
fn from(s: &SimpleSemver) -> Self {
format!("v{}.{}.{}", s.major, s.minor, s.patch)
}
fn from(s: &SimpleSemver) -> Self {
format!("v{}.{}.{}", s.major, s.minor, s.patch)
}
}
impl std::fmt::Display for SimpleSemver {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", String::from(self))
}
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", String::from(self))
}
}
#[allow(dead_code)]
impl SimpleSemver {
fn new(major: u32, minor: u32, patch: u32) -> SimpleSemver {
SimpleSemver {
major,
minor,
patch,
}
}
fn new(major: u32, minor: u32, patch: u32) -> SimpleSemver {
SimpleSemver {
major,
minor,
patch,
}
}
fn gte(&self, other: &SimpleSemver) -> bool {
match self.major.cmp(&other.major) {
Ordering::Greater => true,
Ordering::Less => false,
Ordering::Equal => match self.minor.cmp(&other.minor) {
Ordering::Greater => true,
Ordering::Less => false,
Ordering::Equal => self.patch >= other.patch,
},
}
}
fn gte(&self, other: &SimpleSemver) -> bool {
match self.major.cmp(&other.major) {
Ordering::Greater => true,
Ordering::Less => false,
Ordering::Equal => match self.minor.cmp(&other.minor) {
Ordering::Greater => true,
Ordering::Less => false,
Ordering::Equal => self.patch >= other.patch,
},
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use super::*;
#[test]
fn test_extract_libstd_from_ldconfig() {
let actual = "
#[test]
fn test_extract_libstd_from_ldconfig() {
let actual = "
libstoken.so.1 (libc6,x86-64) => /lib/x86_64-linux-gnu/libstoken.so.1
libstemmer.so.0d (libc6,x86-64) => /lib/x86_64-linux-gnu/libstemmer.so.0d
libstdc++.so.6 (libc6,x86-64) => /lib/x86_64-linux-gnu/libstdc++.so.6
@ -261,41 +261,41 @@ mod tests {
libssl3.so (libc6,x86-64) => /lib/x86_64-linux-gnu/libssl3.so
".to_owned().into_bytes();
assert_eq!(
extract_libstd_from_ldconfig(&actual),
Some("/lib/x86_64-linux-gnu/libstdc++.so.6".to_owned()),
);
assert_eq!(
extract_libstd_from_ldconfig(&actual),
Some("/lib/x86_64-linux-gnu/libstdc++.so.6".to_owned()),
);
assert_eq!(
extract_libstd_from_ldconfig(&"nothing here!".to_owned().into_bytes()),
None,
);
}
assert_eq!(
extract_libstd_from_ldconfig(&"nothing here!".to_owned().into_bytes()),
None,
);
}
#[test]
fn test_gte() {
assert!(SimpleSemver::new(1, 2, 3).gte(&SimpleSemver::new(1, 2, 3)));
assert!(SimpleSemver::new(1, 2, 3).gte(&SimpleSemver::new(0, 10, 10)));
assert!(SimpleSemver::new(1, 2, 3).gte(&SimpleSemver::new(1, 1, 10)));
#[test]
fn test_gte() {
assert!(SimpleSemver::new(1, 2, 3).gte(&SimpleSemver::new(1, 2, 3)));
assert!(SimpleSemver::new(1, 2, 3).gte(&SimpleSemver::new(0, 10, 10)));
assert!(SimpleSemver::new(1, 2, 3).gte(&SimpleSemver::new(1, 1, 10)));
assert!(!SimpleSemver::new(1, 2, 3).gte(&SimpleSemver::new(1, 2, 10)));
assert!(!SimpleSemver::new(1, 2, 3).gte(&SimpleSemver::new(1, 3, 1)));
assert!(!SimpleSemver::new(1, 2, 3).gte(&SimpleSemver::new(2, 2, 1)));
}
assert!(!SimpleSemver::new(1, 2, 3).gte(&SimpleSemver::new(1, 2, 10)));
assert!(!SimpleSemver::new(1, 2, 3).gte(&SimpleSemver::new(1, 3, 1)));
assert!(!SimpleSemver::new(1, 2, 3).gte(&SimpleSemver::new(2, 2, 1)));
}
#[test]
fn check_for_sufficient_glibcxx_versions() {
let actual = "ldd (Ubuntu GLIBC 2.31-0ubuntu9.7) 2.31
#[test]
fn check_for_sufficient_glibcxx_versions() {
let actual = "ldd (Ubuntu GLIBC 2.31-0ubuntu9.7) 2.31
Copyright (C) 2020 Free Software Foundation, Inc.
This is free software; see the source for copying conditions. There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
Written by Roland McGrath and Ulrich Drepper."
.to_owned()
.into_bytes();
.to_owned()
.into_bytes();
assert_eq!(
extract_ldd_version(&actual),
Some(SimpleSemver::new(2, 31, 0)),
);
}
assert_eq!(
extract_ldd_version(&actual),
Some(SimpleSemver::new(2, 31, 0)),
);
}
}

View file

@ -3,87 +3,87 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
use tokio::sync::watch::{
self,
error::{RecvError, SendError},
self,
error::{RecvError, SendError},
};
#[derive(Clone)]
pub struct Barrier<T>(watch::Receiver<Option<T>>)
where
T: Copy;
T: Copy;
impl<T> Barrier<T>
where
T: Copy,
T: Copy,
{
/// Waits for the barrier to be closed, returning a value if one was sent.
pub async fn wait(&mut self) -> Result<T, RecvError> {
loop {
if let Err(e) = self.0.changed().await {
return Err(e);
}
/// Waits for the barrier to be closed, returning a value if one was sent.
pub async fn wait(&mut self) -> Result<T, RecvError> {
loop {
if let Err(e) = self.0.changed().await {
return Err(e);
}
if let Some(v) = *(self.0.borrow()) {
return Ok(v);
}
}
}
if let Some(v) = *(self.0.borrow()) {
return Ok(v);
}
}
}
}
pub struct BarrierOpener<T>(watch::Sender<Option<T>>);
impl<T> BarrierOpener<T> {
/// Closes the barrier.
pub fn open(self, value: T) -> Result<(), SendError<Option<T>>> {
self.0.send(Some(value))
}
/// Closes the barrier.
pub fn open(self, value: T) -> Result<(), SendError<Option<T>>> {
self.0.send(Some(value))
}
}
/// The Barrier is something that can be opened once from one side,
/// and is thereafter permanently closed. It can contain a value.
pub fn new_barrier<T>() -> (Barrier<T>, BarrierOpener<T>)
where
T: Copy,
T: Copy,
{
let (closed_tx, closed_rx) = watch::channel(None);
(Barrier(closed_rx), BarrierOpener(closed_tx))
let (closed_tx, closed_rx) = watch::channel(None);
(Barrier(closed_rx), BarrierOpener(closed_tx))
}
#[cfg(test)]
mod tests {
use super::*;
use super::*;
#[tokio::test]
async fn test_barrier_close_after_spawn() {
let (mut barrier, opener) = new_barrier::<u32>();
let (tx, rx) = tokio::sync::oneshot::channel::<u32>();
#[tokio::test]
async fn test_barrier_close_after_spawn() {
let (mut barrier, opener) = new_barrier::<u32>();
let (tx, rx) = tokio::sync::oneshot::channel::<u32>();
tokio::spawn(async move {
tx.send(barrier.wait().await.unwrap()).unwrap();
});
tokio::spawn(async move {
tx.send(barrier.wait().await.unwrap()).unwrap();
});
opener.open(42).unwrap();
opener.open(42).unwrap();
assert!(rx.await.unwrap() == 42);
}
assert!(rx.await.unwrap() == 42);
}
#[tokio::test]
async fn test_barrier_close_before_spawn() {
let (barrier, opener) = new_barrier::<u32>();
let (tx1, rx1) = tokio::sync::oneshot::channel::<u32>();
let (tx2, rx2) = tokio::sync::oneshot::channel::<u32>();
#[tokio::test]
async fn test_barrier_close_before_spawn() {
let (barrier, opener) = new_barrier::<u32>();
let (tx1, rx1) = tokio::sync::oneshot::channel::<u32>();
let (tx2, rx2) = tokio::sync::oneshot::channel::<u32>();
opener.open(42).unwrap();
let mut b1 = barrier.clone();
tokio::spawn(async move {
tx1.send(b1.wait().await.unwrap()).unwrap();
});
let mut b2 = barrier.clone();
tokio::spawn(async move {
tx2.send(b2.wait().await.unwrap()).unwrap();
});
opener.open(42).unwrap();
let mut b1 = barrier.clone();
tokio::spawn(async move {
tx1.send(b1.wait().await.unwrap()).unwrap();
});
let mut b2 = barrier.clone();
tokio::spawn(async move {
tx2.send(b2.wait().await.unwrap()).unwrap();
});
assert!(rx1.await.unwrap() == 42);
assert!(rx2.await.unwrap() == 42);
}
assert!(rx1.await.unwrap() == 42);
assert!(rx2.await.unwrap() == 42);
}
}

View file

@ -12,41 +12,41 @@ use tar::Archive;
use super::io::ReportCopyProgress;
pub fn decompress_tarball<T>(
path: &Path,
parent_path: &Path,
mut reporter: T,
path: &Path,
parent_path: &Path,
mut reporter: T,
) -> Result<(), WrappedError>
where
T: ReportCopyProgress,
T: ReportCopyProgress,
{
let tar_gz = File::open(path).map_err(|e| {
wrap(
Box::new(e),
format!("error opening file {}", path.display()),
)
})?;
let tar = GzDecoder::new(tar_gz);
let mut archive = Archive::new(tar);
let tar_gz = File::open(path).map_err(|e| {
wrap(
Box::new(e),
format!("error opening file {}", path.display()),
)
})?;
let tar = GzDecoder::new(tar_gz);
let mut archive = Archive::new(tar);
let results = archive
.entries()
.map_err(|e| wrap(e, format!("error opening archive {}", path.display())))?
.filter_map(|e| e.ok())
.map(|mut entry| {
let entry_path = entry
.path()
.map_err(|e| wrap(e, "error reading entry path"))?;
let results = archive
.entries()
.map_err(|e| wrap(e, format!("error opening archive {}", path.display())))?
.filter_map(|e| e.ok())
.map(|mut entry| {
let entry_path = entry
.path()
.map_err(|e| wrap(e, "error reading entry path"))?;
let path = parent_path.join(entry_path.iter().skip(1).collect::<PathBuf>());
entry
.unpack(&path)
.map_err(|e| wrap(e, format!("error unpacking {}", path.display())))?;
Ok(path)
})
.collect::<Result<Vec<PathBuf>, WrappedError>>()?;
let path = parent_path.join(entry_path.iter().skip(1).collect::<PathBuf>());
entry
.unpack(&path)
.map_err(|e| wrap(e, format!("error unpacking {}", path.display())))?;
Ok(path)
})
.collect::<Result<Vec<PathBuf>, WrappedError>>()?;
// Tarballs don't have a way to get the number of entries ahead of time
reporter.report_progress(results.len() as u64, results.len() as u64);
// Tarballs don't have a way to get the number of entries ahead of time
reporter.report_progress(results.len() as u64, results.len() as u64);
Ok(())
Ok(())
}

View file

@ -16,140 +16,140 @@ use zip::{self, ZipArchive};
/// Returns whether all files in the archive start with the same path segment.
/// If so, it's an indication we should skip that segment when extracting.
fn should_skip_first_segment(archive: &mut ZipArchive<File>) -> bool {
let first_name = {
let file = archive
.by_index_raw(0)
.expect("expected not to have an empty archive");
let first_name = {
let file = archive
.by_index_raw(0)
.expect("expected not to have an empty archive");
let path = file
.enclosed_name()
.expect("expected to have path")
.iter()
.next()
.expect("expected to have non-empty name");
let path = file
.enclosed_name()
.expect("expected to have path")
.iter()
.next()
.expect("expected to have non-empty name");
path.to_owned()
};
path.to_owned()
};
for i in 1..archive.len() {
if let Ok(file) = archive.by_index_raw(i) {
if let Some(name) = file.enclosed_name() {
if name.iter().next() != Some(&first_name) {
return false;
}
}
}
}
for i in 1..archive.len() {
if let Ok(file) = archive.by_index_raw(i) {
if let Some(name) = file.enclosed_name() {
if name.iter().next() != Some(&first_name) {
return false;
}
}
}
}
true
true
}
pub fn unzip_file<T>(path: &Path, parent_path: &Path, mut reporter: T) -> Result<(), WrappedError>
where
T: ReportCopyProgress,
T: ReportCopyProgress,
{
let file = fs::File::open(path)
.map_err(|e| wrap(e, format!("unable to open file {}", path.display())))?;
let file = fs::File::open(path)
.map_err(|e| wrap(e, format!("unable to open file {}", path.display())))?;
let mut archive = zip::ZipArchive::new(file)
.map_err(|e| wrap(e, format!("failed to open zip archive {}", path.display())))?;
let mut archive = zip::ZipArchive::new(file)
.map_err(|e| wrap(e, format!("failed to open zip archive {}", path.display())))?;
let skip_segments_no = if should_skip_first_segment(&mut archive) {
1
} else {
0
};
let skip_segments_no = if should_skip_first_segment(&mut archive) {
1
} else {
0
};
for i in 0..archive.len() {
reporter.report_progress(i as u64, archive.len() as u64);
let mut file = archive
.by_index(i)
.map_err(|e| wrap(e, format!("could not open zip entry {}", i)))?;
for i in 0..archive.len() {
reporter.report_progress(i as u64, archive.len() as u64);
let mut file = archive
.by_index(i)
.map_err(|e| wrap(e, format!("could not open zip entry {}", i)))?;
let outpath: PathBuf = match file.enclosed_name() {
Some(path) => {
let mut full_path = PathBuf::from(parent_path);
full_path.push(PathBuf::from_iter(path.iter().skip(skip_segments_no)));
full_path
}
None => continue,
};
let outpath: PathBuf = match file.enclosed_name() {
Some(path) => {
let mut full_path = PathBuf::from(parent_path);
full_path.push(PathBuf::from_iter(path.iter().skip(skip_segments_no)));
full_path
}
None => continue,
};
if file.is_dir() || file.name().ends_with('/') {
fs::create_dir_all(&outpath)
.map_err(|e| wrap(e, format!("could not create dir for {}", outpath.display())))?;
apply_permissions(&file, &outpath)?;
continue;
}
if file.is_dir() || file.name().ends_with('/') {
fs::create_dir_all(&outpath)
.map_err(|e| wrap(e, format!("could not create dir for {}", outpath.display())))?;
apply_permissions(&file, &outpath)?;
continue;
}
if let Some(p) = outpath.parent() {
fs::create_dir_all(&p)
.map_err(|e| wrap(e, format!("could not create dir for {}", outpath.display())))?;
}
if let Some(p) = outpath.parent() {
fs::create_dir_all(&p)
.map_err(|e| wrap(e, format!("could not create dir for {}", outpath.display())))?;
}
#[cfg(unix)]
{
use libc::S_IFLNK;
use std::io::Read;
use std::os::unix::ffi::OsStringExt;
#[cfg(unix)]
{
use libc::S_IFLNK;
use std::io::Read;
use std::os::unix::ffi::OsStringExt;
if matches!(file.unix_mode(), Some(mode) if mode & (S_IFLNK as u32) == (S_IFLNK as u32))
{
let mut link_to = Vec::new();
file.read_to_end(&mut link_to).map_err(|e| {
wrap(
e,
format!("could not read symlink linkpath {}", outpath.display()),
)
})?;
if matches!(file.unix_mode(), Some(mode) if mode & (S_IFLNK as u32) == (S_IFLNK as u32))
{
let mut link_to = Vec::new();
file.read_to_end(&mut link_to).map_err(|e| {
wrap(
e,
format!("could not read symlink linkpath {}", outpath.display()),
)
})?;
let link_path = PathBuf::from(std::ffi::OsString::from_vec(link_to));
std::os::unix::fs::symlink(link_path, &outpath).map_err(|e| {
wrap(e, format!("could not create symlink {}", outpath.display()))
})?;
continue;
}
}
let link_path = PathBuf::from(std::ffi::OsString::from_vec(link_to));
std::os::unix::fs::symlink(link_path, &outpath).map_err(|e| {
wrap(e, format!("could not create symlink {}", outpath.display()))
})?;
continue;
}
}
let mut outfile = fs::File::create(&outpath).map_err(|e| {
wrap(
e,
format!(
"unable to open file to write {} (from {:?})",
outpath.display(),
file.enclosed_name().map(|p| p.to_string_lossy()),
),
)
})?;
let mut outfile = fs::File::create(&outpath).map_err(|e| {
wrap(
e,
format!(
"unable to open file to write {} (from {:?})",
outpath.display(),
file.enclosed_name().map(|p| p.to_string_lossy()),
),
)
})?;
io::copy(&mut file, &mut outfile)
.map_err(|e| wrap(e, format!("error copying file {}", outpath.display())))?;
io::copy(&mut file, &mut outfile)
.map_err(|e| wrap(e, format!("error copying file {}", outpath.display())))?;
apply_permissions(&file, &outpath)?;
}
apply_permissions(&file, &outpath)?;
}
reporter.report_progress(archive.len() as u64, archive.len() as u64);
reporter.report_progress(archive.len() as u64, archive.len() as u64);
Ok(())
Ok(())
}
#[cfg(unix)]
fn apply_permissions(file: &ZipFile, outpath: &Path) -> Result<(), WrappedError> {
use std::os::unix::fs::PermissionsExt;
use std::os::unix::fs::PermissionsExt;
if let Some(mode) = file.unix_mode() {
fs::set_permissions(&outpath, fs::Permissions::from_mode(mode)).map_err(|e| {
wrap(
e,
format!("error setting permissions on {}", outpath.display()),
)
})?;
}
if let Some(mode) = file.unix_mode() {
fs::set_permissions(&outpath, fs::Permissions::from_mode(mode)).map_err(|e| {
wrap(
e,
format!("error setting permissions on {}", outpath.display()),
)
})?;
}
Ok(())
Ok(())
}
#[cfg(windows)]
fn apply_permissions(_file: &ZipFile, _outpath: &Path) -> Result<(), WrappedError> {
Ok(())
Ok(())
}