Publish to new CDN (#198154)

João Moreno 2023-11-16 07:19:59 +01:00 committed by GitHub
parent 444e73750b
commit 0b111a09a1
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
20 changed files with 1812 additions and 748 deletions

View file

@@ -26,10 +26,10 @@ steps:
artifact: ${{ target }}
path: $(Build.ArtifactStagingDirectory)/pkg/${{ target }}
- script: node build/azure-pipelines/common/sign $(Agent.ToolsDirectory)/esrpclient/*/*/net6.0/esrpcli.dll darwin-sign $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(Build.ArtifactStagingDirectory)/pkg "*.zip"
- script: node build/azure-pipelines/common/sign $(Agent.ToolsDirectory)/esrpclient/*/*/net6.0/esrpcli.dll sign-darwin $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(Build.ArtifactStagingDirectory)/pkg "*.zip"
displayName: Codesign
- script: node build/azure-pipelines/common/sign $(Agent.ToolsDirectory)/esrpclient/*/*/net6.0/esrpcli.dll darwin-notarize $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(Build.ArtifactStagingDirectory)/pkg "*.zip"
- script: node build/azure-pipelines/common/sign $(Agent.ToolsDirectory)/esrpclient/*/*/net6.0/esrpcli.dll notarize-darwin $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(Build.ArtifactStagingDirectory)/pkg "*.zip"
displayName: Notarize
- ${{ each target in parameters.VSCODE_CLI_ARTIFACTS }}:

View file

@@ -42,8 +42,8 @@ steps:
echo "##vso[task.setvariable variable=EsrpCliDllPath]$EsrpCliDllPath"
displayName: Find ESRP CLI
- powershell: node build\azure-pipelines\common\sign $env:EsrpCliDllPath windows $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(Build.ArtifactStagingDirectory)/sign "*.exe"
displayName: Codesign executable
- powershell: node build\azure-pipelines\common\sign $env:EsrpCliDllPath sign-windows $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(Build.ArtifactStagingDirectory)/sign "*.exe"
displayName: Codesign
- ${{ each target in parameters.VSCODE_CLI_ARTIFACTS }}:
- powershell: |

File diff suppressed because one or more lines are too long

View file

@@ -1,283 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as fs from 'fs';
import { Readable } from 'stream';
import * as crypto from 'crypto';
import { BlobServiceClient, BlockBlobParallelUploadOptions, StoragePipelineOptions, StorageRetryPolicyType } from '@azure/storage-blob';
import * as mime from 'mime';
import { CosmosClient } from '@azure/cosmos';
import { ClientSecretCredential } from '@azure/identity';
import { retry } from './retry';
interface Asset {
platform: string;
type: string;
url: string;
mooncakeUrl?: string;
hash: string;
sha256hash: string;
size: number;
supportsFastUpdate?: boolean;
}
if (process.argv.length !== 8) {
console.error('Usage: node createAsset.js PRODUCT OS ARCH TYPE NAME FILE');
process.exit(-1);
}
// Contains all of the logic for mapping details to our actual product names in CosmosDB
function getPlatform(product: string, os: string, arch: string, type: string): string {
switch (os) {
case 'win32':
switch (product) {
case 'client': {
switch (type) {
case 'archive':
return `win32-${arch}-archive`;
case 'setup':
return `win32-${arch}`;
case 'user-setup':
return `win32-${arch}-user`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
}
case 'server':
if (arch === 'arm64') {
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
return `server-win32-${arch}`;
case 'web':
if (arch === 'arm64') {
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
return `server-win32-${arch}-web`;
case 'cli':
return `cli-win32-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
case 'alpine':
switch (product) {
case 'server':
return `server-alpine-${arch}`;
case 'web':
return `server-alpine-${arch}-web`;
case 'cli':
return `cli-alpine-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
case 'linux':
switch (type) {
case 'snap':
return `linux-snap-${arch}`;
case 'archive-unsigned':
switch (product) {
case 'client':
return `linux-${arch}`;
case 'server':
return `server-linux-${arch}`;
case 'web':
return arch === 'standalone' ? 'web-standalone' : `server-linux-${arch}-web`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
case 'deb-package':
return `linux-deb-${arch}`;
case 'rpm-package':
return `linux-rpm-${arch}`;
case 'cli':
return `cli-linux-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
case 'darwin':
switch (product) {
case 'client':
if (arch === 'x64') {
return 'darwin';
}
return `darwin-${arch}`;
case 'server':
if (arch === 'x64') {
return 'server-darwin';
}
return `server-darwin-${arch}`;
case 'web':
if (arch === 'x64') {
return 'server-darwin-web';
}
return `server-darwin-${arch}-web`;
case 'cli':
return `cli-darwin-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
}
// Contains all of the logic for mapping types to our actual types in CosmosDB
function getRealType(type: string) {
switch (type) {
case 'user-setup':
return 'setup';
case 'deb-package':
case 'rpm-package':
return 'package';
default:
return type;
}
}
function hashStream(hashName: string, stream: Readable): Promise<string> {
return new Promise<string>((c, e) => {
const shasum = crypto.createHash(hashName);
stream
.on('data', shasum.update.bind(shasum))
.on('error', e)
.on('close', () => c(shasum.digest('hex')));
});
}
function getEnv(name: string): string {
const result = process.env[name];
if (typeof result === 'undefined') {
throw new Error('Missing env: ' + name);
}
return result;
}
async function main(): Promise<void> {
const [, , product, os, arch, unprocessedType, fileName, filePath] = process.argv;
// getPlatform needs the unprocessedType
const platform = getPlatform(product, os, arch, unprocessedType);
const type = getRealType(unprocessedType);
const quality = getEnv('VSCODE_QUALITY');
const commit = getEnv('BUILD_SOURCEVERSION');
console.log('Creating asset...');
const stat = await new Promise<fs.Stats>((c, e) => fs.stat(filePath, (err, stat) => err ? e(err) : c(stat)));
const size = stat.size;
console.log('Size:', size);
const stream = fs.createReadStream(filePath);
const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);
console.log('SHA1:', sha1hash);
console.log('SHA256:', sha256hash);
const blobName = commit + '/' + fileName;
const storagePipelineOptions: StoragePipelineOptions = { retryOptions: { retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, maxTries: 6, tryTimeoutInMs: 10 * 60 * 1000 } };
const credential = new ClientSecretCredential(process.env['AZURE_TENANT_ID']!, process.env['AZURE_CLIENT_ID']!, process.env['AZURE_CLIENT_SECRET']!);
const blobServiceClient = new BlobServiceClient(`https://vscode.blob.core.windows.net`, credential, storagePipelineOptions);
const containerClient = blobServiceClient.getContainerClient(quality);
const blobClient = containerClient.getBlockBlobClient(blobName);
const blobOptions: BlockBlobParallelUploadOptions = {
blobHTTPHeaders: {
blobContentType: mime.lookup(filePath),
blobContentDisposition: `attachment; filename="${fileName}"`,
blobCacheControl: 'max-age=31536000, public'
}
};
const uploadPromises: Promise<void>[] = [];
uploadPromises.push((async () => {
console.log(`Checking for blob in Azure...`);
if (await retry(() => blobClient.exists())) {
throw new Error(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
} else {
await retry(async (attempt) => {
console.log(`Uploading blobs to Azure storage (attempt ${attempt})...`);
await blobClient.uploadFile(filePath, blobOptions);
console.log('Blob successfully uploaded to Azure storage.');
});
}
})());
const shouldUploadToMooncake = /true/i.test(process.env['VSCODE_PUBLISH_TO_MOONCAKE'] ?? 'true');
if (shouldUploadToMooncake) {
const mooncakeCredential = new ClientSecretCredential(process.env['AZURE_MOONCAKE_TENANT_ID']!, process.env['AZURE_MOONCAKE_CLIENT_ID']!, process.env['AZURE_MOONCAKE_CLIENT_SECRET']!);
const mooncakeBlobServiceClient = new BlobServiceClient(`https://vscode.blob.core.chinacloudapi.cn`, mooncakeCredential, storagePipelineOptions);
const mooncakeContainerClient = mooncakeBlobServiceClient.getContainerClient(quality);
const mooncakeBlobClient = mooncakeContainerClient.getBlockBlobClient(blobName);
uploadPromises.push((async () => {
console.log(`Checking for blob in Mooncake Azure...`);
if (await retry(() => mooncakeBlobClient.exists())) {
throw new Error(`Mooncake Blob ${quality}, ${blobName} already exists, not publishing again.`);
} else {
await retry(async (attempt) => {
console.log(`Uploading blobs to Mooncake Azure storage (attempt ${attempt})...`);
await mooncakeBlobClient.uploadFile(filePath, blobOptions);
console.log('Blob successfully uploaded to Mooncake Azure storage.');
});
}
})());
}
const promiseResults = await Promise.allSettled(uploadPromises);
const rejectedPromiseResults = promiseResults.filter(result => result.status === 'rejected') as PromiseRejectedResult[];
if (rejectedPromiseResults.length === 0) {
console.log('All blobs successfully uploaded.');
} else if (rejectedPromiseResults[0]?.reason?.message?.includes('already exists')) {
console.warn(rejectedPromiseResults[0].reason.message);
console.log('Some blobs successfully uploaded.');
} else {
// eslint-disable-next-line no-throw-literal
throw rejectedPromiseResults[0]?.reason;
}
const assetUrl = `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`;
const blobPath = new URL(assetUrl).pathname;
const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
const asset: Asset = {
platform,
type,
url: assetUrl,
hash: sha1hash,
mooncakeUrl,
sha256hash,
size
};
// Remove this if we ever need to rollback fast updates for windows
if (/win32/.test(platform)) {
asset.supportsFastUpdate = true;
}
console.log('Asset:', JSON.stringify(asset, null, ' '));
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, aadCredentials: credential });
const scripts = client.database('builds').container(quality).scripts;
await retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
console.log(` Done ✔️`);
}
main().then(() => {
console.log('Asset successfully created');
process.exit(0);
}, err => {
console.error(err);
process.exit(1);
});

File diff suppressed because one or more lines are too long

View file

@@ -0,0 +1,869 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as fs from 'fs';
import * as path from 'path';
import fetch, { RequestInit } from 'node-fetch';
import { Readable } from 'stream';
import { pipeline } from 'node:stream/promises';
import * as yauzl from 'yauzl';
import * as crypto from 'crypto';
import { retry } from './retry';
import { BlobServiceClient, BlockBlobParallelUploadOptions, StoragePipelineOptions, StorageRetryPolicyType } from '@azure/storage-blob';
import * as mime from 'mime';
import { CosmosClient } from '@azure/cosmos';
import { ClientSecretCredential } from '@azure/identity';
import * as cp from 'child_process';
import * as os from 'os';
function e(name: string): string {
const result = process.env[name];
if (typeof result !== 'string') {
throw new Error(`Missing env: ${name}`);
}
return result;
}
class Temp {
private _files: string[] = [];
tmpNameSync(): string {
const file = path.join(os.tmpdir(), crypto.randomBytes(20).toString('hex'));
this._files.push(file);
return file;
}
dispose(): void {
for (const file of this._files) {
try {
fs.unlinkSync(file);
} catch (err) {
// noop
}
}
}
}
class Sequencer {
private current: Promise<unknown> = Promise.resolve(null);
queue<T>(promiseTask: () => Promise<T>): Promise<T> {
return this.current = this.current.then(() => promiseTask(), () => promiseTask());
}
}
interface RequestOptions {
readonly body?: string;
}
interface CreateProvisionedFilesSuccessResponse {
IsSuccess: true;
ErrorDetails: null;
}
interface CreateProvisionedFilesErrorResponse {
IsSuccess: false;
ErrorDetails: {
Code: string;
Category: string;
Message: string;
CanRetry: boolean;
AdditionalProperties: Record<string, string>;
};
}
type CreateProvisionedFilesResponse = CreateProvisionedFilesSuccessResponse | CreateProvisionedFilesErrorResponse;
class ProvisionService {
constructor(
private readonly log: (...args: any[]) => void,
private readonly accessToken: string
) { }
async provision(releaseId: string, fileId: string, fileName: string) {
const body = JSON.stringify({
ReleaseId: releaseId,
PortalName: 'VSCode',
PublisherCode: 'VSCode',
ProvisionedFilesCollection: [{
PublisherKey: fileId,
IsStaticFriendlyFileName: true,
FriendlyFileName: fileName,
MaxTTL: '1440',
CdnMappings: ['ECN']
}]
});
this.log(`Provisioning ${fileName} (releaseId: ${releaseId}, fileId: ${fileId})...`);
const res = await retry(() => this.request<CreateProvisionedFilesResponse>('POST', '/api/v2/ProvisionedFiles/CreateProvisionedFiles', { body }));
if (!res.IsSuccess) {
throw new Error(`Failed to submit provisioning request: ${JSON.stringify(res.ErrorDetails)}`);
}
this.log(`Successfully provisioned ${fileName}`);
}
private async request<T>(method: string, url: string, options?: RequestOptions): Promise<T> {
const opts: RequestInit = {
method,
body: options?.body,
headers: {
Authorization: `Bearer ${this.accessToken}`,
'Content-Type': 'application/json'
}
};
const res = await fetch(`https://dsprovisionapi.microsoft.com${url}`, opts);
if (!res.ok || res.status < 200 || res.status >= 500) {
throw new Error(`Unexpected status code: ${res.status}`);
}
return await res.json();
}
}
function hashStream(hashName: string, stream: Readable): Promise<string> {
return new Promise<string>((c, e) => {
const shasum = crypto.createHash(hashName);
stream
.on('data', shasum.update.bind(shasum))
.on('error', e)
.on('close', () => c(shasum.digest('hex')));
});
}
interface Release {
readonly releaseId: string;
readonly fileId: string;
}
interface SubmitReleaseResult {
submissionResponse: {
operationId: string;
statusCode: string;
};
}
interface ReleaseDetailsResult {
releaseDetails: [{
fileDetails: [{ publisherKey: string }];
statusCode: 'inprogress' | 'pass';
}];
}
class ESRPClient {
private static Sequencer = new Sequencer();
private readonly authPath: string;
constructor(
private readonly log: (...args: any[]) => void,
private readonly tmp: Temp,
tenantId: string,
clientId: string,
authCertSubjectName: string,
requestSigningCertSubjectName: string,
) {
this.authPath = this.tmp.tmpNameSync();
fs.writeFileSync(this.authPath, JSON.stringify({
Version: '1.0.0',
AuthenticationType: 'AAD_CERT',
TenantId: tenantId,
ClientId: clientId,
AuthCert: {
SubjectName: authCertSubjectName,
StoreLocation: 'LocalMachine',
StoreName: 'My',
SendX5c: 'true'
},
RequestSigningCert: {
SubjectName: requestSigningCertSubjectName,
StoreLocation: 'LocalMachine',
StoreName: 'My'
}
}));
}
async release(
version: string,
filePath: string
): Promise<Release> {
const submitReleaseResult = await ESRPClient.Sequencer.queue(async () => {
this.log(`Submitting release for ${version}: ${filePath}`);
return await this.SubmitRelease(version, filePath);
});
if (submitReleaseResult.submissionResponse.statusCode !== 'pass') {
throw new Error(`Unexpected status code: ${submitReleaseResult.submissionResponse.statusCode}`);
}
const releaseId = submitReleaseResult.submissionResponse.operationId;
this.log(`Successfully submitted release ${releaseId}. Polling for completion...`);
let details!: ReleaseDetailsResult;
// Poll every 5 seconds, wait 60 minutes max -> poll 60/5*60=720 times
for (let i = 0; i < 720; i++) {
details = await this.ReleaseDetails(releaseId);
if (details.releaseDetails[0].statusCode === 'pass') {
break;
} else if (details.releaseDetails[0].statusCode !== 'inprogress') {
throw new Error(`Failed to submit release: ${JSON.stringify(details)}`);
}
await new Promise(c => setTimeout(c, 5000));
}
if (details.releaseDetails[0].statusCode !== 'pass') {
throw new Error(`Timed out waiting for release ${releaseId}: ${JSON.stringify(details)}`);
}
const fileId = details.releaseDetails[0].fileDetails[0].publisherKey;
this.log('Release completed successfully with fileId: ', fileId);
return { releaseId, fileId };
}
private async SubmitRelease(
version: string,
filePath: string
): Promise<SubmitReleaseResult> {
const policyPath = this.tmp.tmpNameSync();
fs.writeFileSync(policyPath, JSON.stringify({
Version: '1.0.0',
Audience: 'InternalLimited',
Intent: 'distribution',
ContentType: 'InstallPackage'
}));
const inputPath = this.tmp.tmpNameSync();
const size = fs.statSync(filePath).size;
const istream = fs.createReadStream(filePath);
const sha256 = await hashStream('sha256', istream);
fs.writeFileSync(inputPath, JSON.stringify({
Version: '1.0.0',
ReleaseInfo: {
ReleaseMetadata: {
Title: 'VS Code',
Properties: {
ReleaseContentType: 'InstallPackage'
},
MinimumNumberOfApprovers: 1
},
ProductInfo: {
Name: 'VS Code',
Version: version,
Description: path.basename(filePath, path.extname(filePath)),
},
Owners: [
{
Owner: {
UserPrincipalName: 'jomo@microsoft.com'
}
}
],
Approvers: [
{
Approver: {
UserPrincipalName: 'jomo@microsoft.com'
},
IsAutoApproved: true,
IsMandatory: false
}
],
AccessPermissions: {
MainPublisher: 'VSCode',
ChannelDownloadEntityDetails: {
Consumer: ['VSCode']
}
},
CreatedBy: {
UserPrincipalName: 'jomo@microsoft.com'
}
},
ReleaseBatches: [
{
ReleaseRequestFiles: [
{
SizeInBytes: size,
SourceHash: sha256,
HashType: 'SHA256',
SourceLocation: path.basename(filePath)
}
],
SourceLocationType: 'UNC',
SourceRootDirectory: path.dirname(filePath),
DestinationLocationType: 'AzureBlob'
}
]
}));
const outputPath = this.tmp.tmpNameSync();
cp.execSync(`ESRPClient SubmitRelease -a ${this.authPath} -p ${policyPath} -i ${inputPath} -o ${outputPath}`, { stdio: 'inherit' });
const output = fs.readFileSync(outputPath, 'utf8');
return JSON.parse(output) as SubmitReleaseResult;
}
private async ReleaseDetails(
releaseId: string
): Promise<ReleaseDetailsResult> {
const inputPath = this.tmp.tmpNameSync();
fs.writeFileSync(inputPath, JSON.stringify({
Version: '1.0.0',
OperationIds: [releaseId]
}));
const outputPath = this.tmp.tmpNameSync();
cp.execSync(`ESRPClient ReleaseDetails -a ${this.authPath} -i ${inputPath} -o ${outputPath}`, { stdio: 'inherit' });
const output = fs.readFileSync(outputPath, 'utf8');
return JSON.parse(output) as ReleaseDetailsResult;
}
}
async function releaseAndProvision(
log: (...args: any[]) => void,
releaseTenantId: string,
releaseClientId: string,
releaseAuthCertSubjectName: string,
releaseRequestSigningCertSubjectName: string,
provisionTenantId: string,
provisionAADUsername: string,
provisionAADPassword: string,
version: string,
quality: string,
filePath: string
): Promise<string> {
const fileName = `${quality}/${version}/${path.basename(filePath)}`;
const result = `${e('PRSS_CDN_URL')}/${fileName}`;
const res = await retry(() => fetch(result));
if (res.status === 200) {
log(`Already released and provisioned: ${result}`);
return result;
}
const tmp = new Temp();
process.on('exit', () => tmp.dispose());
const esrpclient = new ESRPClient(log, tmp, releaseTenantId, releaseClientId, releaseAuthCertSubjectName, releaseRequestSigningCertSubjectName);
const release = await esrpclient.release(version, filePath);
const credential = new ClientSecretCredential(provisionTenantId, provisionAADUsername, provisionAADPassword);
const accessToken = await credential.getToken(['https://microsoft.onmicrosoft.com/DS.Provisioning.WebApi/.default']);
const service = new ProvisionService(log, accessToken.token);
await service.provision(release.releaseId, release.fileId, fileName);
return result;
}
class State {
private statePath: string;
private set = new Set<string>();
constructor() {
const pipelineWorkspacePath = e('PIPELINE_WORKSPACE');
const previousState = fs.readdirSync(pipelineWorkspacePath)
.map(name => /^artifacts_processed_(\d+)$/.exec(name))
.filter((match): match is RegExpExecArray => !!match)
.map(match => ({ name: match![0], attempt: Number(match![1]) }))
.sort((a, b) => b.attempt - a.attempt)[0];
if (previousState) {
const previousStatePath = path.join(pipelineWorkspacePath, previousState.name, previousState.name + '.txt');
fs.readFileSync(previousStatePath, 'utf8').split(/\n/).filter(name => !!name).forEach(name => this.set.add(name));
}
const stageAttempt = e('SYSTEM_STAGEATTEMPT');
this.statePath = path.join(pipelineWorkspacePath, `artifacts_processed_${stageAttempt}`, `artifacts_processed_${stageAttempt}.txt`);
fs.mkdirSync(path.dirname(this.statePath), { recursive: true });
fs.writeFileSync(this.statePath, [...this.set.values()].join('\n'));
}
get size(): number {
return this.set.size;
}
has(name: string): boolean {
return this.set.has(name);
}
add(name: string): void {
this.set.add(name);
fs.appendFileSync(this.statePath, `${name}\n`);
}
[Symbol.iterator](): IterableIterator<string> {
return this.set[Symbol.iterator]();
}
}
const azdoFetchOptions = { headers: { Authorization: `Bearer ${e('SYSTEM_ACCESSTOKEN')}` }, timeout: 60_000 };
async function requestAZDOAPI<T>(path: string): Promise<T> {
const res = await fetch(`${e('BUILDS_API_URL')}${path}?api-version=6.0`, azdoFetchOptions);
if (!res.ok) {
throw new Error(`Unexpected status code: ${res.status}`);
}
return await Promise.race([
res.json(),
new Promise((_, reject) => setTimeout(() => reject(new Error('Timeout')), 60_000))
]);
}
interface Artifact {
readonly name: string;
readonly resource: {
readonly downloadUrl: string;
readonly properties: {
readonly artifactsize: number;
};
};
}
async function getPipelineArtifacts(): Promise<Artifact[]> {
const result = await requestAZDOAPI<{ readonly value: Artifact[] }>('artifacts');
return result.value.filter(a => /^vscode_/.test(a.name) && !/sbom$/.test(a.name));
}
interface Timeline {
readonly records: {
readonly name: string;
readonly type: string;
readonly state: string;
}[];
}
async function getPipelineTimeline(): Promise<Timeline> {
return await requestAZDOAPI<Timeline>('timeline');
}
async function downloadArtifact(artifact: Artifact, downloadPath: string): Promise<void> {
const res = await fetch(artifact.resource.downloadUrl, azdoFetchOptions);
if (!res.ok) {
throw new Error(`Unexpected status code: ${res.status}`);
}
await Promise.race([
pipeline(res.body, fs.createWriteStream(downloadPath)),
new Promise((_, reject) => setTimeout(() => reject(new Error('Timeout')), 5 * 60 * 1000))
]);
}
async function unzip(packagePath: string, outputPath: string): Promise<string> {
return new Promise((resolve, reject) => {
yauzl.open(packagePath, { lazyEntries: true }, (err, zipfile) => {
if (err) {
return reject(err);
}
zipfile!.on('entry', entry => {
if (/\/$/.test(entry.fileName)) {
zipfile!.readEntry();
} else {
zipfile!.openReadStream(entry, (err, istream) => {
if (err) {
return reject(err);
}
const filePath = path.join(outputPath, entry.fileName);
fs.mkdirSync(path.dirname(filePath), { recursive: true });
const ostream = fs.createWriteStream(filePath);
ostream.on('finish', () => {
zipfile!.close();
resolve(filePath);
});
istream?.on('error', err => reject(err));
istream!.pipe(ostream);
});
}
});
zipfile!.readEntry();
});
});
}
interface Asset {
platform: string;
type: string;
url: string;
mooncakeUrl?: string;
prssUrl?: string;
hash: string;
sha256hash: string;
size: number;
supportsFastUpdate?: boolean;
}
// Contains all of the logic for mapping details to our actual product names in CosmosDB
function getPlatform(product: string, os: string, arch: string, type: string): string {
switch (os) {
case 'win32':
switch (product) {
case 'client': {
switch (type) {
case 'archive':
return `win32-${arch}-archive`;
case 'setup':
return `win32-${arch}`;
case 'user-setup':
return `win32-${arch}-user`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
}
case 'server':
if (arch === 'arm64') {
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
return `server-win32-${arch}`;
case 'web':
if (arch === 'arm64') {
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
return `server-win32-${arch}-web`;
case 'cli':
return `cli-win32-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
case 'alpine':
switch (product) {
case 'server':
return `server-alpine-${arch}`;
case 'web':
return `server-alpine-${arch}-web`;
case 'cli':
return `cli-alpine-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
case 'linux':
switch (type) {
case 'snap':
return `linux-snap-${arch}`;
case 'archive-unsigned':
switch (product) {
case 'client':
return `linux-${arch}`;
case 'server':
return `server-linux-${arch}`;
case 'web':
return arch === 'standalone' ? 'web-standalone' : `server-linux-${arch}-web`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
case 'deb-package':
return `linux-deb-${arch}`;
case 'rpm-package':
return `linux-rpm-${arch}`;
case 'cli':
return `cli-linux-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
case 'darwin':
switch (product) {
case 'client':
if (arch === 'x64') {
return 'darwin';
}
return `darwin-${arch}`;
case 'server':
if (arch === 'x64') {
return 'server-darwin';
}
return `server-darwin-${arch}`;
case 'web':
if (arch === 'x64') {
return 'server-darwin-web';
}
return `server-darwin-${arch}-web`;
case 'cli':
return `cli-darwin-${arch}`;
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
default:
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
}
}
// Contains all of the logic for mapping types to our actual types in CosmosDB
function getRealType(type: string) {
switch (type) {
case 'user-setup':
return 'setup';
case 'deb-package':
case 'rpm-package':
return 'package';
default:
return type;
}
}
const azureSequencer = new Sequencer();
const mooncakeSequencer = new Sequencer();
async function uploadAssetLegacy(log: (...args: any[]) => void, quality: string, commit: string, filePath: string): Promise<{ assetUrl: string; mooncakeUrl: string }> {
const fileName = path.basename(filePath);
const blobName = commit + '/' + fileName;
const storagePipelineOptions: StoragePipelineOptions = { retryOptions: { retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, maxTries: 6, tryTimeoutInMs: 10 * 60 * 1000 } };
const credential = new ClientSecretCredential(e('AZURE_TENANT_ID'), e('AZURE_CLIENT_ID'), e('AZURE_CLIENT_SECRET'));
const blobServiceClient = new BlobServiceClient(`https://vscode.blob.core.windows.net`, credential, storagePipelineOptions);
const containerClient = blobServiceClient.getContainerClient(quality);
const blobClient = containerClient.getBlockBlobClient(blobName);
const blobOptions: BlockBlobParallelUploadOptions = {
blobHTTPHeaders: {
blobContentType: mime.lookup(filePath),
blobContentDisposition: `attachment; filename="${fileName}"`,
blobCacheControl: 'max-age=31536000, public'
}
};
const uploadPromises: Promise<void>[] = [];
uploadPromises.push((async (): Promise<void> => {
log(`Checking for blob in Azure...`);
if (await retry(() => blobClient.exists())) {
throw new Error(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
} else {
await retry(attempt => azureSequencer.queue(async () => {
log(`Uploading blobs to Azure storage (attempt ${attempt})...`);
await blobClient.uploadFile(filePath, blobOptions);
log('Blob successfully uploaded to Azure storage.');
}));
}
})());
const shouldUploadToMooncake = /true/i.test(e('VSCODE_PUBLISH_TO_MOONCAKE'));
if (shouldUploadToMooncake) {
const mooncakeCredential = new ClientSecretCredential(e('AZURE_MOONCAKE_TENANT_ID'), e('AZURE_MOONCAKE_CLIENT_ID'), e('AZURE_MOONCAKE_CLIENT_SECRET'));
const mooncakeBlobServiceClient = new BlobServiceClient(`https://vscode.blob.core.chinacloudapi.cn`, mooncakeCredential, storagePipelineOptions);
const mooncakeContainerClient = mooncakeBlobServiceClient.getContainerClient(quality);
const mooncakeBlobClient = mooncakeContainerClient.getBlockBlobClient(blobName);
uploadPromises.push((async (): Promise<void> => {
log(`Checking for blob in Mooncake Azure...`);
if (await retry(() => mooncakeBlobClient.exists())) {
throw new Error(`Mooncake Blob ${quality}, ${blobName} already exists, not publishing again.`);
} else {
await retry(attempt => mooncakeSequencer.queue(async () => {
log(`Uploading blobs to Mooncake Azure storage (attempt ${attempt})...`);
await mooncakeBlobClient.uploadFile(filePath, blobOptions);
log('Blob successfully uploaded to Mooncake Azure storage.');
}));
}
})());
}
const promiseResults = await Promise.allSettled(uploadPromises);
const rejectedPromiseResults = promiseResults.filter(result => result.status === 'rejected') as PromiseRejectedResult[];
if (rejectedPromiseResults.length === 0) {
log('All blobs successfully uploaded.');
} else if (rejectedPromiseResults[0]?.reason?.message?.includes('already exists')) {
log(rejectedPromiseResults[0].reason.message);
log('Some blobs successfully uploaded.');
} else {
// eslint-disable-next-line no-throw-literal
throw rejectedPromiseResults[0]?.reason;
}
const assetUrl = `${e('AZURE_CDN_URL')}/${quality}/${blobName}`;
const blobPath = new URL(assetUrl).pathname;
const mooncakeUrl = `${e('MOONCAKE_CDN_URL')}${blobPath}`;
return { assetUrl, mooncakeUrl };
}
const downloadSequencer = new Sequencer();
const cosmosSequencer = new Sequencer();
async function processArtifact(artifact: Artifact): Promise<void> {
const match = /^vscode_(?<product>[^_]+)_(?<os>[^_]+)_(?<arch>[^_]+)_(?<unprocessedType>[^_]+)$/.exec(artifact.name);
if (!match) {
throw new Error(`Invalid artifact name: ${artifact.name}`);
}
const { product, os, arch, unprocessedType } = match.groups!;
const log = (...args: any[]) => console.log(`[${product} ${os} ${arch} ${unprocessedType}]`, ...args);
const filePath = await retry(async attempt => {
const artifactZipPath = path.join(e('AGENT_TEMPDIRECTORY'), `${artifact.name}.zip`);
await downloadSequencer.queue(async () => {
log(`Downloading ${artifact.resource.downloadUrl} (attempt ${attempt})...`);
await downloadArtifact(artifact, artifactZipPath);
});
log(`Extracting (attempt ${attempt}) ...`);
const filePath = await unzip(artifactZipPath, e('AGENT_TEMPDIRECTORY'));
const artifactSize = fs.statSync(filePath).size;
if (artifactSize !== Number(artifact.resource.properties.artifactsize)) {
throw new Error(`Artifact size mismatch. Expected ${artifact.resource.properties.artifactsize}. Actual ${artifactSize}`);
}
return filePath;
});
// getPlatform needs the unprocessedType
const quality = e('VSCODE_QUALITY');
const commit = e('BUILD_SOURCEVERSION');
const platform = getPlatform(product, os, arch, unprocessedType);
const type = getRealType(unprocessedType);
const size = fs.statSync(filePath).size;
const stream = fs.createReadStream(filePath);
const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);
log(`Publishing (size = ${size}, SHA1 = ${sha1hash}, SHA256 = ${sha256hash})...`);
const [{ assetUrl, mooncakeUrl }, prssUrl] = await Promise.all([
uploadAssetLegacy(log, quality, commit, filePath),
releaseAndProvision(
log,
e('RELEASE_TENANT_ID'),
e('RELEASE_CLIENT_ID'),
e('RELEASE_AUTH_CERT_SUBJECT_NAME'),
e('RELEASE_REQUEST_SIGNING_CERT_SUBJECT_NAME'),
e('PROVISION_TENANT_ID'),
e('PROVISION_AAD_USERNAME'),
e('PROVISION_AAD_PASSWORD'),
commit,
quality,
filePath
)
]);
const asset: Asset = { platform, type, url: assetUrl, hash: sha1hash, mooncakeUrl, prssUrl, sha256hash, size, supportsFastUpdate: true };
log('Creating asset...', JSON.stringify(asset));
await retry(async (attempt) => {
await cosmosSequencer.queue(async () => {
log(`Creating asset in Cosmos DB (attempt ${attempt})...`);
const aadCredentials = new ClientSecretCredential(e('AZURE_TENANT_ID'), e('AZURE_CLIENT_ID'), e('AZURE_CLIENT_SECRET'));
const client = new CosmosClient({ endpoint: e('AZURE_DOCUMENTDB_ENDPOINT'), aadCredentials });
const scripts = client.database('builds').container(quality).scripts;
await scripts.storedProcedure('createAsset').execute('', [commit, asset, true]);
});
});
log('Asset successfully created');
}
async function main() {
const done = new State();
const processing = new Set<string>();
for (const name of done) {
console.log(`\u2705 ${name}`);
}
const stages = new Set<string>();
if (e('VSCODE_BUILD_STAGE_WINDOWS') === 'True') { stages.add('Windows'); }
if (e('VSCODE_BUILD_STAGE_LINUX') === 'True') { stages.add('Linux'); }
if (e('VSCODE_BUILD_STAGE_ALPINE') === 'True') { stages.add('Alpine'); }
if (e('VSCODE_BUILD_STAGE_MACOS') === 'True') { stages.add('macOS'); }
if (e('VSCODE_BUILD_STAGE_WEB') === 'True') { stages.add('Web'); }
const operations: { name: string; operation: Promise<void> }[] = [];
while (true) {
const [timeline, artifacts] = await Promise.all([retry(() => getPipelineTimeline()), retry(() => getPipelineArtifacts())]);
const stagesCompleted = new Set<string>(timeline.records.filter(r => r.type === 'Stage' && r.state === 'completed' && stages.has(r.name)).map(r => r.name));
const stagesInProgress = [...stages].filter(s => !stagesCompleted.has(s));
if (stagesInProgress.length > 0) {
console.log('Stages in progress:', stagesInProgress.join(', '));
}
const artifactsInProgress = artifacts.filter(a => processing.has(a.name));
if (artifactsInProgress.length > 0) {
console.log('Artifacts in progress:', artifactsInProgress.map(a => a.name).join(', '));
}
if (stagesCompleted.size === stages.size && artifacts.length === done.size + processing.size) {
break;
}
for (const artifact of artifacts) {
if (done.has(artifact.name) || processing.has(artifact.name)) {
continue;
}
console.log(`Found new artifact: ${artifact.name}`);
processing.add(artifact.name);
const operation = processArtifact(artifact).then(() => {
processing.delete(artifact.name);
done.add(artifact.name);
console.log(`\u2705 ${artifact.name}`);
});
operations.push({ name: artifact.name, operation });
}
await new Promise(c => setTimeout(c, 10_000));
}
console.log(`Found all ${done.size + processing.size} artifacts, waiting for ${processing.size} artifacts to finish publishing...`);
const artifactsInProgress = operations.filter(o => processing.has(o.name));
if (artifactsInProgress.length > 0) {
console.log('Artifacts in progress:', artifactsInProgress.map(a => a.name).join(', '));
}
const results = await Promise.allSettled(operations.map(o => o.operation));
for (let i = 0; i < operations.length; i++) {
const result = results[i];
if (result.status === 'rejected') {
console.error(`[${operations[i].name}]`, result.reason);
}
}
if (results.some(r => r.status === 'rejected')) {
throw new Error('Some artifacts failed to publish');
}
console.log(`All ${done.size} artifacts published!`);
}
if (require.main === module) {
main().then(() => {
process.exit(0);
}, err => {
console.error(err);
process.exit(1);
});
}
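For context on the new publish script above, a minimal sketch (not part of the commit) of how the artifact-name regex in processArtifact feeds getPlatform and getRealType; the sample artifact name is assumed for illustration.

const name = 'vscode_client_win32_x64_setup'; // hypothetical artifact name
const match = /^vscode_(?<product>[^_]+)_(?<os>[^_]+)_(?<arch>[^_]+)_(?<unprocessedType>[^_]+)$/.exec(name);
if (match) {
	const { product, os, arch, unprocessedType } = match.groups!;
	// product = 'client', os = 'win32', arch = 'x64', unprocessedType = 'setup'
	// getPlatform(product, os, arch, unprocessedType) maps this to 'win32-x64',
	// and getRealType(unprocessedType) leaves 'setup' as-is.
	console.log(product, os, arch, unprocessedType);
}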

View file

@@ -12,18 +12,17 @@ async function retry(fn) {
return await fn(run);
}
catch (err) {
if (!/ECONNRESET|CredentialUnavailableError|Audience validation failed/i.test(err.message)) {
if (!/fetch failed|timeout|TimeoutError|Timeout Error|RestError|Client network socket disconnected|socket hang up|ECONNRESET|CredentialUnavailableError|endpoints_resolution_error|Audience validation failed|end of central directory record signature not found/i.test(err.message)) {
throw err;
}
lastError = err;
const millis = (Math.random() * 200) + (50 * Math.pow(1.5, run));
console.log(`Request failed, retrying in ${millis}ms...`);
// maximum delay is 10th retry: ~3 seconds
const millis = Math.floor((Math.random() * 200) + (50 * Math.pow(1.5, run)));
await new Promise(c => setTimeout(c, millis));
}
}
console.log(`Too many retries, aborting.`);
console.error(`Too many retries, aborting.`);
throw lastError;
}
exports.retry = retry;
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicmV0cnkuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyJyZXRyeS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiO0FBQUE7OztnR0FHZ0c7OztBQUV6RixLQUFLLFVBQVUsS0FBSyxDQUFJLEVBQW1DO0lBQ2pFLElBQUksU0FBNEIsQ0FBQztJQUVqQyxLQUFLLElBQUksR0FBRyxHQUFHLENBQUMsRUFBRSxHQUFHLElBQUksRUFBRSxFQUFFLEdBQUcsRUFBRSxFQUFFLENBQUM7UUFDcEMsSUFBSSxDQUFDO1lBQ0osT0FBTyxNQUFNLEVBQUUsQ0FBQyxHQUFHLENBQUMsQ0FBQztRQUN0QixDQUFDO1FBQUMsT0FBTyxHQUFHLEVBQUUsQ0FBQztZQUNkLElBQUksQ0FBQyxtRUFBbUUsQ0FBQyxJQUFJLENBQUMsR0FBRyxDQUFDLE9BQU8sQ0FBQyxFQUFFLENBQUM7Z0JBQzVGLE1BQU0sR0FBRyxDQUFDO1lBQ1gsQ0FBQztZQUVELFNBQVMsR0FBRyxHQUFHLENBQUM7WUFDaEIsTUFBTSxNQUFNLEdBQUcsQ0FBQyxJQUFJLENBQUMsTUFBTSxFQUFFLEdBQUcsR0FBRyxDQUFDLEdBQUcsQ0FBQyxFQUFFLEdBQUcsSUFBSSxDQUFDLEdBQUcsQ0FBQyxHQUFHLEVBQUUsR0FBRyxDQUFDLENBQUMsQ0FBQztZQUNqRSxPQUFPLENBQUMsR0FBRyxDQUFDLCtCQUErQixNQUFNLE9BQU8sQ0FBQyxDQUFDO1lBRTFELDBDQUEwQztZQUMxQyxNQUFNLElBQUksT0FBTyxDQUFDLENBQUMsQ0FBQyxFQUFFLENBQUMsVUFBVSxDQUFDLENBQUMsRUFBRSxNQUFNLENBQUMsQ0FBQyxDQUFDO1FBQy9DLENBQUM7SUFDRixDQUFDO0lBRUQsT0FBTyxDQUFDLEdBQUcsQ0FBQyw2QkFBNkIsQ0FBQyxDQUFDO0lBQzNDLE1BQU0sU0FBUyxDQUFDO0FBQ2pCLENBQUM7QUF0QkQsc0JBc0JDIn0=
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicmV0cnkuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyJyZXRyeS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiO0FBQUE7OztnR0FHZ0c7OztBQUV6RixLQUFLLFVBQVUsS0FBSyxDQUFJLEVBQW1DO0lBQ2pFLElBQUksU0FBNEIsQ0FBQztJQUVqQyxLQUFLLElBQUksR0FBRyxHQUFHLENBQUMsRUFBRSxHQUFHLElBQUksRUFBRSxFQUFFLEdBQUcsRUFBRSxFQUFFLENBQUM7UUFDcEMsSUFBSSxDQUFDO1lBQ0osT0FBTyxNQUFNLEVBQUUsQ0FBQyxHQUFHLENBQUMsQ0FBQztRQUN0QixDQUFDO1FBQUMsT0FBTyxHQUFHLEVBQUUsQ0FBQztZQUNkLElBQUksQ0FBQyw4UEFBOFAsQ0FBQyxJQUFJLENBQUMsR0FBRyxDQUFDLE9BQU8sQ0FBQyxFQUFFLENBQUM7Z0JBQ3ZSLE1BQU0sR0FBRyxDQUFDO1lBQ1gsQ0FBQztZQUVELFNBQVMsR0FBRyxHQUFHLENBQUM7WUFFaEIsMENBQTBDO1lBQzFDLE1BQU0sTUFBTSxHQUFHLElBQUksQ0FBQyxLQUFLLENBQUMsQ0FBQyxJQUFJLENBQUMsTUFBTSxFQUFFLEdBQUcsR0FBRyxDQUFDLEdBQUcsQ0FBQyxFQUFFLEdBQUcsSUFBSSxDQUFDLEdBQUcsQ0FBQyxHQUFHLEVBQUUsR0FBRyxDQUFDLENBQUMsQ0FBQyxDQUFDO1lBQzdFLE1BQU0sSUFBSSxPQUFPLENBQUMsQ0FBQyxDQUFDLEVBQUUsQ0FBQyxVQUFVLENBQUMsQ0FBQyxFQUFFLE1BQU0sQ0FBQyxDQUFDLENBQUM7UUFDL0MsQ0FBQztJQUNGLENBQUM7SUFFRCxPQUFPLENBQUMsS0FBSyxDQUFDLDZCQUE2QixDQUFDLENBQUM7SUFDN0MsTUFBTSxTQUFTLENBQUM7QUFDakIsQ0FBQztBQXJCRCxzQkFxQkMifQ==

View file

@@ -10,19 +10,18 @@ export async function retry<T>(fn: (attempt: number) => Promise<T>): Promise<T>
try {
return await fn(run);
} catch (err) {
if (!/ECONNRESET|CredentialUnavailableError|Audience validation failed/i.test(err.message)) {
if (!/fetch failed|timeout|TimeoutError|Timeout Error|RestError|Client network socket disconnected|socket hang up|ECONNRESET|CredentialUnavailableError|endpoints_resolution_error|Audience validation failed|end of central directory record signature not found/i.test(err.message)) {
throw err;
}
lastError = err;
const millis = (Math.random() * 200) + (50 * Math.pow(1.5, run));
console.log(`Request failed, retrying in ${millis}ms...`);
// maximum delay is 10th retry: ~3 seconds
const millis = Math.floor((Math.random() * 200) + (50 * Math.pow(1.5, run)));
await new Promise(c => setTimeout(c, millis));
}
}
console.log(`Too many retries, aborting.`);
console.error(`Too many retries, aborting.`);
throw lastError;
}
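A quick worked check of the new jittered backoff above (a sketch, not part of the diff, assuming attempts are numbered from 1 as the comment implies): the delay is floor(random() * 200 + 50 * 1.5^attempt), so the 10th retry waits at most floor(200 + 50 * 1.5^10) = 3083 ms, matching the "~3 seconds" comment.

for (let attempt = 1; attempt <= 10; attempt++) {
	// upper bound of the jittered delay for this attempt
	const maxMillis = Math.floor(200 + 50 * Math.pow(1.5, attempt));
	console.log(`attempt ${attempt}: waits up to ${maxMillis}ms`);
}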

View file

@@ -8,11 +8,11 @@ const sign_1 = require("./sign");
const path = require("path");
(0, sign_1.main)([
process.env['EsrpCliDllPath'],
'windows',
'sign-windows',
process.env['ESRPPKI'],
process.env['ESRPAADUsername'],
process.env['ESRPAADPassword'],
path.dirname(process.argv[2]),
path.basename(process.argv[2])
]);
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic2lnbi13aW4zMi5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbInNpZ24td2luMzIudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IjtBQUFBOzs7Z0dBR2dHOztBQUVoRyxpQ0FBOEI7QUFDOUIsNkJBQTZCO0FBRTdCLElBQUEsV0FBSSxFQUFDO0lBQ0osT0FBTyxDQUFDLEdBQUcsQ0FBQyxnQkFBZ0IsQ0FBRTtJQUM5QixTQUFTO0lBQ1QsT0FBTyxDQUFDLEdBQUcsQ0FBQyxTQUFTLENBQUU7SUFDdkIsT0FBTyxDQUFDLEdBQUcsQ0FBQyxpQkFBaUIsQ0FBRTtJQUMvQixPQUFPLENBQUMsR0FBRyxDQUFDLGlCQUFpQixDQUFFO0lBQy9CLElBQUksQ0FBQyxPQUFPLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyxDQUFDLENBQUMsQ0FBQztJQUM3QixJQUFJLENBQUMsUUFBUSxDQUFDLE9BQU8sQ0FBQyxJQUFJLENBQUMsQ0FBQyxDQUFDLENBQUM7Q0FDOUIsQ0FBQyxDQUFDIn0=
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic2lnbi13aW4zMi5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbInNpZ24td2luMzIudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IjtBQUFBOzs7Z0dBR2dHOztBQUVoRyxpQ0FBOEI7QUFDOUIsNkJBQTZCO0FBRTdCLElBQUEsV0FBSSxFQUFDO0lBQ0osT0FBTyxDQUFDLEdBQUcsQ0FBQyxnQkFBZ0IsQ0FBRTtJQUM5QixjQUFjO0lBQ2QsT0FBTyxDQUFDLEdBQUcsQ0FBQyxTQUFTLENBQUU7SUFDdkIsT0FBTyxDQUFDLEdBQUcsQ0FBQyxpQkFBaUIsQ0FBRTtJQUMvQixPQUFPLENBQUMsR0FBRyxDQUFDLGlCQUFpQixDQUFFO0lBQy9CLElBQUksQ0FBQyxPQUFPLENBQUMsT0FBTyxDQUFDLElBQUksQ0FBQyxDQUFDLENBQUMsQ0FBQztJQUM3QixJQUFJLENBQUMsUUFBUSxDQUFDLE9BQU8sQ0FBQyxJQUFJLENBQUMsQ0FBQyxDQUFDLENBQUM7Q0FDOUIsQ0FBQyxDQUFDIn0=

View file

@@ -8,7 +8,7 @@ import * as path from 'path';
main([
process.env['EsrpCliDllPath']!,
'windows',
'sign-windows',
process.env['ESRPPKI']!,
process.env['ESRPAADUsername']!,
process.env['ESRPAADPassword']!,

File diff suppressed because one or more lines are too long

View file

@@ -5,11 +5,11 @@
import * as cp from 'child_process';
import * as fs from 'fs';
import * as crypto from 'crypto';
import * as path from 'path';
import * as os from 'os';
import * as crypto from 'crypto';
class Temp {
export class Temp {
private _files: string[] = [];
tmpNameSync(): string {
@@ -29,18 +29,92 @@ class Temp {
}
}
function getParams(type: string): string {
interface Params {
readonly keyCode: string;
readonly operationSetCode: string;
readonly parameters: {
readonly parameterName: string;
readonly parameterValue: string;
}[];
readonly toolName: string;
readonly toolVersion: string;
}
function getParams(type: string): Params[] {
switch (type) {
case 'windows':
return '[{"keyCode":"CP-230012","operationSetCode":"SigntoolSign","parameters":[{"parameterName":"OpusName","parameterValue":"VS Code"},{"parameterName":"OpusInfo","parameterValue":"https://code.visualstudio.com/"},{"parameterName":"Append","parameterValue":"/as"},{"parameterName":"FileDigest","parameterValue":"/fd \\"SHA256\\""},{"parameterName":"PageHash","parameterValue":"/NPH"},{"parameterName":"TimeStamp","parameterValue":"/tr \\"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\\" /td sha256"}],"toolName":"sign","toolVersion":"1.0"},{"keyCode":"CP-230012","operationSetCode":"SigntoolVerify","parameters":[{"parameterName":"VerifyAll","parameterValue":"/all"}],"toolName":"sign","toolVersion":"1.0"}]';
case 'windows-appx':
return '[{"keyCode":"CP-229979","operationSetCode":"SigntoolSign","parameters":[{"parameterName":"OpusName","parameterValue":"VS Code"},{"parameterName":"OpusInfo","parameterValue":"https://code.visualstudio.com/"},{"parameterName":"FileDigest","parameterValue":"/fd \\"SHA256\\""},{"parameterName":"PageHash","parameterValue":"/NPH"},{"parameterName":"TimeStamp","parameterValue":"/tr \\"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\\" /td sha256"}],"toolName":"sign","toolVersion":"1.0"},{"keyCode":"CP-229979","operationSetCode":"SigntoolVerify","parameters":[],"toolName":"sign","toolVersion":"1.0"}]';
case 'pgp':
return '[{ "keyCode": "CP-450779-Pgp", "operationSetCode": "LinuxSign", "parameters": [], "toolName": "sign", "toolVersion": "1.0" }]';
case 'darwin-sign':
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppDeveloperSign","parameters":[{"parameterName":"Hardening","parameterValue":"--options=runtime"}],"toolName":"sign","toolVersion":"1.0"}]';
case 'darwin-notarize':
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppNotarize","parameters":[],"toolName":"sign","toolVersion":"1.0"}]';
case 'sign-windows':
return [
{
keyCode: 'CP-230012',
operationSetCode: 'SigntoolSign',
parameters: [
{ parameterName: 'OpusName', parameterValue: 'VS Code' },
{ parameterName: 'OpusInfo', parameterValue: 'https://code.visualstudio.com/' },
{ parameterName: 'Append', parameterValue: '/as' },
{ parameterName: 'FileDigest', parameterValue: '/fd "SHA256"' },
{ parameterName: 'PageHash', parameterValue: '/NPH' },
{ parameterName: 'TimeStamp', parameterValue: '/tr "http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer" /td sha256' }
],
toolName: 'sign',
toolVersion: '1.0'
},
{
keyCode: 'CP-230012',
operationSetCode: 'SigntoolVerify',
parameters: [
{ parameterName: 'VerifyAll', parameterValue: '/all' }
],
toolName: 'sign',
toolVersion: '1.0'
}
];
case 'sign-windows-appx':
return [
{
keyCode: 'CP-229979',
operationSetCode: 'SigntoolSign',
parameters: [
{ parameterName: 'OpusName', parameterValue: 'VS Code' },
{ parameterName: 'OpusInfo', parameterValue: 'https://code.visualstudio.com/' },
{ parameterName: 'FileDigest', parameterValue: '/fd "SHA256"' },
{ parameterName: 'PageHash', parameterValue: '/NPH' },
{ parameterName: 'TimeStamp', parameterValue: '/tr "http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer" /td sha256' }
],
toolName: 'sign',
toolVersion: '1.0'
},
{
keyCode: 'CP-229979',
operationSetCode: 'SigntoolVerify',
parameters: [],
toolName: 'sign',
toolVersion: '1.0'
}
];
case 'sign-pgp':
return [{
keyCode: 'CP-450779-Pgp',
operationSetCode: 'LinuxSign',
parameters: [],
toolName: 'sign',
toolVersion: '1.0'
}];
case 'sign-darwin':
return [{
keyCode: 'CP-401337-Apple',
operationSetCode: 'MacAppDeveloperSign',
parameters: [{ parameterName: 'Hardening', parameterValue: '--options=runtime' }],
toolName: 'sign',
toolVersion: '1.0'
}];
case 'notarize-darwin':
return [{
keyCode: 'CP-401337-Apple',
operationSetCode: 'MacAppNotarize',
parameters: [],
toolName: 'sign',
toolVersion: '1.0'
}];
default:
throw new Error(`Sign type ${type} not found`);
}
@@ -54,7 +128,7 @@ export function main([esrpCliPath, type, cert, username, password, folderPath, p
fs.writeFileSync(patternPath, pattern);
const paramsPath = tmp.tmpNameSync();
fs.writeFileSync(paramsPath, getParams(type));
fs.writeFileSync(paramsPath, JSON.stringify(getParams(type)));
const keyFile = tmp.tmpNameSync();
const key = crypto.randomBytes(32);
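Note on the getParams change above: the typed Params objects serialize to the same JSON the old string literals contained (the old 'pgp' literal differed only in whitespace), so the params file handed to the ESRP CLI keeps its content; only the type names change (e.g. 'darwin-sign' becomes 'sign-darwin'). A standalone sketch, with the 'sign-darwin' entry copied from the diff:

const darwinSignParams = [{
	keyCode: 'CP-401337-Apple',
	operationSetCode: 'MacAppDeveloperSign',
	parameters: [{ parameterName: 'Hardening', parameterValue: '--options=runtime' }],
	toolName: 'sign',
	toolVersion: '1.0'
}];
// Serializes to the exact JSON string the old 'darwin-sign' literal held.
console.log(JSON.stringify(darwinSignParams));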

View file

@@ -24,10 +24,10 @@ steps:
artifact: unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive
displayName: Download $(VSCODE_ARCH) artifact
- script: node build/azure-pipelines/common/sign $(Agent.ToolsDirectory)/esrpclient/*/*/net6.0/esrpcli.dll darwin-sign $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive VSCode-darwin-$(VSCODE_ARCH).zip
- script: node build/azure-pipelines/common/sign $(Agent.ToolsDirectory)/esrpclient/*/*/net6.0/esrpcli.dll sign-darwin $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive VSCode-darwin-$(VSCODE_ARCH).zip
displayName: Codesign
- script: node build/azure-pipelines/common/sign $(Agent.ToolsDirectory)/esrpclient/*/*/net6.0/esrpcli.dll darwin-notarize $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive VSCode-darwin-$(VSCODE_ARCH).zip
- script: node build/azure-pipelines/common/sign $(Agent.ToolsDirectory)/esrpclient/*/*/net6.0/esrpcli.dll notarize-darwin $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive VSCode-darwin-$(VSCODE_ARCH).zip
displayName: Notarize
- script: unzip $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-$(VSCODE_ARCH).zip -d $(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH)

View file

@@ -313,10 +313,10 @@ steps:
continueOnError: true
displayName: Download ESRPClient
- script: node build/azure-pipelines/common/sign $(Agent.ToolsDirectory)/esrpclient/*/*/net6.0/esrpcli.dll pgp $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) .build/linux/deb '*.deb'
- script: node build/azure-pipelines/common/sign $(Agent.ToolsDirectory)/esrpclient/*/*/net6.0/esrpcli.dll sign-pgp $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) .build/linux/deb '*.deb'
displayName: Codesign deb
- script: node build/azure-pipelines/common/sign $(Agent.ToolsDirectory)/esrpclient/*/*/net6.0/esrpcli.dll pgp $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) .build/linux/rpm '*.rpm'
- script: node build/azure-pipelines/common/sign $(Agent.ToolsDirectory)/esrpclient/*/*/net6.0/esrpcli.dll sign-pgp $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) .build/linux/rpm '*.rpm'
displayName: Codesign rpm
- script: echo "##vso[task.setvariable variable=ARTIFACT_PREFIX]attempt$(System.JobAttempt)_"

View file

@@ -126,6 +126,14 @@ variables:
value: ${{ eq(parameters.VSCODE_STEP_ON_IT, true) }}
- name: VSCODE_BUILD_MACOS_UNIVERSAL
value: ${{ and(eq(parameters.VSCODE_BUILD_MACOS, true), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true), eq(parameters.VSCODE_BUILD_MACOS_UNIVERSAL, true)) }}
- name: PRSS_CDN_URL
value: https://vscode.download.prss.microsoft.com/dbazure/download
- name: PRSS_RELEASE_TENANT_ID
value: 975f013f-7f24-47e8-a7d3-abc4752bf346
- name: PRSS_RELEASE_CLIENT_ID
value: c24324f7-e65f-4c45-8702-ed2d4c35df99
- name: PRSS_PROVISION_TENANT_ID
value: 72f988bf-86f1-41af-91ab-2d7cd011db47
- name: AZURE_CDN_URL
value: https://az764295.vo.msecnd.net
- name: AZURE_DOCUMENTDB_ENDPOINT
@@ -627,7 +635,7 @@ stages:
- stage: Publish
dependsOn:
- Compile
pool: 1es-ubuntu-20.04-x64
pool: 1es-windows-2019-x64
variables:
- name: BUILDS_API_URL
value: $(System.CollectionUri)$(System.TeamProject)/_apis/build/builds/$(Build.BuildId)/

View file

@@ -1,130 +0,0 @@
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = 'Stop'
$ProgressPreference = 'SilentlyContinue'
$ARTIFACT_PROCESSED_WILDCARD_PATH = "$env:PIPELINE_WORKSPACE/artifacts_processed_*/artifacts_processed_*"
$ARTIFACT_PROCESSED_FILE_PATH = "$env:PIPELINE_WORKSPACE/artifacts_processed_$env:SYSTEM_STAGEATTEMPT/artifacts_processed_$env:SYSTEM_STAGEATTEMPT.txt"
function Get-PipelineArtifact {
param($Name = '*')
try {
$res = Invoke-RestMethod "$($env:BUILDS_API_URL)artifacts?api-version=6.0" -Headers @{
Authorization = "Bearer $env:SYSTEM_ACCESSTOKEN"
} -MaximumRetryCount 5 -RetryIntervalSec 1
if (!$res) {
return
}
$res.value | Where-Object { $_.name -Like $Name -and $_.name -NotLike "*sbom" }
} catch {
Write-Warning $_
}
}
# This set will keep track of which artifacts have already been processed
$set = [System.Collections.Generic.HashSet[string]]::new()
if (Test-Path $ARTIFACT_PROCESSED_WILDCARD_PATH) {
# Grab the latest artifact_processed text file and load all assets already processed from that.
# This means that the latest artifact_processed_*.txt file has all of the contents of the previous ones.
# Note: The kusto-like syntax only works in PS7+ and only in scripts, not at the REPL.
Get-ChildItem $ARTIFACT_PROCESSED_WILDCARD_PATH
# Sort by file name length first and then Name to make sure we sort numerically. Ex. 12 comes after 9.
| Sort-Object { $_.Name.Length },Name -Bottom 1
| Get-Content
| ForEach-Object {
$set.Add($_) | Out-Null
Write-Host "Already processed artifact: $_"
}
}
# Create the artifact file that will be used for this run
New-Item -Path $ARTIFACT_PROCESSED_FILE_PATH -Force | Out-Null
# Determine which stages we need to watch
$stages = @(
if ($env:VSCODE_BUILD_STAGE_WINDOWS -eq 'True') { 'Windows' }
if ($env:VSCODE_BUILD_STAGE_LINUX -eq 'True') { 'Linux' }
if ($env:VSCODE_BUILD_STAGE_ALPINE -eq 'True') { 'Alpine' }
if ($env:VSCODE_BUILD_STAGE_MACOS -eq 'True') { 'macOS' }
if ($env:VSCODE_BUILD_STAGE_WEB -eq 'True') { 'Web' }
)
do {
Start-Sleep -Seconds 10
$artifacts = Get-PipelineArtifact -Name 'vscode_*'
if (!$artifacts) {
continue
}
$artifacts | ForEach-Object {
$artifactName = $_.name
if($set.Add($artifactName)) {
Write-Host "Processing artifact: '$artifactName. Downloading from: $($_.resource.downloadUrl)"
$extractPath = "$env:AGENT_TEMPDIRECTORY/$artifactName.zip"
try {
Invoke-RestMethod $_.resource.downloadUrl -OutFile $extractPath -Headers @{
Authorization = "Bearer $env:SYSTEM_ACCESSTOKEN"
} -MaximumRetryCount 5 -RetryIntervalSec 1 -TimeoutSec 300 | Out-Null
Write-Host "Extracting artifact: '$extractPath'"
Expand-Archive -Path $extractPath -DestinationPath $env:AGENT_TEMPDIRECTORY | Out-Null
} catch {
Write-Warning $_
$set.Remove($artifactName) | Out-Null
continue
}
$null,$product,$os,$arch,$type = $artifactName -split '_'
$asset = Get-ChildItem -rec "$env:AGENT_TEMPDIRECTORY/$artifactName"
if ($asset.Size -ne $_.resource.properties.artifactsize) {
Write-Warning "Artifact size mismatch for '$artifactName'. Expected: $($_.resource.properties.artifactsize). Actual: $($asset.Size)"
$set.Remove($artifactName) | Out-Null
continue
}
Write-Host "Processing artifact with the following values:"
# turning in into an object just to log nicely
@{
product = $product
os = $os
arch = $arch
type = $type
asset = $asset.Name
} | Format-Table
exec { node build/azure-pipelines/common/createAsset.js $product $os $arch $type $asset.Name $asset.FullName }
}
# Mark the artifact as processed. Make sure to keep the previously
# processed artifacts in the file as well, not just from this run.
$artifactName >> $ARTIFACT_PROCESSED_FILE_PATH
}
# Get the timeline and see if it says the other stage completed
try {
$timeline = Invoke-RestMethod "$($env:BUILDS_API_URL)timeline?api-version=6.0" -Headers @{
Authorization = "Bearer $env:SYSTEM_ACCESSTOKEN"
} -MaximumRetryCount 5 -RetryIntervalSec 1
} catch {
Write-Warning $_
continue
}
foreach ($stage in $stages) {
$otherStageFinished = $timeline.records | Where-Object { $_.name -eq $stage -and $_.type -eq 'stage' -and $_.state -eq 'completed' }
if (!$otherStageFinished) {
break
}
}
$artifacts = Get-PipelineArtifact -Name 'vscode_*'
$artifactsStillToProcess = $artifacts.Count -ne $set.Count
} while (!$otherStageFinished -or $artifactsStillToProcess)
Write-Host "Processed $($set.Count) artifacts."

View file

@@ -5,17 +5,30 @@ steps:
versionFilePath: .nvmrc
nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download
- task: SFP.build-tasks.esrpclient-tools-task.EsrpClientTool@2
displayName: "Use EsrpClient"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode-build-secrets
SecretsFilter: "github-distro-mixin-password"
SecretsFilter: "github-distro-mixin-password,esrp-aad-username,esrp-aad-password"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode-build-packages
SecretsFilter: "vscode-esrp,c24324f7-e65f-4c45-8702-ed2d4c35df99"
# allow-any-unicode-next-line
- pwsh: Write-Host "##vso[build.addbuildtag]🚀"
displayName: Add build tag
- pwsh: node build/npm/setupBuildYarnrc
displayName: Prepare build dependencies
- pwsh: yarn
workingDirectory: build
displayName: Install build dependencies
@@ -65,7 +78,25 @@ steps:
AZURE_CLIENT_SECRET: "$(AZURE_CLIENT_SECRET)"
displayName: Create build if it hasn't been created before
- pwsh: build/azure-pipelines/product-publish.ps1
- pwsh: |
$ErrorActionPreference = "Stop"
$CertCollection = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2Collection
$AuthCertBytes = [System.Convert]::FromBase64String("$(vscode-esrp)")
$CertCollection.Import($AuthCertBytes, $null, [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::Exportable -bxor [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::PersistKeySet)
$RequestSigningCertIndex = $CertCollection.Count
$RequestSigningCertBytes = [System.Convert]::FromBase64String("$(c24324f7-e65f-4c45-8702-ed2d4c35df99)")
$CertCollection.Import($RequestSigningCertBytes, $null, [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::Exportable -bxor [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::PersistKeySet)
$CertStore = New-Object System.Security.Cryptography.X509Certificates.X509Store("My","LocalMachine")
$CertStore.Open("ReadWrite")
$CertStore.AddRange($CertCollection)
$CertStore.Close()
$AuthCertSubjectName = $CertCollection[0].Subject
$RequestSigningCertSubjectName = $CertCollection[$RequestSigningCertIndex].Subject
Write-Host "##vso[task.setvariable variable=RELEASE_AUTH_CERT_SUBJECT_NAME]$AuthCertSubjectName"
Write-Host "##vso[task.setvariable variable=RELEASE_REQUEST_SIGNING_CERT_SUBJECT_NAME]$RequestSigningCertSubjectName"
displayName: Import certificates
- pwsh: node build/azure-pipelines/common/publish.js
env:
GITHUB_TOKEN: "$(github-distro-mixin-password)"
AZURE_TENANT_ID: "$(AZURE_TENANT_ID)"
@@ -75,7 +106,15 @@ steps:
AZURE_MOONCAKE_CLIENT_ID: "$(AZURE_MOONCAKE_CLIENT_ID)"
AZURE_MOONCAKE_CLIENT_SECRET: "$(AZURE_MOONCAKE_CLIENT_SECRET)"
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
RELEASE_TENANT_ID: "$(PRSS_RELEASE_TENANT_ID)"
RELEASE_CLIENT_ID: "$(PRSS_RELEASE_CLIENT_ID)"
RELEASE_AUTH_CERT_SUBJECT_NAME: "$(RELEASE_AUTH_CERT_SUBJECT_NAME)"
RELEASE_REQUEST_SIGNING_CERT_SUBJECT_NAME: "$(RELEASE_REQUEST_SIGNING_CERT_SUBJECT_NAME)"
PROVISION_TENANT_ID: "$(PRSS_PROVISION_TENANT_ID)"
PROVISION_AAD_USERNAME: "$(esrp-aad-username)"
PROVISION_AAD_PASSWORD: "$(esrp-aad-password)"
displayName: Process artifacts
retryCountOnTaskFailure: 3
- publish: $(Pipeline.Workspace)/artifacts_processed_$(System.StageAttempt)/artifacts_processed_$(System.StageAttempt).txt
artifact: artifacts_processed_$(System.StageAttempt)
@@ -108,6 +147,8 @@ steps:
if($didStageFail) {
$failedStages += $stage
Write-Host "'$stage' failed!"
Write-Host $didStageFail
} else {
Write-Host "'$stage' did not fail."
}
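The "Import certificates" step added in this file does three things: it materializes the ESRP authentication certificate and the request-signing certificate from base64-encoded Key Vault secrets, persists both in the LocalMachine\My store, and surfaces their subject names as pipeline variables for the publish step. A re-sketch of that flow follows; the base64 strings are placeholders standing in for the two Key Vault secrets referenced above, not real values.

$ErrorActionPreference = "Stop"
$AuthCertBase64    = "<base64 auth certificate>"        # placeholder for the vscode-esrp secret
$SigningCertBase64 = "<base64 signing certificate>"     # placeholder for the GUID-named secret
# -bor combines the storage flags; for these distinct single-bit flags it is
# equivalent to the -bxor used in the step above.
$flags = [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::Exportable -bor [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::PersistKeySet
# Import both blobs into one collection and remember where the second batch
# starts, since a single blob can contain more than one certificate.
$collection = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2Collection
$collection.Import([Convert]::FromBase64String($AuthCertBase64), $null, $flags)
$signingIndex = $collection.Count
$collection.Import([Convert]::FromBase64String($SigningCertBase64), $null, $flags)
# Persist the certificates in the machine store so the ESRP tooling can find
# them by subject name.
$store = New-Object System.Security.Cryptography.X509Certificates.X509Store("My", "LocalMachine")
$store.Open("ReadWrite")
$store.AddRange($collection)
$store.Close()
# Expose the subject names to later steps as pipeline variables.
Write-Host "##vso[task.setvariable variable=RELEASE_AUTH_CERT_SUBJECT_NAME]$($collection[0].Subject)"
Write-Host "##vso[task.setvariable variable=RELEASE_REQUEST_SIGNING_CERT_SUBJECT_NAME]$($collection[$signingIndex].Subject)"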

View file

@@ -242,11 +242,11 @@ steps:
echo "##vso[task.setvariable variable=EsrpCliDllPath]$EsrpCliDllPath"
displayName: Find ESRP CLI
- powershell: node build\azure-pipelines\common\sign $env:EsrpCliDllPath windows $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(CodeSigningFolderPath) '*.dll,*.exe,*.node'
- powershell: node build\azure-pipelines\common\sign $env:EsrpCliDllPath sign-windows $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(CodeSigningFolderPath) '*.dll,*.exe,*.node'
displayName: Codesign executables and shared libraries
- ${{ if eq(parameters.VSCODE_QUALITY, 'insider') }}:
- powershell: node build\azure-pipelines\common\sign $env:EsrpCliDllPath windows-appx $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(CodeSigningFolderPath) '*.appx'
- powershell: node build\azure-pipelines\common\sign $env:EsrpCliDllPath sign-windows-appx $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(CodeSigningFolderPath) '*.appx'
displayName: Codesign context menu appx package
- ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}:
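The only change in this hunk is that the sign script's first positional argument now carries an explicit verb ("windows" becomes "sign-windows", "windows-appx" becomes "sign-windows-appx"). Based purely on the invocations visible in this diff, the call shape is as follows; every value below is a placeholder, not a real pipeline secret or path.

# Argument order, as used in the steps above:
#   <esrpCliDllPath> <command> <pki> <aad-username> <aad-password> <folder> <glob>
# ESRP_PKI / ESRP_AAD_USERNAME / ESRP_AAD_PASSWORD stand in for the ESRP-PKI,
# esrp-aad-username and esrp-aad-password pipeline secrets; the folder is hypothetical.
node build\azure-pipelines\common\sign `
    $env:EsrpCliDllPath `
    sign-windows `
    $env:ESRP_PKI $env:ESRP_AAD_USERNAME $env:ESRP_AAD_PASSWORD `
    "C:\agent\_work\to-sign" `
    '*.dll,*.exe,*.node'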

View file

@@ -3,9 +3,9 @@
"version": "1.0.0",
"license": "MIT",
"devDependencies": {
"@azure/cosmos": "^3.17.3",
"@azure/identity": "^3.1.3",
"@azure/storage-blob": "^12.13.0",
"@azure/cosmos": "^3",
"@azure/identity": "^3.4.1",
"@azure/storage-blob": "^12.17.0",
"@electron/get": "^1.12.4",
"@types/ansi-colors": "^3.2.0",
"@types/byline": "^4.2.32",
@@ -56,7 +56,8 @@
"through2": "^4.0.2",
"tmp": "^0.2.1",
"vscode-universal-bundler": "^0.0.2",
"workerpool": "^6.4.0"
"workerpool": "^6.4.0",
"yauzl": "^2.10.0"
},
"scripts": {
"compile": "../node_modules/.bin/tsc -p tsconfig.build.json",

View file

@@ -22,6 +22,15 @@
"@azure/abort-controller" "^1.0.0"
tslib "^2.2.0"
"@azure/core-auth@^1.5.0":
version "1.5.0"
resolved "https://registry.yarnpkg.com/@azure/core-auth/-/core-auth-1.5.0.tgz#a41848c5c31cb3b7c84c409885267d55a2c92e44"
integrity sha512-udzoBuYG1VBoHVohDTrvKjyzel34zt77Bhp7dQntVGGD0ehVq48owENbBG8fIgkHRNUBQH5k1r0hpoMu5L8+kw==
dependencies:
"@azure/abort-controller" "^1.0.0"
"@azure/core-util" "^1.1.0"
tslib "^2.2.0"
"@azure/core-client@^1.4.0":
version "1.5.0"
resolved "https://registry.yarnpkg.com/@azure/core-client/-/core-client-1.5.0.tgz#7aabb87d20e08db3683a117191c844bc19adb74e"
@@ -117,7 +126,15 @@
dependencies:
tslib "^2.2.0"
"@azure/core-util@^1.0.0", "@azure/core-util@^1.1.1":
"@azure/core-util@^1.1.0", "@azure/core-util@^1.6.1":
version "1.6.1"
resolved "https://registry.yarnpkg.com/@azure/core-util/-/core-util-1.6.1.tgz#fea221c4fa43c26543bccf799beb30c1c7878f5a"
integrity sha512-h5taHeySlsV9qxuK64KZxy4iln1BtMYlNt5jbuEFN3UFSAd1EwKg/Gjl5a6tZ/W8t6li3xPnutOx7zbDyXnPmQ==
dependencies:
"@azure/abort-controller" "^1.0.0"
tslib "^2.2.0"
"@azure/core-util@^1.1.1":
version "1.2.0"
resolved "https://registry.yarnpkg.com/@azure/core-util/-/core-util-1.2.0.tgz#3499deba1fc36dda6f1912b791809b6f15d4a392"
integrity sha512-ffGIw+Qs8bNKNLxz5UPkz4/VBM/EZY07mPve1ZYFqYUdPwFqRj0RPk0U7LZMOfT7GCck9YjuT1Rfp1PApNl1ng==
@@ -125,7 +142,7 @@
"@azure/abort-controller" "^1.0.0"
tslib "^2.2.0"
"@azure/cosmos@^3.17.3":
"@azure/cosmos@^3":
version "3.17.3"
resolved "https://registry.yarnpkg.com/@azure/cosmos/-/cosmos-3.17.3.tgz#380398496af8ef3473ae0a9ad8cdbab32d91eb08"
integrity sha512-wBglkQ6Irjv5Vo2iw8fd6eYj60WYRSSg4/0DBkeOP6BwQ4RA91znsOHd6s3qG6UAbNgYuzC9Nnq07vlFFZkHEw==
@@ -144,27 +161,25 @@
universal-user-agent "^6.0.0"
uuid "^8.3.0"
"@azure/identity@^3.1.3":
version "3.1.3"
resolved "https://registry.yarnpkg.com/@azure/identity/-/identity-3.1.3.tgz#667a635b305d9d519e5c91cea5ba3390d0d2c198"
integrity sha512-y0jFjSfHsVPwXSwi3KaSPtOZtJZqhiqAhWUXfFYBUd/+twUBovZRXspBwLrF5rJe0r5NyvmScpQjL+TYDTQVvw==
"@azure/identity@^3.4.1":
version "3.4.1"
resolved "https://registry.yarnpkg.com/@azure/identity/-/identity-3.4.1.tgz#18ba48b7421c818ef8116e8eec3c03ec1a62649a"
integrity sha512-oQ/r5MBdfZTMIUcY5Ch8G7Vv9aIXDkEYyU4Dfqjim4MQN+LY2uiQ57P1JDopMLeHCsZxM4yy8lEdne3tM9Xhzg==
dependencies:
"@azure/abort-controller" "^1.0.0"
"@azure/core-auth" "^1.3.0"
"@azure/core-auth" "^1.5.0"
"@azure/core-client" "^1.4.0"
"@azure/core-rest-pipeline" "^1.1.0"
"@azure/core-tracing" "^1.0.0"
"@azure/core-util" "^1.0.0"
"@azure/core-util" "^1.6.1"
"@azure/logger" "^1.0.0"
"@azure/msal-browser" "^2.32.2"
"@azure/msal-common" "^9.0.2"
"@azure/msal-node" "^1.14.6"
"@azure/msal-browser" "^3.5.0"
"@azure/msal-node" "^2.5.1"
events "^3.0.0"
jws "^4.0.0"
open "^8.0.0"
stoppable "^1.1.0"
tslib "^2.2.0"
uuid "^8.3.0"
"@azure/logger@^1.0.0":
version "1.0.1"
@@ -173,36 +188,31 @@
dependencies:
tslib "^2.0.0"
"@azure/msal-browser@^2.32.2":
version "2.35.0"
resolved "https://registry.yarnpkg.com/@azure/msal-browser/-/msal-browser-2.35.0.tgz#39b553f5da140d5d16bf90e0d92f1bcc6f0d61d3"
integrity sha512-L+gSBbJfU3H81Bnj+VIVjO7jRpt2Ex+4i2YVOPE50ykfQ5W9mtBFMRCHb1K+8FzTeyQH/KkQv6bC+MdaU+3LEw==
"@azure/msal-browser@^3.5.0":
version "3.5.0"
resolved "https://registry.yarnpkg.com/@azure/msal-browser/-/msal-browser-3.5.0.tgz#eb64c931c78c2b75c70807f618e1284bbb183380"
integrity sha512-2NtMuel4CI3UEelCPKkNRXgKzpWEX48fvxIvPz7s0/sTcCaI08r05IOkH2GkXW+czUOtuY6+oGafJCpumnjRLg==
dependencies:
"@azure/msal-common" "^12.0.0"
"@azure/msal-common" "14.4.0"
"@azure/msal-common@^12.0.0":
version "12.0.0"
resolved "https://registry.yarnpkg.com/@azure/msal-common/-/msal-common-12.0.0.tgz#bcb41fd31657a34c4218ec38332de76ec6bf03e6"
integrity sha512-SvQl4JWy1yZnxyq0xng/urf103wz68UJG0K9Dq2NM2to7ePA+R1hMisKnXELJvZrEGYANGbh/Hc0T9piGqOteQ==
"@azure/msal-common@14.4.0":
version "14.4.0"
resolved "https://registry.yarnpkg.com/@azure/msal-common/-/msal-common-14.4.0.tgz#f938c1d96bb73d65baab985c96faaa273c97cfd5"
integrity sha512-ffCymScQuMKVj+YVfwNI52A5Tu+uiZO2eTf+c+3TXxdAssks4nokJhtr+uOOMxH0zDi6d1OjFKFKeXODK0YLSg==
"@azure/msal-common@^9.0.2":
version "9.1.1"
resolved "https://registry.yarnpkg.com/@azure/msal-common/-/msal-common-9.1.1.tgz#906d27905c956fe91bd8f31855fc624359098d83"
integrity sha512-we9xR8lvu47fF0h+J8KyXoRy9+G/fPzm3QEa2TrdR3jaVS3LKAyE2qyMuUkNdbVkvzl8Zr9f7l+IUSP22HeqXw==
"@azure/msal-node@^1.14.6":
version "1.17.0"
resolved "https://registry.yarnpkg.com/@azure/msal-node/-/msal-node-1.17.0.tgz#fa7bba155719a7e26ac6e8d4941dd56e807e458a"
integrity sha512-aOKykKxDc+Kf5vcdOUPdKlJ96YAIyrHyl4W8RyfMqw0iApDckOuhejNwlZr6/M7U40wo1Wj4PwxRVx7d8OFBFg==
"@azure/msal-node@^2.5.1":
version "2.5.1"
resolved "https://registry.yarnpkg.com/@azure/msal-node/-/msal-node-2.5.1.tgz#d180a1ba5fdc611a318a8f018a2db3453e2e2898"
integrity sha512-PsPRISqCG253HQk1cAS7eJW7NWTbnBGpG+vcGGz5z4JYRdnM2EIXlj1aBpXCdozenEPtXEVvHn2ELleW1w82nQ==
dependencies:
"@azure/msal-common" "^12.0.0"
"@azure/msal-common" "14.4.0"
jsonwebtoken "^9.0.0"
uuid "^8.3.0"
"@azure/storage-blob@^12.13.0":
version "12.13.0"
resolved "https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.13.0.tgz#9209cbb5c2cd463fb967a0f2ae144ace20879160"
integrity sha512-t3Q2lvBMJucgTjQcP5+hvEJMAsJSk0qmAnjDLie2td017IiduZbbC9BOcFfmwzR6y6cJdZOuewLCNFmEx9IrXA==
"@azure/storage-blob@^12.17.0":
version "12.17.0"
resolved "https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.17.0.tgz#04aad7f59cb08dbbe5b1b672a9f5b6256c8c9006"
integrity sha512-sM4vpsCpcCApagRW5UIjQNlNylo02my2opgp0Emi8x888hZUvJ3dN69Oq20cEGXkMUWnoCrBaB0zyS3yeB87sQ==
dependencies:
"@azure/abort-controller" "^1.0.0"
"@azure/core-http" "^3.0.0"