Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
microsoft
GitHub Repository: microsoft/vscode
Path: blob/main/build/azure-pipelines/common/publish.ts
3520 views
1
/*---------------------------------------------------------------------------------------------
2
* Copyright (c) Microsoft Corporation. All rights reserved.
3
* Licensed under the MIT License. See License.txt in the project root for license information.
4
*--------------------------------------------------------------------------------------------*/
5
6
import fs from 'fs';
7
import path from 'path';
8
import { Readable } from 'stream';
9
import type { ReadableStream } from 'stream/web';
10
import { pipeline } from 'node:stream/promises';
11
import yauzl from 'yauzl';
12
import crypto from 'crypto';
13
import { retry } from './retry';
14
import { CosmosClient } from '@azure/cosmos';
15
import cp from 'child_process';
16
import os from 'os';
17
import { Worker, isMainThread, workerData } from 'node:worker_threads';
18
import { ConfidentialClientApplication } from '@azure/msal-node';
19
import { BlobClient, BlobServiceClient, BlockBlobClient, ContainerClient, ContainerSASPermissions, generateBlobSASQueryParameters } from '@azure/storage-blob';
20
import jws from 'jws';
21
import { clearInterval, setInterval } from 'node:timers';
22
23
/**
 * Reads a required environment variable.
 *
 * @param name Name of the environment variable.
 * @returns The variable's value.
 * @throws Error when the variable is unset.
 */
export function e(name: string): string {
	const value = process.env[name];

	if (typeof value === 'string') {
		return value;
	}

	throw new Error(`Missing env: ${name}`);
}
32
33
/**
 * Consumes a readable stream and computes its digest.
 *
 * @param hashName Hash algorithm name (e.g. 'sha1', 'sha256').
 * @param stream Stream to hash; resolved once the stream closes.
 * @returns The raw digest bytes.
 */
function hashStream(hashName: string, stream: Readable): Promise<Buffer> {
	return new Promise<Buffer>((resolve, reject) => {
		const hasher = crypto.createHash(hashName);

		stream
			.on('data', chunk => hasher.update(chunk))
			.on('error', reject)
			.on('close', () => resolve(hasher.digest()));
	});
}
43
44
/**
 * Response payload returned by the ESRP release service when a release
 * request is submitted (see ESRPReleaseService.submitRelease).
 */
interface ReleaseSubmitResponse {
	operationId: string;
	esrpCorrelationId: string;
	code?: string;
	message?: string;
	target?: string;
	innerError?: any;
}

/** Progress entry for a single activity within an ESRP release workflow. */
interface ReleaseActivityInfo {
	activityId: string;
	activityType: string;
	name: string;
	status: string;
	errorCode: number;
	errorMessages: string[];
	beginTime?: Date;
	endTime?: Date;
	lastModifiedAt?: Date;
}

/** Nested service error; errors can chain through innerError. */
interface InnerServiceError {
	code: string;
	details: { [key: string]: string };
	innerError?: InnerServiceError;
}

/** Error code/messages pair attached to a failed release. */
interface ReleaseError {
	errorCode: number;
	errorMessages: string[];
}

/**
 * Overall release states reported by the service. The polling loop in
 * ESRPReleaseService.createRelease only distinguishes 'pass', 'aborted'
 * and 'inprogress'; any other value is treated as an unknown error.
 */
const enum StatusCode {
	Pass = 'pass',
	Aborted = 'aborted',
	Inprogress = 'inprogress',
	FailCanRetry = 'failCanRetry',
	FailDoNotRetry = 'failDoNotRetry',
	PendingAnalysis = 'pendingAnalysis',
	Cancelled = 'cancelled'
}
85
86
/** Status message returned when polling a release operation (grs endpoint). */
interface ReleaseResultMessage {
	activities: ReleaseActivityInfo[];
	childWorkflowType: string;
	clientId: string;
	customerCorrelationId: string;
	errorInfo: InnerServiceError;
	groupId: string;
	lastModifiedAt: Date;
	operationId: string;
	releaseError: ReleaseError;
	requestSubmittedAt: Date;
	routedRegion: string;
	status: StatusCode;
	totalFileCount: number;
	totalReleaseSize: number;
	version: string;
}

/**
 * Describes one file in a release request or response.
 * NOTE(review): this interface is declared a second time further down in this
 * file with identical members. TypeScript merges the two declarations, so it
 * compiles, but the duplicate should be removed.
 */
interface ReleaseFileInfo {
	name?: string;
	hash?: number[];
	sourceLocation?: FileLocation;
	sizeInBytes?: number;
	hashType?: FileHashType;
	fileId?: any;
	distributionRelativePath?: string;
	partNumber?: string;
	friendlyFileName?: string;
	tenantFileLocationType?: string;
	tenantFileLocation?: string;
	signedEngineeringCopyLocation?: string;
	encryptedDistributionBlobLocation?: string;
	preEncryptedDistributionBlobLocation?: string;
	secondaryDistributionHashRequired?: boolean;
	secondaryDistributionHashType?: FileHashType;
	lastModifiedAt?: Date;
	cultureCodes?: string[];
	displayFileInDownloadCenter?: boolean;
	isPrimaryFileInDownloadCenter?: boolean;
	fileDownloadDetails?: FileDownloadDetails[];
}
127
128
/** Alias used for ReleaseDetailsMessage.files; adds no members of its own. */
interface ReleaseDetailsFileInfo extends ReleaseFileInfo { }

/** Full release details returned by the grd endpoint (getReleaseDetails). */
interface ReleaseDetailsMessage extends ReleaseResultMessage {
	clusterRegion: string;
	correlationVector: string;
	releaseCompletedAt?: Date;
	releaseInfo: ReleaseInfo;
	productInfo: ProductInfo;
	createdBy: UserInfo;
	owners: OwnerInfo[];
	accessPermissionsInfo: AccessPermissionsInfo;
	files: ReleaseDetailsFileInfo[];
	comments: string[];
	cancellationReason: string;
	downloadCenterInfo: DownloadCenterInfo;
}

/** Product metadata attached to a release request. */
interface ProductInfo {
	name?: string;
	version?: string;
	description?: string;
}

/** Release-level metadata: title, approver requirements, free-form properties. */
interface ReleaseInfo {
	title?: string;
	minimumNumberOfApprovers: number;
	properties?: { [key: string]: string };
	isRevision?: boolean;
	revisionNumber?: string;
}

/** Only Azure Blob sources are used by this script. */
type FileLocationType = 'azureBlob';

/** Where the release service should fetch a file from. */
interface FileLocation {
	type: FileLocationType;
	blobUrl: string;
	uncPath?: string;
	url?: string;
}

/** Hash algorithms accepted for file integrity checks. */
type FileHashType = 'sha256' | 'sha1';

/** Download link produced for a released file (read back in createRelease). */
interface FileDownloadDetails {
	portalName: string;
	downloadUrl: string;
}

/** Optional routing hints for the release request. */
interface RoutingInfo {
	intent?: string;
	contentType?: string;
	contentOrigin?: string;
	productState?: string;
	audience?: string;
}
183
184
// NOTE(review): exact duplicate of the ReleaseFileInfo declaration earlier in
// this file. TypeScript merges identical interface declarations, so this is
// harmless at compile time, but one of the two copies should be deleted.
interface ReleaseFileInfo {
	name?: string;
	hash?: number[];
	sourceLocation?: FileLocation;
	sizeInBytes?: number;
	hashType?: FileHashType;
	fileId?: any;
	distributionRelativePath?: string;
	partNumber?: string;
	friendlyFileName?: string;
	tenantFileLocationType?: string;
	tenantFileLocation?: string;
	signedEngineeringCopyLocation?: string;
	encryptedDistributionBlobLocation?: string;
	preEncryptedDistributionBlobLocation?: string;
	secondaryDistributionHashRequired?: boolean;
	secondaryDistributionHashType?: FileHashType;
	lastModifiedAt?: Date;
	cultureCodes?: string[];
	displayFileInDownloadCenter?: boolean;
	isPrimaryFileInDownloadCenter?: boolean;
	fileDownloadDetails?: FileDownloadDetails[];
}
207
208
/** Identifies a user by user principal name. */
interface UserInfo {
	userPrincipalName?: string;
}

/** Marks a user as an owner of the release. */
interface OwnerInfo {
	owner: UserInfo;
}

/** Marks a user as an approver of the release. */
interface ApproverInfo {
	approver: UserInfo;
	isAutoApproved: boolean;
	isMandatory: boolean;
}

/** Publisher and download-channel permissions for the release. */
interface AccessPermissionsInfo {
	mainPublisher?: string;
	releasePublishers?: string[];
	channelDownloadEntityDetails?: { [key: string]: string[] };
}

/** Localized Download Center listing metadata. */
interface DownloadCenterLocaleInfo {
	cultureCode?: string;
	downloadTitle?: string;
	shortName?: string;
	shortDescription?: string;
	longDescription?: string;
	instructions?: string;
	additionalInfo?: string;
	keywords?: string[];
	version?: string;
	relatedLinks?: { [key: string]: URL };
}

/** Microsoft Download Center publication settings. */
interface DownloadCenterInfo {
	downloadCenterId: number;
	publishToDownloadCenter?: boolean;
	publishingGroup?: string;
	operatingSystems?: string[];
	relatedReleases?: string[];
	kbNumbers?: string[];
	sbNumbers?: string[];
	locales?: DownloadCenterLocaleInfo[];
	additionalProperties?: { [key: string]: string };
}

/**
 * Top-level payload POSTed to the release service, built in
 * ESRPReleaseService.submitRelease. jwsToken is filled in last, after the
 * rest of the message has been signed.
 */
interface ReleaseRequestMessage {
	driEmail: string[];
	groupId?: string;
	customerCorrelationId: string;
	esrpCorrelationId: string;
	contextData?: { [key: string]: string };
	releaseInfo: ReleaseInfo;
	productInfo: ProductInfo;
	files: ReleaseFileInfo[];
	routingInfo?: RoutingInfo;
	createdBy: UserInfo;
	owners: OwnerInfo[];
	approvers: ApproverInfo[];
	accessPermissionsInfo: AccessPermissionsInfo;
	jwsToken?: string;
	publisherId?: string;
	downloadCenterInfo?: DownloadCenterInfo;
}
271
272
/**
 * Strips PEM armor and newlines from a certificate and decodes the
 * remaining base64 body into raw DER bytes.
 */
function getCertificateBuffer(input: string) {
	const base64 = input.replace(/-----BEGIN CERTIFICATE-----|-----END CERTIFICATE-----|\n/g, '');
	return Buffer.from(base64, 'base64');
}
275
276
/**
 * Computes the thumbprint (hash of the DER bytes) of a PEM certificate
 * using the given hash algorithm.
 */
function getThumbprint(input: string, algorithm: string): Buffer {
	const der = getCertificateBuffer(input);
	return crypto.createHash(algorithm).update(der).digest();
}
280
281
/**
 * Extracts the private key from a base64-encoded PFX bundle by shelling out
 * to `openssl pkcs12` (the PFX is assumed to have an empty passphrase).
 *
 * Fix: previously this wrote to fixed names ('cert.pfx', 'key.pem') directly
 * in os.tmpdir(), which races against concurrent invocations/processes and
 * collides with getCertificatesFromPFX. A unique private temp directory
 * (mode 0700 via mkdtemp) avoids both problems and keeps the decrypted key
 * out of a world-readable path.
 *
 * @param pfx Base64-encoded PFX contents.
 * @returns The first PEM PRIVATE KEY block found in the openssl output.
 * @throws Error when openssl fails or no private key is present.
 */
function getKeyFromPFX(pfx: string): string {
	const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'esrp-key-'));
	const pfxCertificatePath = path.join(tmpDir, 'cert.pfx');
	const pemKeyPath = path.join(tmpDir, 'key.pem');

	try {
		const pfxCertificate = Buffer.from(pfx, 'base64');
		fs.writeFileSync(pfxCertificatePath, pfxCertificate);
		cp.execSync(`openssl pkcs12 -in "${pfxCertificatePath}" -nocerts -nodes -out "${pemKeyPath}" -passin pass:`);
		const raw = fs.readFileSync(pemKeyPath, 'utf-8');
		const matches = raw.match(/-----BEGIN PRIVATE KEY-----[\s\S]+?-----END PRIVATE KEY-----/g);

		if (!matches) {
			// Previously a non-null assertion produced an opaque TypeError here
			throw new Error('No private key found in PFX');
		}

		return matches[0];
	} finally {
		// Removes the temp dir and both files, even if openssl failed
		fs.rmSync(tmpDir, { recursive: true, force: true });
	}
}
297
298
/**
 * Extracts all certificates from a base64-encoded PFX bundle by shelling out
 * to `openssl pkcs12` (empty passphrase assumed), returning the PEM blocks in
 * reverse of the order openssl emitted them (leaf certificate first, per the
 * usage in ESRPReleaseService.create which takes element [0]).
 *
 * Fix: previously this wrote to fixed names ('cert.pfx', 'cert.pem') directly
 * in os.tmpdir(), racing against concurrent invocations/processes and
 * colliding with getKeyFromPFX's 'cert.pfx'. A unique mkdtemp directory
 * avoids the races.
 *
 * @param pfx Base64-encoded PFX contents.
 * @returns PEM certificate blocks, reversed; empty array when none found.
 */
function getCertificatesFromPFX(pfx: string): string[] {
	const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'esrp-cert-'));
	const pfxCertificatePath = path.join(tmpDir, 'cert.pfx');
	const pemCertificatePath = path.join(tmpDir, 'cert.pem');

	try {
		const pfxCertificate = Buffer.from(pfx, 'base64');
		fs.writeFileSync(pfxCertificatePath, pfxCertificate);
		cp.execSync(`openssl pkcs12 -in "${pfxCertificatePath}" -nokeys -out "${pemCertificatePath}" -passin pass:`);
		const raw = fs.readFileSync(pemCertificatePath, 'utf-8');
		const matches = raw.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g);
		return matches ? matches.reverse() : [];
	} finally {
		// Removes the temp dir and both files, even if openssl failed
		fs.rmSync(tmpDir, { recursive: true, force: true });
	}
}
314
315
/**
 * Client for the ESRP release service. Uploads a file to a staging blob
 * container, submits a signed release request, polls until the release
 * completes, and returns the resulting CDN download URL.
 */
class ESRPReleaseService {

	/**
	 * Authenticates against AAD using a client certificate extracted from the
	 * given PFX and returns a ready-to-use service instance.
	 *
	 * @param log Logger, typically prefixed with the artifact name.
	 * @param tenantId AAD tenant.
	 * @param clientId AAD application (also used as the ESRP client id in URLs).
	 * @param authCertificatePfx Base64 PFX used for AAD client-credential auth.
	 * @param requestSigningCertificatePfx Base64 PFX used to sign release requests (JWS).
	 * @param containerClient Staging container where release files are uploaded.
	 * @param stagingSasToken Read SAS appended to staged blob URLs for the service.
	 */
	static async create(
		log: (...args: any[]) => void,
		tenantId: string,
		clientId: string,
		authCertificatePfx: string,
		requestSigningCertificatePfx: string,
		containerClient: ContainerClient,
		stagingSasToken: string
	) {
		const authKey = getKeyFromPFX(authCertificatePfx);
		// [0] is the first certificate returned by getCertificatesFromPFX (which reverses openssl's output order)
		const authCertificate = getCertificatesFromPFX(authCertificatePfx)[0];
		const requestSigningKey = getKeyFromPFX(requestSigningCertificatePfx);
		const requestSigningCertificates = getCertificatesFromPFX(requestSigningCertificatePfx);

		const app = new ConfidentialClientApplication({
			auth: {
				clientId,
				authority: `https://login.microsoftonline.com/${tenantId}`,
				clientCertificate: {
					thumbprintSha256: getThumbprint(authCertificate, 'sha256').toString('hex'),
					privateKey: authKey,
					x5c: authCertificate
				}
			}
		});

		const response = await app.acquireTokenByClientCredential({
			scopes: ['https://api.esrp.microsoft.com/.default']
		});

		// NOTE(review): response is non-null-asserted; acquireTokenByClientCredential
		// can resolve null, which would throw a TypeError here.
		return new ESRPReleaseService(log, clientId, response!.accessToken, requestSigningCertificates, requestSigningKey, containerClient, stagingSasToken);
	}

	private static API_URL = 'https://api.esrp.microsoft.com/api/v3/releaseservices/clients/';

	private constructor(
		private readonly log: (...args: any[]) => void,
		private readonly clientId: string,
		private readonly accessToken: string,
		private readonly requestSigningCertificates: string[],
		private readonly requestSigningKey: string,
		private readonly containerClient: ContainerClient,
		private readonly stagingSasToken: string
	) { }

	/**
	 * Uploads the file to staging, submits the release, and polls until it
	 * passes. The staged blob is always deleted afterwards, whether or not
	 * the release succeeded.
	 *
	 * @returns The download-center URL of the released file.
	 * @throws Error when the release is aborted, fails, or times out.
	 */
	async createRelease(version: string, filePath: string, friendlyFileName: string) {
		// A fresh UUID doubles as the blob name and the correlation id
		const correlationId = crypto.randomUUID();
		const blobClient = this.containerClient.getBlockBlobClient(correlationId);

		this.log(`Uploading ${filePath} to ${blobClient.url}`);
		await blobClient.uploadFile(filePath);
		this.log('Uploaded blob successfully');

		try {
			this.log(`Submitting release for ${version}: ${filePath}`);
			const submitReleaseResult = await this.submitRelease(version, filePath, friendlyFileName, correlationId, blobClient);

			this.log(`Successfully submitted release ${submitReleaseResult.operationId}. Polling for completion...`);

			// Poll every 5 seconds, wait 60 minutes max -> poll 60/5*60=720 times
			for (let i = 0; i < 720; i++) {
				await new Promise(c => setTimeout(c, 5000));
				const releaseStatus = await this.getReleaseStatus(submitReleaseResult.operationId);

				if (releaseStatus.status === 'pass') {
					break;
				} else if (releaseStatus.status === 'aborted') {
					this.log(JSON.stringify(releaseStatus));
					throw new Error(`Release was aborted`);
				} else if (releaseStatus.status !== 'inprogress') {
					// Any status other than pass/aborted/inprogress is treated as fatal
					this.log(JSON.stringify(releaseStatus));
					throw new Error(`Unknown error when polling for release`);
				}
			}

			const releaseDetails = await this.getReleaseDetails(submitReleaseResult.operationId);

			// If the loop above exhausted all 720 polls while still in progress,
			// the status here will not be 'pass' and we report a timeout
			if (releaseDetails.status !== 'pass') {
				throw new Error(`Timed out waiting for release: ${JSON.stringify(releaseDetails)}`);
			}

			this.log('Successfully created release:', releaseDetails.files[0].fileDownloadDetails![0].downloadUrl);
			return releaseDetails.files[0].fileDownloadDetails![0].downloadUrl;
		} finally {
			// The staged blob is only an input to the release; clean it up regardless of outcome
			this.log(`Deleting blob ${blobClient.url}`);
			await blobClient.delete();
			this.log('Deleted blob successfully');
		}
	}

	/**
	 * Builds, signs (JWS) and POSTs the release request message for a single
	 * file that has already been uploaded to the staging container.
	 */
	private async submitRelease(
		version: string,
		filePath: string,
		friendlyFileName: string,
		correlationId: string,
		blobClient: BlobClient
	): Promise<ReleaseSubmitResponse> {
		const size = fs.statSync(filePath).size;
		const hash = await hashStream('sha256', fs.createReadStream(filePath));
		// The service reads the blob through this SAS-authenticated URL
		const blobUrl = `${blobClient.url}?${this.stagingSasToken}`;

		const message: ReleaseRequestMessage = {
			customerCorrelationId: correlationId,
			esrpCorrelationId: correlationId,
			driEmail: ['[email protected]'],
			createdBy: { userPrincipalName: '[email protected]' },
			owners: [{ owner: { userPrincipalName: '[email protected]' } }],
			approvers: [{ approver: { userPrincipalName: '[email protected]' }, isAutoApproved: true, isMandatory: false }],
			releaseInfo: {
				title: 'VS Code',
				properties: {
					'ReleaseContentType': 'InstallPackage'
				},
				minimumNumberOfApprovers: 1
			},
			productInfo: {
				name: 'VS Code',
				version,
				description: 'VS Code'
			},
			accessPermissionsInfo: {
				mainPublisher: 'VSCode',
				channelDownloadEntityDetails: {
					AllDownloadEntities: ['VSCode']
				}
			},
			routingInfo: {
				intent: 'filedownloadlinkgeneration'
			},
			files: [{
				name: path.basename(filePath),
				friendlyFileName,
				tenantFileLocation: blobUrl,
				tenantFileLocationType: 'AzureBlob',
				sourceLocation: {
					type: 'azureBlob',
					blobUrl
				},
				hashType: 'sha256',
				// ReleaseFileInfo.hash is number[], so spread the Buffer into bytes
				hash: Array.from(hash),
				sizeInBytes: size
			}]
		};

		// Sign the message (without the token) and attach the signature to it
		message.jwsToken = await this.generateJwsToken(message);

		const res = await fetch(`${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations`, {
			method: 'POST',
			headers: {
				'Content-Type': 'application/json',
				'Authorization': `Bearer ${this.accessToken}`
			},
			body: JSON.stringify(message)
		});

		if (!res.ok) {
			const text = await res.text();
			throw new Error(`Failed to submit release: ${res.statusText}\n${text}`);
		}

		return await res.json() as ReleaseSubmitResponse;
	}

	/** Fetches the current status of a release operation (grs endpoint), with retries. */
	private async getReleaseStatus(releaseId: string): Promise<ReleaseResultMessage> {
		const url = `${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations/grs/${releaseId}`;

		const res = await retry(() => fetch(url, {
			headers: {
				'Authorization': `Bearer ${this.accessToken}`
			}
		}));

		if (!res.ok) {
			const text = await res.text();
			throw new Error(`Failed to get release status: ${res.statusText}\n${text}`);
		}

		return await res.json() as ReleaseResultMessage;
	}

	/** Fetches the full details of a release operation (grd endpoint), with retries. */
	private async getReleaseDetails(releaseId: string): Promise<ReleaseDetailsMessage> {
		const url = `${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations/grd/${releaseId}`;

		const res = await retry(() => fetch(url, {
			headers: {
				'Authorization': `Bearer ${this.accessToken}`
			}
		}));

		if (!res.ok) {
			const text = await res.text();
			// NOTE(review): message says 'release status' — copy/paste from
			// getReleaseStatus; consider 'Failed to get release details'.
			throw new Error(`Failed to get release status: ${res.statusText}\n${text}`);
		}

		return await res.json() as ReleaseDetailsMessage;
	}

	/**
	 * Signs the release message as a JWS token using the request-signing
	 * certificate chain. Header quirks below are requirements of the release
	 * service, not standard JWS.
	 */
	private async generateJwsToken(message: ReleaseRequestMessage): Promise<string> {
		return jws.sign({
			header: {
				alg: 'RS256',
				crit: ['exp', 'x5t'],
				// Release service uses ticks, not seconds :roll_eyes: (https://stackoverflow.com/a/7968483)
				exp: ((Date.now() + (6 * 60 * 1000)) * 10000) + 621355968000000000,
				// Release service uses hex format, not base64url :roll_eyes:
				x5t: getThumbprint(this.requestSigningCertificates[0], 'sha1').toString('hex'),
				// Release service uses a '.' separated string, not an array of strings :roll_eyes:
				x5c: this.requestSigningCertificates.map(c => getCertificateBuffer(c).toString('base64url')).join('.') as any,
			},
			payload: message,
			privateKey: this.requestSigningKey,
		});
	}
}
531
532
/**
 * Persistent record of which artifacts have been processed, stored as one
 * artifact name per line in a file under the pipeline workspace, so that a
 * retried stage attempt can resume where the previous attempt left off.
 */
class State {

	private statePath: string;
	private processed = new Set<string>();

	constructor() {
		const workspace = e('PIPELINE_WORKSPACE');

		// Locate the state directory written by the highest previous attempt, if any
		let previous: { name: string; attempt: number } | undefined;
		for (const entry of fs.readdirSync(workspace)) {
			const match = /^artifacts_processed_(\d+)$/.exec(entry);
			if (match) {
				const attempt = Number(match[1]);
				if (!previous || attempt > previous.attempt) {
					previous = { name: match[0], attempt };
				}
			}
		}

		if (previous) {
			const previousStatePath = path.join(workspace, previous.name, previous.name + '.txt');
			for (const name of fs.readFileSync(previousStatePath, 'utf8').split(/\n/)) {
				if (name) {
					this.processed.add(name);
				}
			}
		}

		// Seed this attempt's state file with everything carried over
		const stageAttempt = e('SYSTEM_STAGEATTEMPT');
		this.statePath = path.join(workspace, `artifacts_processed_${stageAttempt}`, `artifacts_processed_${stageAttempt}.txt`);
		fs.mkdirSync(path.dirname(this.statePath), { recursive: true });
		fs.writeFileSync(this.statePath, [...this.processed.values()].map(name => `${name}\n`).join(''));
	}

	/** Number of artifacts recorded as processed. */
	get size(): number {
		return this.processed.size;
	}

	/** Whether the given artifact name has already been processed. */
	has(name: string): boolean {
		return this.processed.has(name);
	}

	/** Records an artifact as processed, both in memory and on disk. */
	add(name: string): void {
		this.processed.add(name);
		fs.appendFileSync(this.statePath, `${name}\n`);
	}

	[Symbol.iterator](): IterableIterator<string> {
		return this.processed[Symbol.iterator]();
	}
}
573
574
/**
 * Fetch options used for every Azure DevOps REST call: browser-like headers
 * plus the pipeline's access token.
 */
const azdoFetchOptions = {
	headers: {
		// Pretend we're a web browser to avoid download rate limits
		'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.0.0',
		'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
		'Accept-Encoding': 'gzip, deflate, br',
		'Accept-Language': 'en-US,en;q=0.9',
		'Referer': 'https://dev.azure.com',
		Authorization: `Bearer ${e('SYSTEM_ACCESSTOKEN')}`
	}
};
585
586
/**
 * Performs a GET against the Azure DevOps build API (with retries), aborting
 * after two minutes, and returns the parsed JSON response.
 *
 * @param path API path relative to BUILDS_API_URL (e.g. 'artifacts').
 * @throws Error on any non-2xx status code.
 */
export async function requestAZDOAPI<T>(path: string): Promise<T> {
	const controller = new AbortController();
	const timer = setTimeout(() => controller.abort(), 2 * 60 * 1000);

	try {
		const res = await retry(() => fetch(`${e('BUILDS_API_URL')}${path}?api-version=6.0`, { ...azdoFetchOptions, signal: controller.signal }));

		if (!res.ok) {
			throw new Error(`Unexpected status code: ${res.status}`);
		}

		return await res.json();
	} finally {
		clearTimeout(timer);
	}
}
602
603
/** Shape of an Azure DevOps pipeline artifact as returned by the artifacts API. */
export interface Artifact {
	readonly name: string;
	readonly resource: {
		readonly downloadUrl: string;
		readonly properties: {
			readonly artifactsize: number;
		};
	};
}
612
613
/**
 * Lists the current pipeline's artifacts, keeping only the vscode_* ones
 * and excluding SBOM artifacts.
 */
async function getPipelineArtifacts(): Promise<Artifact[]> {
	const { value } = await requestAZDOAPI<{ readonly value: Artifact[] }>('artifacts');
	return value.filter(artifact => artifact.name.startsWith('vscode_') && !artifact.name.endsWith('sbom'));
}
617
618
/** Subset of the AZDO build timeline used to track stage completion in main(). */
interface Timeline {
	readonly records: {
		readonly name: string;
		readonly type: string;
		readonly state: string;
		readonly result: string;
	}[];
}
626
627
/** Fetches the current pipeline's timeline of stage/job records. */
async function getPipelineTimeline(): Promise<Timeline> {
	const timeline = await requestAZDOAPI<Timeline>('timeline');
	return timeline;
}
630
631
/**
 * Downloads a pipeline artifact ZIP to the given path, aborting after four
 * minutes. Note: retries are handled by the caller, not here.
 */
async function downloadArtifact(artifact: Artifact, downloadPath: string): Promise<void> {
	const controller = new AbortController();
	const timer = setTimeout(() => controller.abort(), 4 * 60 * 1000);

	try {
		const res = await fetch(artifact.resource.downloadUrl, { ...azdoFetchOptions, signal: controller.signal });

		if (!res.ok) {
			throw new Error(`Unexpected status code: ${res.status}`);
		}

		await pipeline(Readable.fromWeb(res.body as ReadableStream), fs.createWriteStream(downloadPath));
	} finally {
		clearTimeout(timer);
	}
}
647
648
/**
 * Extracts a ZIP archive into outputPath, creating directories as needed.
 *
 * Fix: write-stream ('ostream') errors were previously unhandled, so a disk
 * failure during extraction left the returned promise pending forever; both
 * streams now reject the promise on error. The inconsistent istream?/istream!
 * usage was also normalized (yauzl passes a stream whenever err is null).
 *
 * NOTE(review): entry.fileName is joined into outputPath without
 * sanitization ("zip-slip"); acceptable here only because the archives come
 * from this pipeline's own artifacts.
 *
 * @returns Paths of all extracted files (directory entries are skipped).
 */
async function unzip(packagePath: string, outputPath: string): Promise<string[]> {
	return new Promise((resolve, reject) => {
		yauzl.open(packagePath, { lazyEntries: true, autoClose: true }, (err, zipfile) => {
			if (err) {
				return reject(err);
			}

			const result: string[] = [];

			zipfile!.on('entry', entry => {
				// Directory entries end with '/': nothing to write, move on
				if (/\/$/.test(entry.fileName)) {
					zipfile!.readEntry();
					return;
				}

				zipfile!.openReadStream(entry, (err, istream) => {
					if (err) {
						return reject(err);
					}

					const filePath = path.join(outputPath, entry.fileName);
					fs.mkdirSync(path.dirname(filePath), { recursive: true });

					const ostream = fs.createWriteStream(filePath);
					ostream.on('finish', () => {
						result.push(filePath);
						zipfile!.readEntry();
					});
					ostream.on('error', err => reject(err)); // previously missing: write failures hung the promise
					istream!.on('error', err => reject(err));
					istream!.pipe(ostream);
				});
			});

			zipfile!.on('close', () => resolve(result));
			zipfile!.readEntry();
		});
	});
}
684
685
/** Asset document stored in Cosmos DB for each released build artifact. */
interface Asset {
	platform: string;
	type: string;
	url: string;
	mooncakeUrl?: string;
	prssUrl?: string;
	hash: string;         // hex-encoded sha1 (kept for legacy consumers)
	sha256hash: string;   // hex-encoded sha256
	size: number;
	supportsFastUpdate?: boolean;
}
696
697
// Contains all of the logic for mapping details to our actual product names in CosmosDB
698
function getPlatform(product: string, os: string, arch: string, type: string): string {
699
switch (os) {
700
case 'win32':
701
switch (product) {
702
case 'client': {
703
switch (type) {
704
case 'archive':
705
return `win32-${arch}-archive`;
706
case 'setup':
707
return `win32-${arch}`;
708
case 'user-setup':
709
return `win32-${arch}-user`;
710
default:
711
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
712
}
713
}
714
case 'server':
715
return `server-win32-${arch}`;
716
case 'web':
717
return `server-win32-${arch}-web`;
718
case 'cli':
719
return `cli-win32-${arch}`;
720
default:
721
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
722
}
723
case 'alpine':
724
switch (product) {
725
case 'server':
726
return `server-alpine-${arch}`;
727
case 'web':
728
return `server-alpine-${arch}-web`;
729
case 'cli':
730
return `cli-alpine-${arch}`;
731
default:
732
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
733
}
734
case 'linux':
735
switch (type) {
736
case 'snap':
737
return `linux-snap-${arch}`;
738
case 'archive-unsigned':
739
switch (product) {
740
case 'client':
741
return `linux-${arch}`;
742
case 'server':
743
return `server-linux-${arch}`;
744
case 'web':
745
if (arch === 'standalone') {
746
return 'web-standalone';
747
}
748
return `server-linux-${arch}-web`;
749
default:
750
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
751
}
752
case 'deb-package':
753
return `linux-deb-${arch}`;
754
case 'rpm-package':
755
return `linux-rpm-${arch}`;
756
case 'cli':
757
return `cli-linux-${arch}`;
758
default:
759
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
760
}
761
case 'darwin':
762
switch (product) {
763
case 'client':
764
if (arch === 'x64') {
765
return 'darwin';
766
}
767
return `darwin-${arch}`;
768
case 'server':
769
if (arch === 'x64') {
770
return 'server-darwin';
771
}
772
return `server-darwin-${arch}`;
773
case 'web':
774
if (arch === 'x64') {
775
return 'server-darwin-web';
776
}
777
return `server-darwin-${arch}-web`;
778
case 'cli':
779
return `cli-darwin-${arch}`;
780
default:
781
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
782
}
783
default:
784
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
785
}
786
}
787
788
// Contains all of the logic for mapping types to our actual types in CosmosDB
789
function getRealType(type: string) {
790
switch (type) {
791
case 'user-setup':
792
return 'setup';
793
case 'deb-package':
794
case 'rpm-package':
795
return 'package';
796
default:
797
return type;
798
}
799
}
800
801
/**
 * Runs `fn` while holding a 60-second lease on the given blob (creating the
 * blob first, since a lease needs an existing blob), renewing the lease every
 * 30 seconds until `fn` settles. Acquisition failures with status 409/412 are
 * treated as "lease currently unavailable" and retried every 5 seconds for up
 * to 30 minutes; any other error propagates immediately.
 *
 * Fixes: `new ArrayBuffer()` was missing the required byteLength argument
 * (a type error under the standard lib typings); the catch variable is now
 * explicitly typed so the statusCode access compiles under
 * useUnknownInCatchVariables.
 */
async function withLease<T>(client: BlockBlobClient, fn: () => Promise<T>) {
	const lease = client.getBlobLeaseClient();

	for (let i = 0; i < 360; i++) { // Try to get lease for 30 minutes
		try {
			await client.uploadData(new ArrayBuffer(0)); // blob needs to exist for lease to be acquired
			await lease.acquireLease(60);

			try {
				const abortController = new AbortController();
				// Keeps the lease alive while fn runs; resolves on abort, rejects if renewal fails
				const refresher = new Promise<void>((c, e) => {
					abortController.signal.onabort = () => {
						clearInterval(interval);
						c();
					};

					const interval = setInterval(() => {
						lease.renewLease().catch(err => {
							clearInterval(interval);
							e(new Error('Failed to renew lease ' + err));
						});
					}, 30_000);
				});

				// Racing against the refresher surfaces renewal failures while fn is running
				const result = await Promise.race([fn(), refresher]);
				abortController.abort();
				return result;
			} finally {
				await lease.releaseLease();
			}
		} catch (err: any) {
			// 409 Conflict / 412 Precondition Failed: someone else holds the lease; retry
			if (err.statusCode !== 409 && err.statusCode !== 412) {
				throw err;
			}

			await new Promise(c => setTimeout(c, 5000));
		}
	}

	throw new Error('Failed to acquire lease on blob after 30 minutes');
}
842
843
/**
 * Releases a single extracted artifact file via ESRP (unless it is already on
 * the CDN) and records the resulting asset document in Cosmos DB. The whole
 * operation runs under a blob lease keyed by the friendly file name, so
 * concurrent pipeline attempts cannot release the same file twice.
 */
async function processArtifact(
	artifact: Artifact,
	filePath: string
) {
	const log = (...args: any[]) => console.log(`[${artifact.name}]`, ...args);
	// Artifact names look like: vscode_<product>_<os>[_legacy]_<arch>_<type>
	const match = /^vscode_(?<product>[^_]+)_(?<os>[^_]+)(?:_legacy)?_(?<arch>[^_]+)_(?<unprocessedType>[^_]+)$/.exec(artifact.name);

	if (!match) {
		throw new Error(`Invalid artifact name: ${artifact.name}`);
	}

	// assumes PUBLISH_AUTH_TOKENS is a JSON object with these two keys — TODO confirm against pipeline definition
	const { cosmosDBAccessToken, blobServiceAccessToken } = JSON.parse(e('PUBLISH_AUTH_TOKENS'));
	const quality = e('VSCODE_QUALITY');
	const version = e('BUILD_SOURCEVERSION');
	const friendlyFileName = `${quality}/${version}/${path.basename(filePath)}`;

	const blobServiceClient = new BlobServiceClient(`https://${e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')}.blob.core.windows.net/`, { getToken: async () => blobServiceAccessToken });
	const leasesContainerClient = blobServiceClient.getContainerClient('leases');
	await leasesContainerClient.createIfNotExists();
	const leaseBlobClient = leasesContainerClient.getBlockBlobClient(friendlyFileName);

	log(`Acquiring lease for: ${friendlyFileName}`);

	await withLease(leaseBlobClient, async () => {
		log(`Successfully acquired lease for: ${friendlyFileName}`);

		// A 200 from the CDN means a previous attempt already released this file
		const url = `${e('PRSS_CDN_URL')}/${friendlyFileName}`;
		const res = await retry(() => fetch(url));

		if (res.status === 200) {
			log(`Already released and provisioned: ${url}`);
		} else {
			const stagingContainerClient = blobServiceClient.getContainerClient('staging');
			await stagingContainerClient.createIfNotExists();

			// SAS valid from one hour ago (clock-skew margin) to one hour from now
			const now = new Date().valueOf();
			const oneHour = 60 * 60 * 1000;
			const oneHourAgo = new Date(now - oneHour);
			const oneHourFromNow = new Date(now + oneHour);
			const userDelegationKey = await blobServiceClient.getUserDelegationKey(oneHourAgo, oneHourFromNow);
			const sasOptions = { containerName: 'staging', permissions: ContainerSASPermissions.from({ read: true }), startsOn: oneHourAgo, expiresOn: oneHourFromNow };
			const stagingSasToken = generateBlobSASQueryParameters(sasOptions, userDelegationKey, e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')).toString();

			const releaseService = await ESRPReleaseService.create(
				log,
				e('RELEASE_TENANT_ID'),
				e('RELEASE_CLIENT_ID'),
				e('RELEASE_AUTH_CERT'),
				e('RELEASE_REQUEST_SIGNING_CERT'),
				stagingContainerClient,
				stagingSasToken
			);

			await releaseService.createRelease(version, filePath, friendlyFileName);
		}

		const { product, os, arch, unprocessedType } = match.groups!;
		const platform = getPlatform(product, os, arch, unprocessedType);
		const type = getRealType(unprocessedType);
		const size = fs.statSync(filePath).size;
		const stream = fs.createReadStream(filePath);
		// Both hashers subscribe to the same stream's 'data' events (see hashStream),
		// so the file is read from disk only once
		const [hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]); // CodeQL [SM04514] Using SHA1 only for legacy reasons, we are actually only respecting SHA256
		const asset: Asset = { platform, type, url, hash: hash.toString('hex'), sha256hash: sha256hash.toString('hex'), size, supportsFastUpdate: true };
		log('Creating asset...');

		const result = await retry(async (attempt) => {
			log(`Creating asset in Cosmos DB (attempt ${attempt})...`);
			// A fresh client per attempt, authenticated with the AAD token from PUBLISH_AUTH_TOKENS
			const client = new CosmosClient({ endpoint: e('AZURE_DOCUMENTDB_ENDPOINT')!, tokenProvider: () => Promise.resolve(`type=aad&ver=1.0&sig=${cosmosDBAccessToken.token}`) });
			const scripts = client.database('builds').container(quality).scripts;
			const { resource: result } = await scripts.storedProcedure('createAsset').execute<'ok' | 'already exists'>('', [version, asset, true]);
			return result;
		});

		if (result === 'already exists') {
			log('Asset already exists!');
		} else {
			log('Asset successfully created: ', JSON.stringify(asset, undefined, 2));
		}
	});

	log(`Successfully released lease for: ${friendlyFileName}`);
}
925
926
// It is VERY important that we don't download artifacts too much too fast from AZDO.
927
// AZDO throttles us SEVERELY if we do. Not just that, but they also close open
928
// sockets, so the whole things turns to a grinding halt. So, downloading and extracting
929
// happens serially in the main thread, making the downloads are spaced out
930
// properly. For each extracted artifact, we spawn a worker thread to upload it to
931
// the CDN and finally update the build in Cosmos DB.
932
async function main() {
933
if (!isMainThread) {
934
const { artifact, artifactFilePath } = workerData;
935
await processArtifact(artifact, artifactFilePath);
936
return;
937
}
938
939
const done = new State();
940
const processing = new Set<string>();
941
942
for (const name of done) {
943
console.log(`\u2705 ${name}`);
944
}
945
946
const stages = new Set<string>(['Compile']);
947
948
if (
949
e('VSCODE_BUILD_STAGE_LINUX') === 'True' ||
950
e('VSCODE_BUILD_STAGE_ALPINE') === 'True' ||
951
e('VSCODE_BUILD_STAGE_MACOS') === 'True' ||
952
e('VSCODE_BUILD_STAGE_WINDOWS') === 'True'
953
) {
954
stages.add('CompileCLI');
955
}
956
957
if (e('VSCODE_BUILD_STAGE_WINDOWS') === 'True') { stages.add('Windows'); }
958
if (e('VSCODE_BUILD_STAGE_LINUX') === 'True') { stages.add('Linux'); }
959
if (e('VSCODE_BUILD_STAGE_ALPINE') === 'True') { stages.add('Alpine'); }
960
if (e('VSCODE_BUILD_STAGE_MACOS') === 'True') { stages.add('macOS'); }
961
if (e('VSCODE_BUILD_STAGE_WEB') === 'True') { stages.add('Web'); }
962
963
let timeline: Timeline;
964
let artifacts: Artifact[];
965
let resultPromise = Promise.resolve<PromiseSettledResult<void>[]>([]);
966
const operations: { name: string; operation: Promise<void> }[] = [];
967
968
while (true) {
969
[timeline, artifacts] = await Promise.all([retry(() => getPipelineTimeline()), retry(() => getPipelineArtifacts())]);
970
const stagesCompleted = new Set<string>(timeline.records.filter(r => r.type === 'Stage' && r.state === 'completed' && stages.has(r.name)).map(r => r.name));
971
const stagesInProgress = [...stages].filter(s => !stagesCompleted.has(s));
972
const artifactsInProgress = artifacts.filter(a => processing.has(a.name));
973
974
if (stagesInProgress.length === 0 && artifacts.length === done.size + processing.size) {
975
break;
976
} else if (stagesInProgress.length > 0) {
977
console.log('Stages in progress:', stagesInProgress.join(', '));
978
} else if (artifactsInProgress.length > 0) {
979
console.log('Artifacts in progress:', artifactsInProgress.map(a => a.name).join(', '));
980
} else {
981
console.log(`Waiting for a total of ${artifacts.length}, ${done.size} done, ${processing.size} in progress...`);
982
}
983
984
for (const artifact of artifacts) {
985
if (done.has(artifact.name) || processing.has(artifact.name)) {
986
continue;
987
}
988
989
console.log(`[${artifact.name}] Found new artifact`);
990
991
const artifactZipPath = path.join(e('AGENT_TEMPDIRECTORY'), `${artifact.name}.zip`);
992
993
await retry(async (attempt) => {
994
const start = Date.now();
995
console.log(`[${artifact.name}] Downloading (attempt ${attempt})...`);
996
await downloadArtifact(artifact, artifactZipPath);
997
const archiveSize = fs.statSync(artifactZipPath).size;
998
const downloadDurationS = (Date.now() - start) / 1000;
999
const downloadSpeedKBS = Math.round((archiveSize / 1024) / downloadDurationS);
1000
console.log(`[${artifact.name}] Successfully downloaded after ${Math.floor(downloadDurationS)} seconds(${downloadSpeedKBS} KB/s).`);
1001
});
1002
1003
const artifactFilePaths = await unzip(artifactZipPath, e('AGENT_TEMPDIRECTORY'));
1004
const artifactFilePath = artifactFilePaths.filter(p => !/_manifest/.test(p))[0];
1005
1006
processing.add(artifact.name);
1007
const promise = new Promise<void>((resolve, reject) => {
1008
const worker = new Worker(__filename, { workerData: { artifact, artifactFilePath } });
1009
worker.on('error', reject);
1010
worker.on('exit', code => {
1011
if (code === 0) {
1012
resolve();
1013
} else {
1014
reject(new Error(`[${artifact.name}] Worker stopped with exit code ${code}`));
1015
}
1016
});
1017
});
1018
1019
const operation = promise.then(() => {
1020
processing.delete(artifact.name);
1021
done.add(artifact.name);
1022
console.log(`\u2705 ${artifact.name} `);
1023
});
1024
1025
operations.push({ name: artifact.name, operation });
1026
resultPromise = Promise.allSettled(operations.map(o => o.operation));
1027
}
1028
1029
await new Promise(c => setTimeout(c, 10_000));
1030
}
1031
1032
console.log(`Found all ${done.size + processing.size} artifacts, waiting for ${processing.size} artifacts to finish publishing...`);
1033
1034
const artifactsInProgress = operations.filter(o => processing.has(o.name));
1035
1036
if (artifactsInProgress.length > 0) {
1037
console.log('Artifacts in progress:', artifactsInProgress.map(a => a.name).join(', '));
1038
}
1039
1040
const results = await resultPromise;
1041
1042
for (let i = 0; i < operations.length; i++) {
1043
const result = results[i];
1044
1045
if (result.status === 'rejected') {
1046
console.error(`[${operations[i].name}]`, result.reason);
1047
}
1048
}
1049
1050
// Fail the job if any of the artifacts failed to publish
1051
if (results.some(r => r.status === 'rejected')) {
1052
throw new Error('Some artifacts failed to publish');
1053
}
1054
1055
// Also fail the job if any of the stages did not succeed
1056
let shouldFail = false;
1057
1058
for (const stage of stages) {
1059
const record = timeline.records.find(r => r.name === stage && r.type === 'Stage')!;
1060
1061
if (record.result !== 'succeeded' && record.result !== 'succeededWithIssues') {
1062
shouldFail = true;
1063
console.error(`Stage ${stage} did not succeed: ${record.result}`);
1064
}
1065
}
1066
1067
if (shouldFail) {
1068
throw new Error('Some stages did not succeed');
1069
}
1070
1071
console.log(`All ${done.size} artifacts published!`);
1072
}
1073
1074
// Run only when executed directly (main thread AND worker threads both enter
// here; main() branches on isMainThread). Exit code reflects success/failure.
if (require.main === module) {
	main().then(() => {
		process.exit(0);
	}, err => {
		console.error(err);
		process.exit(1);
	});
}