Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
microsoft
GitHub Repository: microsoft/vscode
Path: blob/main/build/azure-pipelines/common/publish.ts
5318 views
1
/*---------------------------------------------------------------------------------------------
2
* Copyright (c) Microsoft Corporation. All rights reserved.
3
* Licensed under the MIT License. See License.txt in the project root for license information.
4
*--------------------------------------------------------------------------------------------*/
5
6
import fs from 'fs';
7
import path from 'path';
8
import { Readable } from 'stream';
9
import type { ReadableStream } from 'stream/web';
10
import { pipeline } from 'node:stream/promises';
11
import yauzl from 'yauzl';
12
import crypto from 'crypto';
13
import { retry } from './retry.ts';
14
import { CosmosClient } from '@azure/cosmos';
15
import cp from 'child_process';
16
import os from 'os';
17
import { Worker, isMainThread, workerData } from 'node:worker_threads';
18
import { ConfidentialClientApplication } from '@azure/msal-node';
19
import { BlobClient, BlobServiceClient, BlockBlobClient, ContainerClient, ContainerSASPermissions, generateBlobSASQueryParameters } from '@azure/storage-blob';
20
import jws from 'jws';
21
import { clearInterval, setInterval } from 'node:timers';
22
23
/**
 * Reads a required environment variable.
 *
 * @param name Name of the environment variable.
 * @returns The variable's value.
 * @throws If the variable is unset.
 */
export function e(name: string): string {
	const value = process.env[name];

	if (typeof value === 'string') {
		return value;
	}

	throw new Error(`Missing env: ${name}`);
}
32
33
/**
 * Computes the digest of a stream's full contents.
 *
 * @param hashName Digest algorithm name, e.g. 'sha256'.
 * @param stream Readable stream to consume.
 * @returns The raw digest, resolved once the stream closes.
 */
function hashStream(hashName: string, stream: Readable): Promise<Buffer> {
	return new Promise<Buffer>((resolve, reject) => {
		const hash = crypto.createHash(hashName);

		stream.on('data', chunk => hash.update(chunk));
		stream.on('error', reject);
		stream.on('close', () => resolve(hash.digest()));
	});
}
43
44
/**
 * Response returned by the ESRP release service when a release is submitted.
 * `code`/`message`/`target`/`innerError` appear to be populated only on
 * failure — TODO confirm against the service contract.
 */
interface ReleaseSubmitResponse {
	operationId: string;
	esrpCorrelationId: string;
	code?: string;
	message?: string;
	target?: string;
	innerError?: any;
}

/** Progress information for a single activity within a release workflow. */
interface ReleaseActivityInfo {
	activityId: string;
	activityType: string;
	name: string;
	status: string;
	errorCode: number;
	errorMessages: string[];
	beginTime?: Date;
	endTime?: Date;
	lastModifiedAt?: Date;
}

/** Nested service error; `innerError` chains toward the root cause. */
interface InnerServiceError {
	code: string;
	details: { [key: string]: string };
	innerError?: InnerServiceError;
}

/** Error summary attached to a failed release. */
interface ReleaseError {
	errorCode: number;
	errorMessages: string[];
}

/**
 * Possible values of `ReleaseResultMessage.status`.
 * Declared as a frozen object plus a derived union type so the values are
 * usable both at runtime and in type positions.
 */
const StatusCode = Object.freeze({
	Pass: 'pass',
	Aborted: 'aborted',
	Inprogress: 'inprogress',
	FailCanRetry: 'failCanRetry',
	FailDoNotRetry: 'failDoNotRetry',
	PendingAnalysis: 'pendingAnalysis',
	Cancelled: 'cancelled'
});
type StatusCode = typeof StatusCode[keyof typeof StatusCode];
86
87
/** Status payload returned when polling a release operation. */
interface ReleaseResultMessage {
	activities: ReleaseActivityInfo[];
	childWorkflowType: string;
	clientId: string;
	customerCorrelationId: string;
	errorInfo: InnerServiceError;
	groupId: string;
	lastModifiedAt: Date;
	operationId: string;
	releaseError: ReleaseError;
	requestSubmittedAt: Date;
	routedRegion: string;
	status: StatusCode;
	totalFileCount: number;
	totalReleaseSize: number;
	version: string;
}

/**
 * Metadata for a single file in a release request/response.
 * Mirrors the ESRP wire format; all fields are optional there.
 */
interface ReleaseFileInfo {
	name?: string;
	hash?: number[];
	sourceLocation?: FileLocation;
	sizeInBytes?: number;
	hashType?: FileHashType;
	fileId?: any;
	distributionRelativePath?: string;
	partNumber?: string;
	friendlyFileName?: string;
	tenantFileLocationType?: string;
	tenantFileLocation?: string;
	signedEngineeringCopyLocation?: string;
	encryptedDistributionBlobLocation?: string;
	preEncryptedDistributionBlobLocation?: string;
	secondaryDistributionHashRequired?: boolean;
	secondaryDistributionHashType?: FileHashType;
	lastModifiedAt?: Date;
	cultureCodes?: string[];
	displayFileInDownloadCenter?: boolean;
	isPrimaryFileInDownloadCenter?: boolean;
	fileDownloadDetails?: FileDownloadDetails[];
}

// Currently adds nothing beyond ReleaseFileInfo; kept as a distinct name
interface ReleaseDetailsFileInfo extends ReleaseFileInfo { }

/** Full release details, as returned by the details ('grd') endpoint. */
interface ReleaseDetailsMessage extends ReleaseResultMessage {
	clusterRegion: string;
	correlationVector: string;
	releaseCompletedAt?: Date;
	releaseInfo: ReleaseInfo;
	productInfo: ProductInfo;
	createdBy: UserInfo;
	owners: OwnerInfo[];
	accessPermissionsInfo: AccessPermissionsInfo;
	files: ReleaseDetailsFileInfo[];
	comments: string[];
	cancellationReason: string;
	downloadCenterInfo: DownloadCenterInfo;
}

/** Product being released. */
interface ProductInfo {
	name?: string;
	version?: string;
	description?: string;
}

/** Release-level metadata and approval requirements. */
interface ReleaseInfo {
	title?: string;
	minimumNumberOfApprovers: number;
	properties?: { [key: string]: string };
	isRevision?: boolean;
	revisionNumber?: string;
}

type FileLocationType = 'azureBlob';

/** Where the release service should fetch a file from. */
interface FileLocation {
	type: FileLocationType;
	blobUrl: string;
	uncPath?: string;
	url?: string;
}

type FileHashType = 'sha256' | 'sha1';

/** Download link for a released file on a given portal. */
interface FileDownloadDetails {
	portalName: string;
	downloadUrl: string;
}

/** Routing hints telling the service how to handle the release. */
interface RoutingInfo {
	intent?: string;
	contentType?: string;
	contentOrigin?: string;
	productState?: string;
	audience?: string;
}
184
185
// NOTE: A second, byte-for-byte identical `ReleaseFileInfo` declaration used
// to live here. TypeScript silently merges duplicate interface declarations
// with identical members, so it had no effect; it has been removed in favor
// of the single declaration earlier in this file.
208
209
/** A user, identified by UPN. */
interface UserInfo {
	userPrincipalName?: string;
}

interface OwnerInfo {
	owner: UserInfo;
}

interface ApproverInfo {
	approver: UserInfo;
	isAutoApproved: boolean;
	isMandatory: boolean;
}

/** Who may publish/download the release. */
interface AccessPermissionsInfo {
	mainPublisher?: string;
	releasePublishers?: string[];
	channelDownloadEntityDetails?: { [key: string]: string[] };
}

/** Localized Download Center listing details. */
interface DownloadCenterLocaleInfo {
	cultureCode?: string;
	downloadTitle?: string;
	shortName?: string;
	shortDescription?: string;
	longDescription?: string;
	instructions?: string;
	additionalInfo?: string;
	keywords?: string[];
	version?: string;
	relatedLinks?: { [key: string]: URL };
}

/** Optional Microsoft Download Center publication settings. */
interface DownloadCenterInfo {
	downloadCenterId: number;
	publishToDownloadCenter?: boolean;
	publishingGroup?: string;
	operatingSystems?: string[];
	relatedReleases?: string[];
	kbNumbers?: string[];
	sbNumbers?: string[];
	locales?: DownloadCenterLocaleInfo[];
	additionalProperties?: { [key: string]: string };
}

/**
 * Top-level release request submitted to the ESRP service. `jwsToken` is
 * filled in last, with a signature over the rest of the message.
 */
interface ReleaseRequestMessage {
	driEmail: string[];
	groupId?: string;
	customerCorrelationId: string;
	esrpCorrelationId: string;
	contextData?: { [key: string]: string };
	releaseInfo: ReleaseInfo;
	productInfo: ProductInfo;
	files: ReleaseFileInfo[];
	routingInfo?: RoutingInfo;
	createdBy: UserInfo;
	owners: OwnerInfo[];
	approvers: ApproverInfo[];
	accessPermissionsInfo: AccessPermissionsInfo;
	jwsToken?: string;
	publisherId?: string;
	downloadCenterInfo?: DownloadCenterInfo;
}
272
273
/**
 * Strips PEM armor and newlines from a certificate and decodes the remaining
 * base64 payload into its DER bytes.
 */
function getCertificateBuffer(input: string) {
	const base64 = input.replace(/-----BEGIN CERTIFICATE-----|-----END CERTIFICATE-----|\n/g, '');
	return Buffer.from(base64, 'base64');
}

/**
 * Computes the digest (thumbprint) of a PEM certificate's DER bytes.
 *
 * @param input PEM-encoded certificate.
 * @param algorithm Hash algorithm name, e.g. 'sha1' or 'sha256'.
 */
function getThumbprint(input: string, algorithm: string): Buffer {
	const der = getCertificateBuffer(input);
	return crypto.createHash(algorithm).update(der).digest();
}
281
282
/**
 * Extracts the private key from a base64-encoded, passwordless PFX bundle by
 * shelling out to `openssl`. Temporary files are always cleaned up.
 *
 * @param pfx Base64-encoded PKCS#12 data.
 * @returns The first PEM private key block found in openssl's output.
 */
function getKeyFromPFX(pfx: string): string {
	const pfxPath = path.join(os.tmpdir(), 'cert.pfx');
	const keyPath = path.join(os.tmpdir(), 'key.pem');

	try {
		fs.writeFileSync(pfxPath, Buffer.from(pfx, 'base64'));
		// -nocerts -nodes: emit only the unencrypted private key
		cp.execSync(`openssl pkcs12 -in "${pfxPath}" -nocerts -nodes -out "${keyPath}" -passin pass:`);
		const pem = fs.readFileSync(keyPath, 'utf-8');
		return pem.match(/-----BEGIN PRIVATE KEY-----[\s\S]+?-----END PRIVATE KEY-----/g)![0];
	} finally {
		fs.rmSync(pfxPath, { force: true });
		fs.rmSync(keyPath, { force: true });
	}
}
298
299
/**
 * Extracts every certificate from a base64-encoded, passwordless PFX bundle by
 * shelling out to `openssl`, returned in reverse of openssl's output order.
 * Temporary files are always cleaned up.
 *
 * @param pfx Base64-encoded PKCS#12 data.
 * @returns PEM certificate blocks, or an empty array when none are found.
 */
function getCertificatesFromPFX(pfx: string): string[] {
	const pfxPath = path.join(os.tmpdir(), 'cert.pfx');
	const certPath = path.join(os.tmpdir(), 'cert.pem');

	try {
		fs.writeFileSync(pfxPath, Buffer.from(pfx, 'base64'));
		// -nokeys: emit only the certificate chain
		cp.execSync(`openssl pkcs12 -in "${pfxPath}" -nokeys -out "${certPath}" -passin pass:`);
		const pem = fs.readFileSync(certPath, 'utf-8');
		const certificates = pem.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g);
		return certificates ? certificates.reverse() : [];
	} finally {
		fs.rmSync(pfxPath, { force: true });
		fs.rmSync(certPath, { force: true });
	}
}
315
316
/**
 * Client for the ESRP release service, used to publish build artifacts to the
 * CDN. Obtain an instance via the static `create` factory (which performs AAD
 * client-credential authentication), then call `createRelease` per file.
 */
class ESRPReleaseService {

	/**
	 * Authenticates against AAD with the auth certificate and returns a ready
	 * service instance.
	 *
	 * @param log Sink for progress messages.
	 * @param tenantId AAD tenant to authenticate against.
	 * @param clientId AAD application (client) id; also used in API URLs.
	 * @param authCertificatePfx Base64 PFX used for AAD authentication.
	 * @param requestSigningCertificatePfx Base64 PFX used to JWS-sign requests.
	 * @param containerClient Staging container that release blobs are uploaded to.
	 * @param stagingSasToken SAS token appended to blob URLs handed to the service.
	 */
	static async create(
		log: (...args: unknown[]) => void,
		tenantId: string,
		clientId: string,
		authCertificatePfx: string,
		requestSigningCertificatePfx: string,
		containerClient: ContainerClient,
		stagingSasToken: string
	) {
		const authKey = getKeyFromPFX(authCertificatePfx);
		// First certificate of the auth chain is used for the client credential
		const authCertificate = getCertificatesFromPFX(authCertificatePfx)[0];
		const requestSigningKey = getKeyFromPFX(requestSigningCertificatePfx);
		const requestSigningCertificates = getCertificatesFromPFX(requestSigningCertificatePfx);

		const app = new ConfidentialClientApplication({
			auth: {
				clientId,
				authority: `https://login.microsoftonline.com/${tenantId}`,
				clientCertificate: {
					thumbprintSha256: getThumbprint(authCertificate, 'sha256').toString('hex'),
					privateKey: authKey,
					x5c: authCertificate
				}
			}
		});

		const response = await app.acquireTokenByClientCredential({
			scopes: ['https://api.esrp.microsoft.com/.default']
		});

		return new ESRPReleaseService(log, clientId, response!.accessToken, requestSigningCertificates, requestSigningKey, containerClient, stagingSasToken);
	}

	private static API_URL = 'https://api.esrp.microsoft.com/api/v3/releaseservices/clients/';

	private readonly log: (...args: unknown[]) => void;
	private readonly clientId: string;
	private readonly accessToken: string;
	private readonly requestSigningCertificates: string[];
	private readonly requestSigningKey: string;
	private readonly containerClient: ContainerClient;
	private readonly stagingSasToken: string;

	// Private: use the `create` factory, which performs authentication.
	private constructor(
		log: (...args: unknown[]) => void,
		clientId: string,
		accessToken: string,
		requestSigningCertificates: string[],
		requestSigningKey: string,
		containerClient: ContainerClient,
		stagingSasToken: string
	) {
		this.log = log;
		this.clientId = clientId;
		this.accessToken = accessToken;
		this.requestSigningCertificates = requestSigningCertificates;
		this.requestSigningKey = requestSigningKey;
		this.containerClient = containerClient;
		this.stagingSasToken = stagingSasToken;
	}

	/**
	 * Uploads a file to the staging container, submits it as an ESRP release,
	 * polls until the release completes, and returns the resulting download
	 * URL. The staging blob is deleted on the way out, success or failure.
	 *
	 * @param version Build version, used in the release metadata.
	 * @param filePath Local path of the file to release.
	 * @param friendlyFileName Display name for the released file.
	 * @returns The released file's download URL.
	 * @throws If the release is aborted, fails, or does not pass within ~60 minutes.
	 */
	async createRelease(version: string, filePath: string, friendlyFileName: string) {
		// A fresh UUID doubles as blob name and correlation id for this release
		const correlationId = crypto.randomUUID();
		const blobClient = this.containerClient.getBlockBlobClient(correlationId);

		this.log(`Uploading ${filePath} to ${blobClient.url}`);
		await blobClient.uploadFile(filePath);
		this.log('Uploaded blob successfully');

		try {
			this.log(`Submitting release for ${version}: ${filePath}`);
			const submitReleaseResult = await this.submitRelease(version, filePath, friendlyFileName, correlationId, blobClient);

			this.log(`Successfully submitted release ${submitReleaseResult.operationId}. Polling for completion...`);

			// Poll every 5 seconds, wait 60 minutes max -> poll 60/5*60=720 times
			for (let i = 0; i < 720; i++) {
				await new Promise(c => setTimeout(c, 5000));
				const releaseStatus = await this.getReleaseStatus(submitReleaseResult.operationId);

				if (releaseStatus.status === 'pass') {
					break;
				} else if (releaseStatus.status === 'aborted') {
					this.log(JSON.stringify(releaseStatus));
					throw new Error(`Release was aborted`);
				} else if (releaseStatus.status !== 'inprogress') {
					// Anything other than pass/aborted/inprogress is unexpected
					this.log(JSON.stringify(releaseStatus));
					throw new Error(`Unknown error when polling for release`);
				}
			}

			// Re-check via the details endpoint; if the loop above exhausted its
			// iterations while still in progress, this reports the timeout.
			const releaseDetails = await this.getReleaseDetails(submitReleaseResult.operationId);

			if (releaseDetails.status !== 'pass') {
				throw new Error(`Timed out waiting for release: ${JSON.stringify(releaseDetails)}`);
			}

			this.log('Successfully created release:', releaseDetails.files[0].fileDownloadDetails![0].downloadUrl);
			return releaseDetails.files[0].fileDownloadDetails![0].downloadUrl;
		} finally {
			// Always clean up the staging blob
			this.log(`Deleting blob ${blobClient.url}`);
			await blobClient.delete();
			this.log('Deleted blob successfully');
		}
	}

	/**
	 * Builds, JWS-signs and POSTs the release request for a single file.
	 *
	 * @returns The submit response, including the operation id used for polling.
	 * @throws On a non-2xx HTTP response.
	 */
	private async submitRelease(
		version: string,
		filePath: string,
		friendlyFileName: string,
		correlationId: string,
		blobClient: BlobClient
	): Promise<ReleaseSubmitResponse> {
		const size = fs.statSync(filePath).size;
		const hash = await hashStream('sha256', fs.createReadStream(filePath));
		// The release service reads the blob through this SAS-authenticated URL
		const blobUrl = `${blobClient.url}?${this.stagingSasToken}`;

		const message: ReleaseRequestMessage = {
			customerCorrelationId: correlationId,
			esrpCorrelationId: correlationId,
			driEmail: ['[email protected]'],
			createdBy: { userPrincipalName: '[email protected]' },
			owners: [{ owner: { userPrincipalName: '[email protected]' } }],
			approvers: [{ approver: { userPrincipalName: '[email protected]' }, isAutoApproved: true, isMandatory: false }],
			releaseInfo: {
				title: 'VS Code',
				properties: {
					'ReleaseContentType': 'InstallPackage'
				},
				minimumNumberOfApprovers: 1
			},
			productInfo: {
				name: 'VS Code',
				version,
				description: 'VS Code'
			},
			accessPermissionsInfo: {
				mainPublisher: 'VSCode',
				channelDownloadEntityDetails: {
					AllDownloadEntities: ['VSCode']
				}
			},
			routingInfo: {
				intent: 'filedownloadlinkgeneration'
			},
			files: [{
				name: path.basename(filePath),
				friendlyFileName,
				tenantFileLocation: blobUrl,
				tenantFileLocationType: 'AzureBlob',
				sourceLocation: {
					type: 'azureBlob',
					blobUrl
				},
				hashType: 'sha256',
				hash: Array.from(hash),
				sizeInBytes: size
			}]
		};

		// The request body carries its own JWS signature over the message
		message.jwsToken = await this.generateJwsToken(message);

		const res = await fetch(`${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations`, {
			method: 'POST',
			headers: {
				'Content-Type': 'application/json',
				'Authorization': `Bearer ${this.accessToken}`
			},
			body: JSON.stringify(message)
		});

		if (!res.ok) {
			const text = await res.text();
			throw new Error(`Failed to submit release: ${res.statusText}\n${text}`);
		}

		return await res.json() as ReleaseSubmitResponse;
	}

	/**
	 * Fetches the current status of a release operation (the 'grs' endpoint).
	 * @throws On a non-2xx HTTP response.
	 */
	private async getReleaseStatus(releaseId: string): Promise<ReleaseResultMessage> {
		const url = `${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations/grs/${releaseId}`;

		const res = await retry(() => fetch(url, {
			headers: {
				'Authorization': `Bearer ${this.accessToken}`
			}
		}));

		if (!res.ok) {
			const text = await res.text();
			throw new Error(`Failed to get release status: ${res.statusText}\n${text}`);
		}

		return await res.json() as ReleaseResultMessage;
	}

	/**
	 * Fetches the full details of a release operation (the 'grd' endpoint),
	 * including the per-file download URLs.
	 * @throws On a non-2xx HTTP response.
	 */
	private async getReleaseDetails(releaseId: string): Promise<ReleaseDetailsMessage> {
		const url = `${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations/grd/${releaseId}`;

		const res = await retry(() => fetch(url, {
			headers: {
				'Authorization': `Bearer ${this.accessToken}`
			}
		}));

		if (!res.ok) {
			const text = await res.text();
			throw new Error(`Failed to get release status: ${res.statusText}\n${text}`);
		}

		return await res.json() as ReleaseDetailsMessage;
	}

	/**
	 * Signs the release request as a JWS token with the request signing key.
	 * The service requires several deviations from the JWS standard, noted
	 * inline below (tick-based `exp`, hex `x5t`, '.'-joined `x5c`).
	 */
	private async generateJwsToken(message: ReleaseRequestMessage): Promise<string> {
		// Create header with properly typed properties, then override x5c with the non-standard string format
		const header: jws.Header = {
			alg: 'RS256',
			crit: ['exp', 'x5t'],
			// Release service uses ticks, not seconds :roll_eyes: (https://stackoverflow.com/a/7968483)
			exp: ((Date.now() + (6 * 60 * 1000)) * 10000) + 621355968000000000,
			// Release service uses hex format, not base64url :roll_eyes:
			x5t: getThumbprint(this.requestSigningCertificates[0], 'sha1').toString('hex'),
		};

		// The Release service expects x5c as a '.' separated string, not the standard array format
		(header as Record<string, unknown>)['x5c'] = this.requestSigningCertificates.map(c => getCertificateBuffer(c).toString('base64url')).join('.');

		return jws.sign({
			header,
			payload: message,
			privateKey: this.requestSigningKey,
		});
	}
}
552
553
/**
 * Persistent set of artifact names that have already been processed, carried
 * across pipeline stage attempts via text files in the pipeline workspace.
 *
 * On construction, the state file of the most recent previous attempt (if
 * any) is loaded, then re-written into this attempt's own state file. `add`
 * appends incrementally so progress survives interruption.
 */
class State {

	private statePath: string;
	private set = new Set<string>();

	constructor() {
		const workspacePath = e('PIPELINE_WORKSPACE');

		// Locate the artifacts_processed_<attempt> folder with the highest attempt number
		const attempts = fs.readdirSync(workspacePath)
			.map(name => /^artifacts_processed_(\d+)$/.exec(name))
			.filter((match): match is RegExpExecArray => !!match)
			.map(match => ({ name: match[0], attempt: Number(match[1]) }))
			.sort((a, b) => b.attempt - a.attempt);

		const previousState = attempts[0];

		if (previousState) {
			const previousStatePath = path.join(workspacePath, previousState.name, previousState.name + '.txt');
			for (const name of fs.readFileSync(previousStatePath, 'utf8').split(/\n/)) {
				if (name) {
					this.set.add(name);
				}
			}
		}

		const stageAttempt = e('SYSTEM_STAGEATTEMPT');
		this.statePath = path.join(workspacePath, `artifacts_processed_${stageAttempt}`, `artifacts_processed_${stageAttempt}.txt`);
		fs.mkdirSync(path.dirname(this.statePath), { recursive: true });
		fs.writeFileSync(this.statePath, [...this.set].map(name => `${name}\n`).join(''));
	}

	get size(): number {
		return this.set.size;
	}

	has(name: string): boolean {
		return this.set.has(name);
	}

	add(name: string): void {
		this.set.add(name);
		fs.appendFileSync(this.statePath, `${name}\n`);
	}

	[Symbol.iterator](): IterableIterator<string> {
		return this.set[Symbol.iterator]();
	}
}
594
595
// Shared fetch options for all Azure DevOps REST calls. Evaluated once at
// module load, so SYSTEM_ACCESSTOKEN must already be set in the environment.
const azdoFetchOptions = {
	headers: {
		// Pretend we're a web browser to avoid download rate limits
		'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.0.0',
		'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
		'Accept-Encoding': 'gzip, deflate, br',
		'Accept-Language': 'en-US,en;q=0.9',
		'Referer': 'https://dev.azure.com',
		Authorization: `Bearer ${e('SYSTEM_ACCESSTOKEN')}`
	}
};
606
607
/**
 * Performs a GET against the AZDO builds API, aborting after two minutes.
 *
 * @param path API sub-path appended to BUILDS_API_URL.
 * @returns The parsed JSON response body.
 * @throws On a non-2xx response or timeout.
 */
export async function requestAZDOAPI<T>(path: string): Promise<T> {
	const abortController = new AbortController();
	const timeout = setTimeout(() => abortController.abort(), 2 * 60 * 1000);

	try {
		const url = `${e('BUILDS_API_URL')}${path}?api-version=6.0`;
		const res = await retry(() => fetch(url, { ...azdoFetchOptions, signal: abortController.signal }));

		if (!res.ok) {
			throw new Error(`Unexpected status code: ${res.status}`);
		}

		return await res.json();
	} finally {
		clearTimeout(timeout);
	}
}
623
624
// Shape of an AZDO pipeline artifact as returned by the artifacts API
// (only the fields this script reads).
export interface Artifact {
	readonly name: string;
	readonly resource: {
		readonly downloadUrl: string;
		readonly properties: {
			readonly artifactsize: number;
		};
	};
}
633
634
/**
 * Fetches the pipeline's artifacts, keeping only `vscode_*` artifacts and
 * excluding those whose names end in `sbom`.
 */
async function getPipelineArtifacts(): Promise<Artifact[]> {
	const { value } = await requestAZDOAPI<{ readonly value: Artifact[] }>('artifacts');
	return value.filter(artifact => /^vscode_/.test(artifact.name) && !/sbom$/.test(artifact.name));
}
638
639
// Subset of the AZDO timeline response: one record per stage/job/task.
interface Timeline {
	readonly records: {
		readonly name: string;
		readonly type: string;
		readonly state: string;
		readonly result: string;
	}[];
}

// Fetches the running pipeline's timeline (stage/job status records).
async function getPipelineTimeline(): Promise<Timeline> {
	return await requestAZDOAPI<Timeline>('timeline');
}
651
652
/**
 * Downloads an artifact archive to disk, aborting after four minutes.
 *
 * @param artifact Artifact whose `resource.downloadUrl` will be fetched.
 * @param downloadPath Destination file path.
 * @throws On a non-2xx response or timeout.
 */
async function downloadArtifact(artifact: Artifact, downloadPath: string): Promise<void> {
	const controller = new AbortController();
	const timer = setTimeout(() => controller.abort(), 4 * 60 * 1000);

	try {
		const response = await fetch(artifact.resource.downloadUrl, { ...azdoFetchOptions, signal: controller.signal });

		if (!response.ok) {
			throw new Error(`Unexpected status code: ${response.status}`);
		}

		const body = Readable.fromWeb(response.body as ReadableStream);
		await pipeline(body, fs.createWriteStream(downloadPath));
	} finally {
		clearTimeout(timer);
	}
}
668
669
/**
 * Extracts a zip archive to `outputPath`.
 *
 * Entries are pulled one at a time (lazyEntries) and each file entry is piped
 * to disk; the next entry is only requested once the previous file has
 * finished writing. Directory entries (names ending in '/') produce no output
 * themselves — parent directories are created on demand with mkdirSync.
 *
 * @param packagePath Path of the zip file.
 * @param outputPath Directory to extract into.
 * @returns Paths of all extracted files, resolved when the archive closes.
 */
async function unzip(packagePath: string, outputPath: string): Promise<string[]> {
	return new Promise((resolve, reject) => {
		yauzl.open(packagePath, { lazyEntries: true, autoClose: true }, (err, zipfile) => {
			if (err) {
				return reject(err);
			}

			const result: string[] = [];
			zipfile!.on('entry', entry => {
				if (/\/$/.test(entry.fileName)) {
					// Directory entry: nothing to write, pull the next one
					zipfile!.readEntry();
				} else {
					zipfile!.openReadStream(entry, (err, istream) => {
						if (err) {
							return reject(err);
						}

						const filePath = path.join(outputPath, entry.fileName);
						fs.mkdirSync(path.dirname(filePath), { recursive: true });

						const ostream = fs.createWriteStream(filePath);
						ostream.on('finish', () => {
							result.push(filePath);
							// Only request the next entry once this file is fully written
							zipfile!.readEntry();
						});
						istream?.on('error', err => reject(err));
						istream!.pipe(ostream);
					});
				}
			});

			// autoClose: 'close' fires once all entries have been processed
			zipfile!.on('close', () => resolve(result));
			zipfile!.readEntry();
		});
	});
}
705
706
/**
 * Build asset document stored in CosmosDB for each released artifact.
 * Per the note where hashes are computed, `hash` (sha1) exists only for
 * legacy reasons; `sha256hash` is the one actually respected.
 */
interface Asset {
	platform: string;
	type: string;
	url: string;
	mooncakeUrl?: string;
	prssUrl?: string;
	hash: string;
	sha256hash: string;
	size: number;
	supportsFastUpdate?: boolean;
}
717
718
// Contains all of the logic for mapping details to our actual product names in CosmosDB
719
function getPlatform(product: string, os: string, arch: string, type: string): string {
720
switch (os) {
721
case 'win32':
722
switch (product) {
723
case 'client': {
724
switch (type) {
725
case 'archive':
726
return `win32-${arch}-archive`;
727
case 'setup':
728
return `win32-${arch}`;
729
case 'user-setup':
730
return `win32-${arch}-user`;
731
default:
732
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
733
}
734
}
735
case 'server':
736
return `server-win32-${arch}`;
737
case 'web':
738
return `server-win32-${arch}-web`;
739
case 'cli':
740
return `cli-win32-${arch}`;
741
default:
742
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
743
}
744
case 'alpine':
745
switch (product) {
746
case 'server':
747
return `server-alpine-${arch}`;
748
case 'web':
749
return `server-alpine-${arch}-web`;
750
case 'cli':
751
return `cli-alpine-${arch}`;
752
default:
753
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
754
}
755
case 'linux':
756
switch (type) {
757
case 'snap':
758
return `linux-snap-${arch}`;
759
case 'archive-unsigned':
760
switch (product) {
761
case 'client':
762
return `linux-${arch}`;
763
case 'server':
764
return `server-linux-${arch}`;
765
case 'web':
766
if (arch === 'standalone') {
767
return 'web-standalone';
768
}
769
return `server-linux-${arch}-web`;
770
default:
771
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
772
}
773
case 'deb-package':
774
return `linux-deb-${arch}`;
775
case 'rpm-package':
776
return `linux-rpm-${arch}`;
777
case 'cli':
778
return `cli-linux-${arch}`;
779
default:
780
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
781
}
782
case 'darwin':
783
switch (product) {
784
case 'client':
785
switch (type) {
786
case 'dmg':
787
return `darwin-${arch}-dmg`;
788
case 'archive':
789
default:
790
if (arch === 'x64') {
791
return 'darwin';
792
}
793
return `darwin-${arch}`;
794
}
795
case 'server':
796
if (arch === 'x64') {
797
return 'server-darwin';
798
}
799
return `server-darwin-${arch}`;
800
case 'web':
801
if (arch === 'x64') {
802
return 'server-darwin-web';
803
}
804
return `server-darwin-${arch}-web`;
805
case 'cli':
806
return `cli-darwin-${arch}`;
807
default:
808
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
809
}
810
default:
811
throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
812
}
813
}
814
815
// Contains all of the logic for mapping types to our actual types in CosmosDB
816
function getRealType(type: string) {
817
switch (type) {
818
case 'user-setup':
819
return 'setup';
820
case 'deb-package':
821
case 'rpm-package':
822
return 'package';
823
default:
824
return type;
825
}
826
}
827
828
/**
 * Runs `fn` while holding a renewed lease on the given blob, serializing the
 * critical section across concurrent jobs.
 *
 * The blob is created (empty) if needed, then a 60-second lease is acquired
 * and renewed every 30 seconds for as long as `fn` runs. Acquisition
 * conflicts (HTTP 409/412: someone else holds the lease) are retried every 5
 * seconds for up to 30 minutes.
 *
 * @param client Blob used purely as a lock object.
 * @param fn Critical section to run under the lease.
 * @returns The result of `fn`.
 * @throws If the lease cannot be acquired within 30 minutes, if a lease
 * renewal fails mid-run, or whatever `fn` itself throws.
 */
async function withLease<T>(client: BlockBlobClient, fn: () => Promise<T>) {
	const lease = client.getBlobLeaseClient();

	for (let i = 0; i < 360; i++) { // Try to get lease for 30 minutes
		try {
			// Fix: ArrayBuffer requires an explicit byteLength (previously `new ArrayBuffer()`)
			await client.uploadData(new ArrayBuffer(0)); // blob needs to exist for lease to be acquired
			await lease.acquireLease(60);

			try {
				const abortController = new AbortController();

				// Keep the lease alive while fn() runs; reject if renewal ever fails
				const refresher = new Promise<void>((c, e) => {
					abortController.signal.onabort = () => {
						clearInterval(interval);
						c();
					};

					const interval = setInterval(() => {
						lease.renewLease().catch(err => {
							clearInterval(interval);
							e(new Error('Failed to renew lease ' + err));
						});
					}, 30_000);
				});

				const result = await Promise.race([fn(), refresher]);
				abortController.abort();
				return result;
			} finally {
				await lease.releaseLease();
			}
		} catch (err: any) {
			// 409 Conflict / 412 Precondition Failed mean another holder has the
			// lease — wait and retry; anything else is a real error
			if (err?.statusCode !== 409 && err?.statusCode !== 412) {
				throw err;
			}

			await new Promise(c => setTimeout(c, 5000));
		}
	}

	throw new Error('Failed to acquire lease on blob after 30 minutes');
}
869
870
/**
 * Publishes one downloaded artifact file: releases it to the CDN through ESRP
 * (unless a copy is already reachable there) and records the resulting asset
 * in CosmosDB.
 *
 * The whole operation runs under a blob lease keyed on
 * quality/version/filename, so concurrent jobs or retried stages cannot
 * process the same artifact twice.
 *
 * @param artifact Pipeline artifact; its name encodes product/os/arch/type.
 * @param filePath Local path of the extracted artifact file.
 * @throws If the artifact name doesn't match the expected pattern, or any
 * release/DB step fails.
 */
async function processArtifact(
	artifact: Artifact,
	filePath: string
) {
	const log = (...args: unknown[]) => console.log(`[${artifact.name}]`, ...args);
	// Artifact names look like: vscode_<product>_<os>[_legacy]_<arch>_<type>
	const match = /^vscode_(?<product>[^_]+)_(?<os>[^_]+)(?:_legacy)?_(?<arch>[^_]+)_(?<unprocessedType>[^_]+)$/.exec(artifact.name);

	if (!match) {
		throw new Error(`Invalid artifact name: ${artifact.name}`);
	}

	const { cosmosDBAccessToken, blobServiceAccessToken } = JSON.parse(e('PUBLISH_AUTH_TOKENS'));
	const quality = e('VSCODE_QUALITY');
	const version = e('BUILD_SOURCEVERSION');
	const friendlyFileName = `${quality}/${version}/${path.basename(filePath)}`;

	// The lease blob acts purely as a cross-job lock for this file
	const blobServiceClient = new BlobServiceClient(`https://${e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')}.blob.core.windows.net/`, { getToken: async () => blobServiceAccessToken });
	const leasesContainerClient = blobServiceClient.getContainerClient('leases');
	await leasesContainerClient.createIfNotExists();
	const leaseBlobClient = leasesContainerClient.getBlockBlobClient(friendlyFileName);

	log(`Acquiring lease for: ${friendlyFileName}`);

	await withLease(leaseBlobClient, async () => {
		log(`Successfully acquired lease for: ${friendlyFileName}`);

		// If the file is already reachable on the CDN, skip the ESRP release
		const url = `${e('PRSS_CDN_URL')}/${friendlyFileName}`;
		const res = await retry(() => fetch(url));

		if (res.status === 200) {
			log(`Already released and provisioned: ${url}`);
		} else {
			const stagingContainerClient = blobServiceClient.getContainerClient('staging');
			await stagingContainerClient.createIfNotExists();

			// Short-lived read SAS over the staging container so the release
			// service can fetch the uploaded blob (backdated 1h for clock skew)
			const now = new Date().valueOf();
			const oneHour = 60 * 60 * 1000;
			const oneHourAgo = new Date(now - oneHour);
			const oneHourFromNow = new Date(now + oneHour);
			const userDelegationKey = await blobServiceClient.getUserDelegationKey(oneHourAgo, oneHourFromNow);
			const sasOptions = { containerName: 'staging', permissions: ContainerSASPermissions.from({ read: true }), startsOn: oneHourAgo, expiresOn: oneHourFromNow };
			const stagingSasToken = generateBlobSASQueryParameters(sasOptions, userDelegationKey, e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')).toString();

			const releaseService = await ESRPReleaseService.create(
				log,
				e('RELEASE_TENANT_ID'),
				e('RELEASE_CLIENT_ID'),
				e('RELEASE_AUTH_CERT'),
				e('RELEASE_REQUEST_SIGNING_CERT'),
				stagingContainerClient,
				stagingSasToken
			);

			await releaseService.createRelease(version, filePath, friendlyFileName);
		}

		const { product, os, arch, unprocessedType } = match.groups!;
		const platform = getPlatform(product, os, arch, unprocessedType);
		const type = getRealType(unprocessedType);
		const size = fs.statSync(filePath).size;
		// Both hashers subscribe to the same stream's 'data' events, so a
		// single read of the file feeds both digests
		const stream = fs.createReadStream(filePath);
		const [hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]); // CodeQL [SM04514] Using SHA1 only for legacy reasons, we are actually only respecting SHA256
		const asset: Asset = { platform, type, url, hash: hash.toString('hex'), sha256hash: sha256hash.toString('hex'), size, supportsFastUpdate: true };
		log('Creating asset...');

		const result = await retry(async (attempt) => {
			log(`Creating asset in Cosmos DB (attempt ${attempt})...`);
			const client = new CosmosClient({ endpoint: e('AZURE_DOCUMENTDB_ENDPOINT')!, tokenProvider: () => Promise.resolve(`type=aad&ver=1.0&sig=${cosmosDBAccessToken.token}`) });
			const scripts = client.database('builds').container(quality).scripts;
			// The stored procedure reports 'already exists' on re-runs, making retries safe
			const { resource: result } = await scripts.storedProcedure('createAsset').execute<'ok' | 'already exists'>('', [version, asset, true]);
			return result;
		});

		if (result === 'already exists') {
			log('Asset already exists!');
		} else {
			log('Asset successfully created: ', JSON.stringify(asset, undefined, 2));
		}
	});

	log(`Successfully released lease for: ${friendlyFileName}`);
}
952
953
// It is VERY important that we don't download artifacts too much too fast from AZDO.
// AZDO throttles us SEVERELY if we do. Not just that, but they also close open
// sockets, so the whole thing grinds to a halt. So, downloading and extracting
// happens serially in the main thread, ensuring the downloads are spaced out
// properly. For each extracted artifact, we spawn a worker thread to upload it to
// the CDN and finally update the build in Cosmos DB.
959
/**
 * Entry point for the publish job. Runs in two modes:
 *
 * - Worker thread (`isMainThread === false`): publishes the single artifact
 *   passed in via `workerData`, then returns.
 * - Main thread: polls the AZDO pipeline for completed stages and new
 *   artifacts, downloads and extracts each artifact serially (see the
 *   throttling comment above this function), and spawns a worker thread per
 *   artifact to do the actual publishing.
 *
 * @throws if any artifact fails to publish, or if any expected stage finishes
 *   with a result other than `succeeded`/`succeededWithIssues`.
 */
async function main() {
	// Worker-thread mode: process the one artifact we were handed and bail out.
	if (!isMainThread) {
		const { artifact, artifactFilePath } = workerData;
		await processArtifact(artifact, artifactFilePath);
		return;
	}

	// Names of artifacts already published. NOTE(review): `State` is declared
	// elsewhere in this file — presumably it persists across re-runs of this
	// job, since already-done artifacts are listed below; confirm there.
	const done = new State();
	// Names of artifacts currently being handled by a worker thread.
	const processing = new Set<string>();

	// Echo previously completed artifacts so the log always shows the full picture.
	for (const name of done) {
		console.log(`\u2705 ${name}`);
	}

	// Pipeline stages we must wait on, derived from the build's env flags.
	const stages = new Set<string>(['Compile']);

	// CLI compilation only happens when at least one desktop platform builds.
	if (
		e('VSCODE_BUILD_STAGE_LINUX') === 'True' ||
		e('VSCODE_BUILD_STAGE_MACOS') === 'True' ||
		e('VSCODE_BUILD_STAGE_WINDOWS') === 'True'
	) {
		stages.add('CompileCLI');
	}

	if (e('VSCODE_BUILD_STAGE_WINDOWS') === 'True') { stages.add('Windows'); }
	if (e('VSCODE_BUILD_STAGE_LINUX') === 'True') { stages.add('Linux'); }
	if (e('VSCODE_BUILD_STAGE_ALPINE') === 'True') { stages.add('Alpine'); }
	if (e('VSCODE_BUILD_STAGE_MACOS') === 'True') { stages.add('macOS'); }
	if (e('VSCODE_BUILD_STAGE_WEB') === 'True') { stages.add('Web'); }

	let timeline: Timeline;
	let artifacts: Artifact[];
	// Settled results of every worker operation started so far; rebuilt each
	// time a new operation is added so awaiting it later covers all of them.
	let resultPromise = Promise.resolve<PromiseSettledResult<void>[]>([]);
	const operations: { name: string; operation: Promise<void> }[] = [];

	// Poll until every expected stage has completed AND every published-or-in-
	// flight artifact accounts for everything the pipeline produced.
	while (true) {
		[timeline, artifacts] = await Promise.all([retry(() => getPipelineTimeline()), retry(() => getPipelineArtifacts())]);
		const stagesCompleted = new Set<string>(timeline.records.filter(r => r.type === 'Stage' && r.state === 'completed' && stages.has(r.name)).map(r => r.name));
		const stagesInProgress = [...stages].filter(s => !stagesCompleted.has(s));
		const artifactsInProgress = artifacts.filter(a => processing.has(a.name));

		if (stagesInProgress.length === 0 && artifacts.length === done.size + processing.size) {
			break;
		} else if (stagesInProgress.length > 0) {
			console.log('Stages in progress:', stagesInProgress.join(', '));
		} else if (artifactsInProgress.length > 0) {
			console.log('Artifacts in progress:', artifactsInProgress.map(a => a.name).join(', '));
		} else {
			console.log(`Waiting for a total of ${artifacts.length}, ${done.size} done, ${processing.size} in progress...`);
		}

		for (const artifact of artifacts) {
			// Skip artifacts already published or currently in flight.
			if (done.has(artifact.name) || processing.has(artifact.name)) {
				continue;
			}

			console.log(`[${artifact.name}] Found new artifact`);

			const artifactZipPath = path.join(e('AGENT_TEMPDIRECTORY'), `${artifact.name}.zip`);

			// Download serially in the main thread (deliberately — see the
			// throttling comment above this function) and log throughput.
			await retry(async (attempt) => {
				const start = Date.now();
				console.log(`[${artifact.name}] Downloading (attempt ${attempt})...`);
				await downloadArtifact(artifact, artifactZipPath);
				const archiveSize = fs.statSync(artifactZipPath).size;
				const downloadDurationS = (Date.now() - start) / 1000;
				const downloadSpeedKBS = Math.round((archiveSize / 1024) / downloadDurationS);
				console.log(`[${artifact.name}] Successfully downloaded after ${Math.floor(downloadDurationS)} seconds(${downloadSpeedKBS} KB/s).`);
			});

			const artifactFilePaths = await unzip(artifactZipPath, e('AGENT_TEMPDIRECTORY'));
			// Skip `_manifest` entries (presumably SBOM output — confirm) and
			// take the first remaining file as the payload to publish.
			const artifactFilePath = artifactFilePaths.filter(p => !/_manifest/.test(p))[0];

			processing.add(artifact.name);
			// Spawn this same script as a worker; it re-enters main() with
			// isMainThread === false and runs processArtifact (see top).
			const promise = new Promise<void>((resolve, reject) => {
				const worker = new Worker(import.meta.filename, { workerData: { artifact, artifactFilePath } });
				worker.on('error', reject);
				worker.on('exit', code => {
					if (code === 0) {
						resolve();
					} else {
						reject(new Error(`[${artifact.name}] Worker stopped with exit code ${code}`));
					}
				});
			});

			// On success, move the artifact from `processing` to `done`.
			const operation = promise.then(() => {
				processing.delete(artifact.name);
				done.add(artifact.name);
				console.log(`\u2705 ${artifact.name} `);
			});

			operations.push({ name: artifact.name, operation });
			resultPromise = Promise.allSettled(operations.map(o => o.operation));
		}

		// Space out polling iterations (and therefore downloads) by 10 seconds.
		await new Promise(c => setTimeout(c, 10_000));
	}

	console.log(`Found all ${done.size + processing.size} artifacts, waiting for ${processing.size} artifacts to finish publishing...`);

	const artifactsInProgress = operations.filter(o => processing.has(o.name));

	if (artifactsInProgress.length > 0) {
		console.log('Artifacts in progress:', artifactsInProgress.map(a => a.name).join(', '));
	}

	// Wait for every worker operation; allSettled so one failure doesn't hide others.
	const results = await resultPromise;

	// Report each failed publish with its artifact name for easier triage.
	for (let i = 0; i < operations.length; i++) {
		const result = results[i];

		if (result.status === 'rejected') {
			console.error(`[${operations[i].name}]`, result.reason);
		}
	}

	// Fail the job if any of the artifacts failed to publish
	if (results.some(r => r.status === 'rejected')) {
		throw new Error('Some artifacts failed to publish');
	}

	// Also fail the job if any of the stages did not succeed
	let shouldFail = false;

	for (const stage of stages) {
		// Non-null assertion: the poll loop only exits once every stage in
		// `stages` appeared as a completed Stage record in the timeline.
		const record = timeline.records.find(r => r.name === stage && r.type === 'Stage')!;

		if (record.result !== 'succeeded' && record.result !== 'succeededWithIssues') {
			shouldFail = true;
			console.error(`Stage ${stage} did not succeed: ${record.result}`);
		}
	}

	if (shouldFail) {
		throw new Error('Some stages did not succeed');
	}

	console.log(`All ${done.size} artifacts published!`);
}
1099
1100
// Run only when executed directly (not when loaded as a module). Exit with 0
// on success; log the error and exit with 1 on failure. `process.exit(0)`
// never returns, so chaining `.catch` is equivalent to the two-argument
// `.then(onOk, onErr)` form.
if (import.meta.main) {
	main()
		.then(() => process.exit(0))
		.catch(err => {
			console.error(err);
			process.exit(1);
		});
}
1108
1109