GitHub Repository: microsoft/vscode
Path: blob/main/build/azure-pipelines/common/publish.js
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.e = e;
exports.requestAZDOAPI = requestAZDOAPI;
/*---------------------------------------------------------------------------------------------
 *  Copyright (c) Microsoft Corporation. All rights reserved.
 *  Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const stream_1 = require("stream");
const promises_1 = require("node:stream/promises");
const yauzl_1 = __importDefault(require("yauzl"));
const crypto_1 = __importDefault(require("crypto"));
const retry_1 = require("./retry");
const cosmos_1 = require("@azure/cosmos");
const child_process_1 = __importDefault(require("child_process"));
const os_1 = __importDefault(require("os"));
const node_worker_threads_1 = require("node:worker_threads");
const msal_node_1 = require("@azure/msal-node");
const storage_blob_1 = require("@azure/storage-blob");
const jws_1 = __importDefault(require("jws"));
const node_timers_1 = require("node:timers");
// Returns the value of a required environment variable, throwing if it is unset.
function e(name) {
    const result = process.env[name];
    if (typeof result !== 'string') {
        throw new Error(`Missing env: ${name}`);
    }
    return result;
}
// Computes a digest of a stream's contents using the given algorithm (e.g. 'sha256').
function hashStream(hashName, stream) {
    return new Promise((c, e) => {
        const shasum = crypto_1.default.createHash(hashName);
        stream
            .on('data', shasum.update.bind(shasum))
            .on('error', e)
            .on('close', () => c(shasum.digest()));
    });
}
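// A minimal usage sketch (hypothetical file name; assumes the read stream
// emits 'close' once fully consumed, as fs streams do):
//
//   const digest = await hashStream('sha256', fs_1.default.createReadStream('out.zip'));
//   console.log(digest.toString('hex'));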
var StatusCode;
(function (StatusCode) {
    StatusCode["Pass"] = "pass";
    StatusCode["Aborted"] = "aborted";
    StatusCode["Inprogress"] = "inprogress";
    StatusCode["FailCanRetry"] = "failCanRetry";
    StatusCode["FailDoNotRetry"] = "failDoNotRetry";
    StatusCode["PendingAnalysis"] = "pendingAnalysis";
    StatusCode["Cancelled"] = "cancelled";
})(StatusCode || (StatusCode = {}));
// Strips the PEM armor and decodes the remaining base64 into raw DER bytes.
function getCertificateBuffer(input) {
    return Buffer.from(input.replace(/-----BEGIN CERTIFICATE-----|-----END CERTIFICATE-----|\n/g, ''), 'base64');
}
// Computes the certificate thumbprint: a hash over the DER-encoded certificate.
function getThumbprint(input, algorithm) {
    const buffer = getCertificateBuffer(input);
    return crypto_1.default.createHash(algorithm).update(buffer).digest();
}
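// For reference, the same value can be cross-checked with openssl (sketch,
// assuming the certificate is available in PEM form as cert.pem):
//
//   openssl x509 -in cert.pem -noout -fingerprint -sha256
//
// openssl prints colon-separated uppercase hex; the digest above is the same
// bytes without separators.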
// Extracts the private key from a base64-encoded PFX by shelling out to
// openssl, using temporary files that are removed afterwards.
function getKeyFromPFX(pfx) {
    const pfxCertificatePath = path_1.default.join(os_1.default.tmpdir(), 'cert.pfx');
    const pemKeyPath = path_1.default.join(os_1.default.tmpdir(), 'key.pem');
    try {
        const pfxCertificate = Buffer.from(pfx, 'base64');
        fs_1.default.writeFileSync(pfxCertificatePath, pfxCertificate);
        child_process_1.default.execSync(`openssl pkcs12 -in "${pfxCertificatePath}" -nocerts -nodes -out "${pemKeyPath}" -passin pass:`);
        const raw = fs_1.default.readFileSync(pemKeyPath, 'utf-8');
        const result = raw.match(/-----BEGIN PRIVATE KEY-----[\s\S]+?-----END PRIVATE KEY-----/g)[0];
        return result;
    }
    finally {
        fs_1.default.rmSync(pfxCertificatePath, { force: true });
        fs_1.default.rmSync(pemKeyPath, { force: true });
    }
}
// Extracts the certificate chain from a base64-encoded PFX, again via openssl.
function getCertificatesFromPFX(pfx) {
    const pfxCertificatePath = path_1.default.join(os_1.default.tmpdir(), 'cert.pfx');
    const pemCertificatePath = path_1.default.join(os_1.default.tmpdir(), 'cert.pem');
    try {
        const pfxCertificate = Buffer.from(pfx, 'base64');
        fs_1.default.writeFileSync(pfxCertificatePath, pfxCertificate);
        child_process_1.default.execSync(`openssl pkcs12 -in "${pfxCertificatePath}" -nokeys -out "${pemCertificatePath}" -passin pass:`);
        const raw = fs_1.default.readFileSync(pemCertificatePath, 'utf-8');
        const matches = raw.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g);
        return matches ? matches.reverse() : [];
    }
    finally {
        fs_1.default.rmSync(pfxCertificatePath, { force: true });
        fs_1.default.rmSync(pemCertificatePath, { force: true });
    }
}
class ESRPReleaseService {
    log;
    clientId;
    accessToken;
    requestSigningCertificates;
    requestSigningKey;
    containerClient;
    stagingSasToken;
    static async create(log, tenantId, clientId, authCertificatePfx, requestSigningCertificatePfx, containerClient, stagingSasToken) {
        const authKey = getKeyFromPFX(authCertificatePfx);
        const authCertificate = getCertificatesFromPFX(authCertificatePfx)[0];
        const requestSigningKey = getKeyFromPFX(requestSigningCertificatePfx);
        const requestSigningCertificates = getCertificatesFromPFX(requestSigningCertificatePfx);
        const app = new msal_node_1.ConfidentialClientApplication({
            auth: {
                clientId,
                authority: `https://login.microsoftonline.com/${tenantId}`,
                clientCertificate: {
                    thumbprintSha256: getThumbprint(authCertificate, 'sha256').toString('hex'),
                    privateKey: authKey,
                    x5c: authCertificate
                }
            }
        });
        const response = await app.acquireTokenByClientCredential({
            scopes: ['https://api.esrp.microsoft.com/.default']
        });
        return new ESRPReleaseService(log, clientId, response.accessToken, requestSigningCertificates, requestSigningKey, containerClient, stagingSasToken);
    }
    static API_URL = 'https://api.esrp.microsoft.com/api/v3/releaseservices/clients/';
    constructor(log, clientId, accessToken, requestSigningCertificates, requestSigningKey, containerClient, stagingSasToken) {
        this.log = log;
        this.clientId = clientId;
        this.accessToken = accessToken;
        this.requestSigningCertificates = requestSigningCertificates;
        this.requestSigningKey = requestSigningKey;
        this.containerClient = containerClient;
        this.stagingSasToken = stagingSasToken;
    }
    async createRelease(version, filePath, friendlyFileName) {
        const correlationId = crypto_1.default.randomUUID();
        const blobClient = this.containerClient.getBlockBlobClient(correlationId);
        this.log(`Uploading ${filePath} to ${blobClient.url}`);
        await blobClient.uploadFile(filePath);
        this.log('Uploaded blob successfully');
        try {
            this.log(`Submitting release for ${version}: ${filePath}`);
            const submitReleaseResult = await this.submitRelease(version, filePath, friendlyFileName, correlationId, blobClient);
            this.log(`Successfully submitted release ${submitReleaseResult.operationId}. Polling for completion...`);
            // Poll every 5 seconds, wait 60 minutes max -> poll 60/5*60=720 times
            for (let i = 0; i < 720; i++) {
                await new Promise(c => setTimeout(c, 5000));
                const releaseStatus = await this.getReleaseStatus(submitReleaseResult.operationId);
                if (releaseStatus.status === 'pass') {
                    break;
                }
                else if (releaseStatus.status === 'aborted') {
                    this.log(JSON.stringify(releaseStatus));
                    throw new Error(`Release was aborted`);
                }
                else if (releaseStatus.status !== 'inprogress') {
                    this.log(JSON.stringify(releaseStatus));
                    throw new Error(`Unknown error when polling for release`);
                }
            }
            const releaseDetails = await this.getReleaseDetails(submitReleaseResult.operationId);
            if (releaseDetails.status !== 'pass') {
                throw new Error(`Timed out waiting for release: ${JSON.stringify(releaseDetails)}`);
            }
            this.log('Successfully created release:', releaseDetails.files[0].fileDownloadDetails[0].downloadUrl);
            return releaseDetails.files[0].fileDownloadDetails[0].downloadUrl;
        }
        finally {
            this.log(`Deleting blob ${blobClient.url}`);
            await blobClient.delete();
            this.log('Deleted blob successfully');
        }
    }
    async submitRelease(version, filePath, friendlyFileName, correlationId, blobClient) {
        const size = fs_1.default.statSync(filePath).size;
        const hash = await hashStream('sha256', fs_1.default.createReadStream(filePath));
        const blobUrl = `${blobClient.url}?${this.stagingSasToken}`;
        const message = {
            customerCorrelationId: correlationId,
            esrpCorrelationId: correlationId,
            driEmail: ['[email protected]'],
            createdBy: { userPrincipalName: '[email protected]' },
            owners: [{ owner: { userPrincipalName: '[email protected]' } }],
            approvers: [{ approver: { userPrincipalName: '[email protected]' }, isAutoApproved: true, isMandatory: false }],
            releaseInfo: {
                title: 'VS Code',
                properties: {
                    'ReleaseContentType': 'InstallPackage'
                },
                minimumNumberOfApprovers: 1
            },
            productInfo: {
                name: 'VS Code',
                version,
                description: 'VS Code'
            },
            accessPermissionsInfo: {
                mainPublisher: 'VSCode',
                channelDownloadEntityDetails: {
                    AllDownloadEntities: ['VSCode']
                }
            },
            routingInfo: {
                intent: 'filedownloadlinkgeneration'
            },
            files: [{
                    name: path_1.default.basename(filePath),
                    friendlyFileName,
                    tenantFileLocation: blobUrl,
                    tenantFileLocationType: 'AzureBlob',
                    sourceLocation: {
                        type: 'azureBlob',
                        blobUrl
                    },
                    hashType: 'sha256',
                    hash: Array.from(hash),
                    sizeInBytes: size
                }]
        };
        message.jwsToken = await this.generateJwsToken(message);
        const res = await fetch(`${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
                'Authorization': `Bearer ${this.accessToken}`
            },
            body: JSON.stringify(message)
        });
        if (!res.ok) {
            const text = await res.text();
            throw new Error(`Failed to submit release: ${res.statusText}\n${text}`);
        }
        return await res.json();
    }
    async getReleaseStatus(releaseId) {
        const url = `${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations/grs/${releaseId}`;
        const res = await (0, retry_1.retry)(() => fetch(url, {
            headers: {
                'Authorization': `Bearer ${this.accessToken}`
            }
        }));
        if (!res.ok) {
            const text = await res.text();
            throw new Error(`Failed to get release status: ${res.statusText}\n${text}`);
        }
        return await res.json();
    }
    async getReleaseDetails(releaseId) {
        const url = `${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations/grd/${releaseId}`;
        const res = await (0, retry_1.retry)(() => fetch(url, {
            headers: {
                'Authorization': `Bearer ${this.accessToken}`
            }
        }));
        if (!res.ok) {
            const text = await res.text();
            throw new Error(`Failed to get release details: ${res.statusText}\n${text}`);
        }
        return await res.json();
    }
    async generateJwsToken(message) {
        return jws_1.default.sign({
            header: {
                alg: 'RS256',
                crit: ['exp', 'x5t'],
                // Release service uses ticks, not seconds :roll_eyes: (https://stackoverflow.com/a/7968483)
                // 1 .NET tick = 100 ns, so Unix milliseconds * 10,000 plus the
                // 621,355,968,000,000,000 ticks between 0001-01-01 and the Unix epoch
                // gives an expiry 6 minutes from now, in ticks.
                exp: ((Date.now() + (6 * 60 * 1000)) * 10000) + 621355968000000000,
                // Release service uses hex format, not base64url :roll_eyes:
                x5t: getThumbprint(this.requestSigningCertificates[0], 'sha1').toString('hex'),
                // Release service uses a '.' separated string, not an array of strings :roll_eyes:
                x5c: this.requestSigningCertificates.map(c => getCertificateBuffer(c).toString('base64url')).join('.'),
            },
            payload: message,
            privateKey: this.requestSigningKey,
        });
    }
}
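// A minimal usage sketch (hypothetical placeholder values; the real call site
// is in processArtifact below):
//
//   const service = await ESRPReleaseService.create(console.log, '<tenant-id>', '<client-id>',
//       process.env['RELEASE_AUTH_CERT'], process.env['RELEASE_REQUEST_SIGNING_CERT'],
//       stagingContainerClient, stagingSasToken);
//   const downloadUrl = await service.createRelease('1.95.0', '/tmp/VSCode-win32-x64.zip',
//       'stable/abc123/VSCode-win32-x64.zip');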
// Tracks which artifacts have already been processed across stage attempts.
// State is persisted as one artifact name per line in a text file inside an
// artifacts_processed_<attempt> directory, seeded from the latest previous attempt.
class State {
    statePath;
    set = new Set();
    constructor() {
        const pipelineWorkspacePath = e('PIPELINE_WORKSPACE');
        const previousState = fs_1.default.readdirSync(pipelineWorkspacePath)
            .map(name => /^artifacts_processed_(\d+)$/.exec(name))
            .filter((match) => !!match)
            .map(match => ({ name: match[0], attempt: Number(match[1]) }))
            .sort((a, b) => b.attempt - a.attempt)[0];
        if (previousState) {
            const previousStatePath = path_1.default.join(pipelineWorkspacePath, previousState.name, previousState.name + '.txt');
            fs_1.default.readFileSync(previousStatePath, 'utf8').split(/\n/).filter(name => !!name).forEach(name => this.set.add(name));
        }
        const stageAttempt = e('SYSTEM_STAGEATTEMPT');
        this.statePath = path_1.default.join(pipelineWorkspacePath, `artifacts_processed_${stageAttempt}`, `artifacts_processed_${stageAttempt}.txt`);
        fs_1.default.mkdirSync(path_1.default.dirname(this.statePath), { recursive: true });
        fs_1.default.writeFileSync(this.statePath, [...this.set.values()].map(name => `${name}\n`).join(''));
    }
    get size() {
        return this.set.size;
    }
    has(name) {
        return this.set.has(name);
    }
    add(name) {
        this.set.add(name);
        fs_1.default.appendFileSync(this.statePath, `${name}\n`);
    }
    [Symbol.iterator]() {
        return this.set[Symbol.iterator]();
    }
}
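// For illustration, on the third attempt of the stage the workspace might look
// like this (hypothetical layout; the constructor seeds itself from the highest
// previous attempt, here attempt 2):
//
//   $PIPELINE_WORKSPACE/
//     artifacts_processed_1/artifacts_processed_1.txt
//     artifacts_processed_2/artifacts_processed_2.txt   <- read on attempt 3
//     artifacts_processed_3/artifacts_processed_3.txt   <- written by this run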
const azdoFetchOptions = {
    headers: {
        // Pretend we're a web browser to avoid download rate limits
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.0.0',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'en-US,en;q=0.9',
        'Referer': 'https://dev.azure.com',
        Authorization: `Bearer ${e('SYSTEM_ACCESSTOKEN')}`
    }
};
// Fetches a path from the Azure DevOps build API, aborting after two minutes.
async function requestAZDOAPI(path) {
    const abortController = new AbortController();
    const timeout = setTimeout(() => abortController.abort(), 2 * 60 * 1000);
    try {
        const res = await (0, retry_1.retry)(() => fetch(`${e('BUILDS_API_URL')}${path}?api-version=6.0`, { ...azdoFetchOptions, signal: abortController.signal }));
        if (!res.ok) {
            throw new Error(`Unexpected status code: ${res.status}`);
        }
        return await res.json();
    }
    finally {
        clearTimeout(timeout);
    }
}
async function getPipelineArtifacts() {
    const result = await requestAZDOAPI('artifacts');
    return result.value.filter(a => /^vscode_/.test(a.name) && !/sbom$/.test(a.name));
}
async function getPipelineTimeline() {
    return await requestAZDOAPI('timeline');
}
// Streams an artifact's zip from AZDO to disk, aborting after four minutes.
async function downloadArtifact(artifact, downloadPath) {
    const abortController = new AbortController();
    const timeout = setTimeout(() => abortController.abort(), 4 * 60 * 1000);
    try {
        const res = await fetch(artifact.resource.downloadUrl, { ...azdoFetchOptions, signal: abortController.signal });
        if (!res.ok) {
            throw new Error(`Unexpected status code: ${res.status}`);
        }
        await (0, promises_1.pipeline)(stream_1.Readable.fromWeb(res.body), fs_1.default.createWriteStream(downloadPath));
    }
    finally {
        clearTimeout(timeout);
    }
}
// Extracts a zip archive to outputPath and resolves with the extracted file paths.
async function unzip(packagePath, outputPath) {
    return new Promise((resolve, reject) => {
        yauzl_1.default.open(packagePath, { lazyEntries: true, autoClose: true }, (err, zipfile) => {
            if (err) {
                return reject(err);
            }
            const result = [];
            zipfile.on('entry', entry => {
                // Directory entries end with '/' and only need to advance the cursor
                if (/\/$/.test(entry.fileName)) {
                    zipfile.readEntry();
                }
                else {
                    zipfile.openReadStream(entry, (err, istream) => {
                        if (err) {
                            return reject(err);
                        }
                        const filePath = path_1.default.join(outputPath, entry.fileName);
                        fs_1.default.mkdirSync(path_1.default.dirname(filePath), { recursive: true });
                        const ostream = fs_1.default.createWriteStream(filePath);
                        ostream.on('finish', () => {
                            result.push(filePath);
                            zipfile.readEntry();
                        });
                        istream?.on('error', err => reject(err));
                        istream.pipe(ostream);
                    });
                }
            });
            zipfile.on('close', () => resolve(result));
            zipfile.readEntry();
        });
    });
}
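// A minimal usage sketch (hypothetical paths):
//
//   const files = await unzip('/tmp/vscode_client_win32_x64_archive.zip', '/tmp/out');
//   // files -> ['/tmp/out/VSCode-win32-x64.zip', ...]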
// Contains all of the logic for mapping details to our actual product names in CosmosDB
function getPlatform(product, os, arch, type) {
    switch (os) {
        case 'win32':
            switch (product) {
                case 'client': {
                    switch (type) {
                        case 'archive':
                            return `win32-${arch}-archive`;
                        case 'setup':
                            return `win32-${arch}`;
                        case 'user-setup':
                            return `win32-${arch}-user`;
                        default:
                            throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
                    }
                }
                case 'server':
                    return `server-win32-${arch}`;
                case 'web':
                    return `server-win32-${arch}-web`;
                case 'cli':
                    return `cli-win32-${arch}`;
                default:
                    throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
            }
        case 'alpine':
            switch (product) {
                case 'server':
                    return `server-alpine-${arch}`;
                case 'web':
                    return `server-alpine-${arch}-web`;
                case 'cli':
                    return `cli-alpine-${arch}`;
                default:
                    throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
            }
        case 'linux':
            switch (type) {
                case 'snap':
                    return `linux-snap-${arch}`;
                case 'archive-unsigned':
                    switch (product) {
                        case 'client':
                            return `linux-${arch}`;
                        case 'server':
                            return `server-linux-${arch}`;
                        case 'web':
                            if (arch === 'standalone') {
                                return 'web-standalone';
                            }
                            return `server-linux-${arch}-web`;
                        default:
                            throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
                    }
                case 'deb-package':
                    return `linux-deb-${arch}`;
                case 'rpm-package':
                    return `linux-rpm-${arch}`;
                case 'cli':
                    return `cli-linux-${arch}`;
                default:
                    throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
            }
        case 'darwin':
            switch (product) {
                case 'client':
                    if (arch === 'x64') {
                        return 'darwin';
                    }
                    return `darwin-${arch}`;
                case 'server':
                    if (arch === 'x64') {
                        return 'server-darwin';
                    }
                    return `server-darwin-${arch}`;
                case 'web':
                    if (arch === 'x64') {
                        return 'server-darwin-web';
                    }
                    return `server-darwin-${arch}-web`;
                case 'cli':
                    return `cli-darwin-${arch}`;
                default:
                    throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
            }
        default:
            throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`);
    }
}
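// Example mappings, read straight off the switch above:
//
//   getPlatform('client', 'win32', 'x64', 'user-setup')         -> 'win32-x64-user'
//   getPlatform('server', 'linux', 'arm64', 'archive-unsigned') -> 'server-linux-arm64'
//   getPlatform('client', 'darwin', 'x64', 'archive')           -> 'darwin'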
// Contains all of the logic for mapping types to our actual types in CosmosDB
function getRealType(type) {
    switch (type) {
        case 'user-setup':
            return 'setup';
        case 'deb-package':
        case 'rpm-package':
            return 'package';
        default:
            return type;
    }
}
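// e.g. getRealType('user-setup') -> 'setup' and getRealType('rpm-package') -> 'package',
// while anything else ('archive', 'cli', ...) passes through unchanged.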
// Runs fn while holding a 60-second blob lease that is renewed every 30 seconds,
// so that concurrent pipeline jobs cannot publish the same asset at once.
async function withLease(client, fn) {
    const lease = client.getBlobLeaseClient();
    for (let i = 0; i < 360; i++) { // Try to get lease for 30 minutes
        try {
            await client.uploadData(new ArrayBuffer()); // blob needs to exist for lease to be acquired
            await lease.acquireLease(60);
            try {
                const abortController = new AbortController();
                const refresher = new Promise((c, e) => {
                    abortController.signal.onabort = () => {
                        (0, node_timers_1.clearInterval)(interval);
                        c();
                    };
                    const interval = (0, node_timers_1.setInterval)(() => {
                        lease.renewLease().catch(err => {
                            (0, node_timers_1.clearInterval)(interval);
                            e(new Error('Failed to renew lease ' + err));
                        });
                    }, 30_000);
                });
                const result = await Promise.race([fn(), refresher]);
                abortController.abort();
                return result;
            }
            finally {
                await lease.releaseLease();
            }
        }
        catch (err) {
            // 409/412 mean another job currently holds the lease; back off and retry
            if (err.statusCode !== 409 && err.statusCode !== 412) {
                throw err;
            }
            await new Promise(c => setTimeout(c, 5000));
        }
    }
    throw new Error('Failed to acquire lease on blob after 30 minutes');
}
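// A minimal usage sketch (hypothetical blob name):
//
//   await withLease(leasesContainerClient.getBlockBlobClient('stable/abc123/file.zip'),
//       async () => { /* publish work that must not run concurrently */ });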
async function processArtifact(artifact, filePath) {
    const log = (...args) => console.log(`[${artifact.name}]`, ...args);
    const match = /^vscode_(?<product>[^_]+)_(?<os>[^_]+)(?:_legacy)?_(?<arch>[^_]+)_(?<unprocessedType>[^_]+)$/.exec(artifact.name);
    if (!match) {
        throw new Error(`Invalid artifact name: ${artifact.name}`);
    }
    const { cosmosDBAccessToken, blobServiceAccessToken } = JSON.parse(e('PUBLISH_AUTH_TOKENS'));
    const quality = e('VSCODE_QUALITY');
    const version = e('BUILD_SOURCEVERSION');
    const friendlyFileName = `${quality}/${version}/${path_1.default.basename(filePath)}`;
    const blobServiceClient = new storage_blob_1.BlobServiceClient(`https://${e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')}.blob.core.windows.net/`, { getToken: async () => blobServiceAccessToken });
    const leasesContainerClient = blobServiceClient.getContainerClient('leases');
    await leasesContainerClient.createIfNotExists();
    const leaseBlobClient = leasesContainerClient.getBlockBlobClient(friendlyFileName);
    log(`Acquiring lease for: ${friendlyFileName}`);
    await withLease(leaseBlobClient, async () => {
        log(`Successfully acquired lease for: ${friendlyFileName}`);
        const url = `${e('PRSS_CDN_URL')}/${friendlyFileName}`;
        const res = await (0, retry_1.retry)(() => fetch(url));
        if (res.status === 200) {
            log(`Already released and provisioned: ${url}`);
        }
        else {
            // Stage the file and create an ESRP release, using a short-lived,
            // read-only SAS token scoped to the staging container
            const stagingContainerClient = blobServiceClient.getContainerClient('staging');
            await stagingContainerClient.createIfNotExists();
            const now = new Date().valueOf();
            const oneHour = 60 * 60 * 1000;
            const oneHourAgo = new Date(now - oneHour);
            const oneHourFromNow = new Date(now + oneHour);
            const userDelegationKey = await blobServiceClient.getUserDelegationKey(oneHourAgo, oneHourFromNow);
            const sasOptions = { containerName: 'staging', permissions: storage_blob_1.ContainerSASPermissions.from({ read: true }), startsOn: oneHourAgo, expiresOn: oneHourFromNow };
            const stagingSasToken = (0, storage_blob_1.generateBlobSASQueryParameters)(sasOptions, userDelegationKey, e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')).toString();
            const releaseService = await ESRPReleaseService.create(log, e('RELEASE_TENANT_ID'), e('RELEASE_CLIENT_ID'), e('RELEASE_AUTH_CERT'), e('RELEASE_REQUEST_SIGNING_CERT'), stagingContainerClient, stagingSasToken);
            await releaseService.createRelease(version, filePath, friendlyFileName);
        }
        const { product, os, arch, unprocessedType } = match.groups;
        const platform = getPlatform(product, os, arch, unprocessedType);
        const type = getRealType(unprocessedType);
        const size = fs_1.default.statSync(filePath).size;
        const stream = fs_1.default.createReadStream(filePath);
        const [hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]); // CodeQL [SM04514] Using SHA1 only for legacy reasons, we are actually only respecting SHA256
        const asset = { platform, type, url, hash: hash.toString('hex'), sha256hash: sha256hash.toString('hex'), size, supportsFastUpdate: true };
        log('Creating asset...');
        const result = await (0, retry_1.retry)(async (attempt) => {
            log(`Creating asset in Cosmos DB (attempt ${attempt})...`);
            const client = new cosmos_1.CosmosClient({ endpoint: e('AZURE_DOCUMENTDB_ENDPOINT'), tokenProvider: () => Promise.resolve(`type=aad&ver=1.0&sig=${cosmosDBAccessToken.token}`) });
            const scripts = client.database('builds').container(quality).scripts;
            const { resource: result } = await scripts.storedProcedure('createAsset').execute('', [version, asset, true]);
            return result;
        });
        if (result === 'already exists') {
            log('Asset already exists!');
        }
        else {
            log('Asset successfully created:', JSON.stringify(asset, undefined, 2));
        }
    });
    log(`Successfully released lease for: ${friendlyFileName}`);
}
// It is VERY important that we don't download artifacts too much too fast from AZDO.
// AZDO throttles us SEVERELY if we do. Not just that, but they also close open
// sockets, so the whole thing grinds to a halt. So, downloading and extracting
// happens serially in the main thread, making sure the downloads are spaced out
// properly. For each extracted artifact, we spawn a worker thread to upload it to
// the CDN and finally update the build in Cosmos DB.
async function main() {
    if (!node_worker_threads_1.isMainThread) {
        const { artifact, artifactFilePath } = node_worker_threads_1.workerData;
        await processArtifact(artifact, artifactFilePath);
        return;
    }
    const done = new State();
    const processing = new Set();
    for (const name of done) {
        console.log(`\u2705 ${name}`);
    }
    const stages = new Set(['Compile']);
    if (e('VSCODE_BUILD_STAGE_LINUX') === 'True' ||
        e('VSCODE_BUILD_STAGE_ALPINE') === 'True' ||
        e('VSCODE_BUILD_STAGE_MACOS') === 'True' ||
        e('VSCODE_BUILD_STAGE_WINDOWS') === 'True') {
        stages.add('CompileCLI');
    }
    if (e('VSCODE_BUILD_STAGE_WINDOWS') === 'True') {
        stages.add('Windows');
    }
    if (e('VSCODE_BUILD_STAGE_LINUX') === 'True') {
        stages.add('Linux');
    }
    if (e('VSCODE_BUILD_STAGE_ALPINE') === 'True') {
        stages.add('Alpine');
    }
    if (e('VSCODE_BUILD_STAGE_MACOS') === 'True') {
        stages.add('macOS');
    }
    if (e('VSCODE_BUILD_STAGE_WEB') === 'True') {
        stages.add('Web');
    }
    let timeline;
    let artifacts;
    let resultPromise = Promise.resolve([]);
    const operations = [];
    while (true) {
        [timeline, artifacts] = await Promise.all([(0, retry_1.retry)(() => getPipelineTimeline()), (0, retry_1.retry)(() => getPipelineArtifacts())]);
        const stagesCompleted = new Set(timeline.records.filter(r => r.type === 'Stage' && r.state === 'completed' && stages.has(r.name)).map(r => r.name));
        const stagesInProgress = [...stages].filter(s => !stagesCompleted.has(s));
        const artifactsInProgress = artifacts.filter(a => processing.has(a.name));
        if (stagesInProgress.length === 0 && artifacts.length === done.size + processing.size) {
            break;
        }
        else if (stagesInProgress.length > 0) {
            console.log('Stages in progress:', stagesInProgress.join(', '));
        }
        else if (artifactsInProgress.length > 0) {
            console.log('Artifacts in progress:', artifactsInProgress.map(a => a.name).join(', '));
        }
        else {
            console.log(`Waiting for a total of ${artifacts.length}, ${done.size} done, ${processing.size} in progress...`);
        }
        for (const artifact of artifacts) {
            if (done.has(artifact.name) || processing.has(artifact.name)) {
                continue;
            }
            console.log(`[${artifact.name}] Found new artifact`);
            const artifactZipPath = path_1.default.join(e('AGENT_TEMPDIRECTORY'), `${artifact.name}.zip`);
            await (0, retry_1.retry)(async (attempt) => {
                const start = Date.now();
                console.log(`[${artifact.name}] Downloading (attempt ${attempt})...`);
                await downloadArtifact(artifact, artifactZipPath);
                const archiveSize = fs_1.default.statSync(artifactZipPath).size;
                const downloadDurationS = (Date.now() - start) / 1000;
                const downloadSpeedKBS = Math.round((archiveSize / 1024) / downloadDurationS);
                console.log(`[${artifact.name}] Successfully downloaded after ${Math.floor(downloadDurationS)} seconds (${downloadSpeedKBS} KB/s).`);
            });
            const artifactFilePaths = await unzip(artifactZipPath, e('AGENT_TEMPDIRECTORY'));
            const artifactFilePath = artifactFilePaths.filter(p => !/_manifest/.test(p))[0];
            processing.add(artifact.name);
            const promise = new Promise((resolve, reject) => {
                const worker = new node_worker_threads_1.Worker(__filename, { workerData: { artifact, artifactFilePath } });
                worker.on('error', reject);
                worker.on('exit', code => {
                    if (code === 0) {
                        resolve();
                    }
                    else {
                        reject(new Error(`[${artifact.name}] Worker stopped with exit code ${code}`));
                    }
                });
            });
            const operation = promise.then(() => {
                processing.delete(artifact.name);
                done.add(artifact.name);
                console.log(`\u2705 ${artifact.name}`);
            });
            operations.push({ name: artifact.name, operation });
            resultPromise = Promise.allSettled(operations.map(o => o.operation));
        }
        await new Promise(c => setTimeout(c, 10_000));
    }
    console.log(`Found all ${done.size + processing.size} artifacts, waiting for ${processing.size} artifacts to finish publishing...`);
    const artifactsInProgress = operations.filter(o => processing.has(o.name));
    if (artifactsInProgress.length > 0) {
        console.log('Artifacts in progress:', artifactsInProgress.map(a => a.name).join(', '));
    }
    const results = await resultPromise;
    for (let i = 0; i < operations.length; i++) {
        const result = results[i];
        if (result.status === 'rejected') {
            console.error(`[${operations[i].name}]`, result.reason);
        }
    }
    // Fail the job if any of the artifacts failed to publish
    if (results.some(r => r.status === 'rejected')) {
        throw new Error('Some artifacts failed to publish');
    }
    // Also fail the job if any of the stages did not succeed
    let shouldFail = false;
    for (const stage of stages) {
        const record = timeline.records.find(r => r.name === stage && r.type === 'Stage');
        if (record.result !== 'succeeded' && record.result !== 'succeededWithIssues') {
            shouldFail = true;
            console.error(`Stage ${stage} did not succeed: ${record.result}`);
        }
    }
    if (shouldFail) {
        throw new Error('Some stages did not succeed');
    }
    console.log(`All ${done.size} artifacts published!`);
}
if (require.main === module) {
    main().then(() => {
        process.exit(0);
    }, err => {
        console.error(err);
        process.exit(1);
    });
}
//# sourceMappingURL=publish.js.map