chore(vscode): update to 1.53.2
The resulting merge conflicts will be resolved in the follow-up commits; we split the work this way so that PR review is possible.
@@ -1 +1 @@
2020-11-30T16:21:34.566Z
2021-01-28T11:52:11.376Z

3 lib/vscode/build/.gitignore vendored
@@ -1,3 +0,0 @@
azure-pipelines/**/*.js
darwin/**/*.js
lib/**/*.js

@@ -1,2 +0,0 @@
node_modules/
*.js
@@ -0,0 +1,25 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
const { dirs } = require('../../npm/dirs');
const ROOT = path.join(__dirname, '../../../');
const shasum = crypto.createHash('sha1');
shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt')));
shasum.update(fs.readFileSync(path.join(ROOT, '.yarnrc')));
shasum.update(fs.readFileSync(path.join(ROOT, 'remote/.yarnrc')));
// Add `yarn.lock` files
for (let dir of dirs) {
    const yarnLockPath = path.join(ROOT, dir, 'yarn.lock');
    shasum.update(fs.readFileSync(yarnLockPath));
}
// Add any other command line arguments
for (let i = 2; i < process.argv.length; i++) {
    shasum.update(process.argv[i]);
}
process.stdout.write(shasum.digest('hex'));
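For context (not part of the upstream diff): a minimal TypeScript sketch of how this cache-key script is meant to be consumed, mirroring the "Prepare yarn cache flags" pipeline step further down in this commit. The script path and the VSCODE_ARCH / ENABLE_TERRAPIN arguments are taken from that step; everything else is illustrative.

// Sketch: run the cache-key script the way the pipeline does and store the digest.
import { execFileSync } from 'child_process';
import { mkdirSync, writeFileSync } from 'fs';

const arch = process.env.VSCODE_ARCH ?? 'x64';          // assumption: defaults for a local run
const terrapin = process.env.ENABLE_TERRAPIN ?? 'false';

// The script prints a hex SHA-1 over build/.cachesalt, the .yarnrc files,
// every yarn.lock listed in build/npm/dirs, plus any extra CLI arguments.
const hash = execFileSync('node', [
    'build/azure-pipelines/common/computeNodeModulesCacheKey.js',
    arch,
    terrapin,
], { encoding: 'utf8' });

mkdirSync('.build', { recursive: true });
writeFileSync('.build/yarnlockhash', hash);
console.log('yarn lock hash:', hash);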
@@ -0,0 +1,32 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import * as fs from 'fs';
import * as path from 'path';
import * as crypto from 'crypto';
const { dirs } = require('../../npm/dirs');

const ROOT = path.join(__dirname, '../../../');

const shasum = crypto.createHash('sha1');

shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt')));
shasum.update(fs.readFileSync(path.join(ROOT, '.yarnrc')));
shasum.update(fs.readFileSync(path.join(ROOT, 'remote/.yarnrc')));

// Add `yarn.lock` files
for (let dir of dirs) {
    const yarnLockPath = path.join(ROOT, dir, 'yarn.lock');
    shasum.update(fs.readFileSync(yarnLockPath));
}

// Add any other command line arguments
for (let i = 2; i < process.argv.length; i++) {
    shasum.update(process.argv[i]);
}

process.stdout.write(shasum.digest('hex'));
94 lib/vscode/build/azure-pipelines/common/createAsset.js Normal file
@@ -0,0 +1,94 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const crypto = require("crypto");
const azure = require("azure-storage");
const mime = require("mime");
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
if (process.argv.length !== 6) {
    console.error('Usage: node createAsset.js PLATFORM TYPE NAME FILE');
    process.exit(-1);
}
function hashStream(hashName, stream) {
    return new Promise((c, e) => {
        const shasum = crypto.createHash(hashName);
        stream
            .on('data', shasum.update.bind(shasum))
            .on('error', e)
            .on('close', () => c(shasum.digest('hex')));
    });
}
async function doesAssetExist(blobService, quality, blobName) {
    const existsResult = await new Promise((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
    return existsResult.exists;
}
async function uploadBlob(blobService, quality, blobName, filePath, fileName) {
    const blobOptions = {
        contentSettings: {
            contentType: mime.lookup(filePath),
            contentDisposition: `attachment; filename="${fileName}"`,
            cacheControl: 'max-age=31536000, public'
        }
    };
    await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, filePath, blobOptions, err => err ? e(err) : c()));
}
function getEnv(name) {
    const result = process.env[name];
    if (typeof result === 'undefined') {
        throw new Error('Missing env: ' + name);
    }
    return result;
}
async function main() {
    const [, , platform, type, fileName, filePath] = process.argv;
    const quality = getEnv('VSCODE_QUALITY');
    const commit = getEnv('BUILD_SOURCEVERSION');
    console.log('Creating asset...');
    const stat = await new Promise((c, e) => fs.stat(filePath, (err, stat) => err ? e(err) : c(stat)));
    const size = stat.size;
    console.log('Size:', size);
    const stream = fs.createReadStream(filePath);
    const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);
    console.log('SHA1:', sha1hash);
    console.log('SHA256:', sha256hash);
    const blobName = commit + '/' + fileName;
    const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
    const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
        .withFilter(new azure.ExponentialRetryPolicyFilter(20));
    const blobExists = await doesAssetExist(blobService, quality, blobName);
    if (blobExists) {
        console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
        return;
    }
    console.log('Uploading blobs to Azure storage...');
    await uploadBlob(blobService, quality, blobName, filePath, fileName);
    console.log('Blobs successfully uploaded.');
    const asset = {
        platform,
        type,
        url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
        hash: sha1hash,
        sha256hash,
        size
    };
    // Remove this if we ever need to rollback fast updates for windows
    if (/win32/.test(platform)) {
        asset.supportsFastUpdate = true;
    }
    console.log('Asset:', JSON.stringify(asset, null, ' '));
    const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
    const scripts = client.database('builds').container(quality).scripts;
    await retry_1.retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
}
main().then(() => {
    console.log('Asset successfully created');
    process.exit(0);
}, err => {
    console.error(err);
    process.exit(1);
});
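As a side note, the SHA1/SHA256 values logged above can be reproduced locally. A minimal standalone sketch in the same spirit as hashStream, using one read stream per digest; the file path is a placeholder and the helper is not part of the upstream script:

// Sketch: compute the digests createAsset.js logs for an arbitrary local file.
import * as crypto from 'crypto';
import * as fs from 'fs';

function hashFile(hashName: string, filePath: string): Promise<string> {
    return new Promise((resolve, reject) => {
        const shasum = crypto.createHash(hashName);
        fs.createReadStream(filePath)
            .on('data', chunk => shasum.update(chunk))
            .on('error', reject)
            .on('close', () => resolve(shasum.digest('hex')));
    });
}

async function main() {
    const filePath = process.argv[2]; // e.g. a VSCode-darwin-x64.zip produced by the build
    const [sha1hash, sha256hash] = await Promise.all([
        hashFile('sha1', filePath),
        hashFile('sha256', filePath),
    ]);
    console.log('SHA1:', sha1hash);
    console.log('SHA256:', sha256hash);
}

main().catch(err => { console.error(err); process.exit(1); });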
51 lib/vscode/build/azure-pipelines/common/createBuild.js Normal file
@@ -0,0 +1,51 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
if (process.argv.length !== 3) {
    console.error('Usage: node createBuild.js VERSION');
    process.exit(-1);
}
function getEnv(name) {
    const result = process.env[name];
    if (typeof result === 'undefined') {
        throw new Error('Missing env: ' + name);
    }
    return result;
}
async function main() {
    const [, , _version] = process.argv;
    const quality = getEnv('VSCODE_QUALITY');
    const commit = getEnv('BUILD_SOURCEVERSION');
    const queuedBy = getEnv('BUILD_QUEUEDBY');
    const sourceBranch = getEnv('BUILD_SOURCEBRANCH');
    const version = _version + (quality === 'stable' ? '' : `-${quality}`);
    console.log('Creating build...');
    console.log('Quality:', quality);
    console.log('Version:', version);
    console.log('Commit:', commit);
    const build = {
        id: commit,
        timestamp: (new Date()).getTime(),
        version,
        isReleased: false,
        sourceBranch,
        queuedBy,
        assets: [],
        updates: {}
    };
    const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
    const scripts = client.database('builds').container(quality).scripts;
    await retry_1.retry(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
}
main().then(() => {
    console.log('Build successfully created');
    process.exit(0);
}, err => {
    console.error(err);
    process.exit(1);
});
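For reference, a sketch of the build document shape that createBuild.js submits to the 'builds' Cosmos DB container. The field list comes from the script above; the interface itself, the example values, and the `-insider` suffix are illustrative assumptions, not upstream code:

// Sketch: the document passed to the createBuild stored procedure.
interface BuildDocument {
    id: string;                        // commit sha (BUILD_SOURCEVERSION)
    timestamp: number;                 // creation time in milliseconds
    version: string;                   // CLI version, suffixed with `-<quality>` unless quality is stable
    isReleased: boolean;               // false at creation time
    sourceBranch: string;
    queuedBy: string;
    assets: unknown[];                 // later filled per platform/type by createAsset
    updates: Record<string, unknown>;
}

const example: BuildDocument = {
    id: 'deadbeef',                    // placeholder commit
    timestamp: Date.now(),
    version: '1.53.2-insider',         // placeholder version/quality
    isReleased: false,
    sourceBranch: 'refs/heads/main',
    queuedBy: 'ci',
    assets: [],
    updates: {},
};
// The script adds _partitionKey: '' before executing the stored procedure.
console.log(JSON.stringify({ ...example, _partitionKey: '' }, null, 2));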
@@ -10,8 +10,8 @@ git clone --depth 1 https://github.com/microsoft/vscode-node-debug2.git
git clone --depth 1 https://github.com/microsoft/vscode-node-debug.git
git clone --depth 1 https://github.com/microsoft/vscode-html-languageservice.git
git clone --depth 1 https://github.com/microsoft/vscode-json-languageservice.git
node $BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints
node $BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o .
node $BUILD_SOURCESDIRECTORY/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints
node $BUILD_SOURCESDIRECTORY/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o .
mkdir -p $BUILD_SOURCESDIRECTORY/.build/telemetry
mv declarations-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-core.json
mv config-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-extensions.json
14 lib/vscode/build/azure-pipelines/common/installPlaywright.js Normal file
@@ -0,0 +1,14 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const retry_1 = require("./retry");
const { installBrowsersWithProgressBar } = require('playwright/lib/install/installer');
const playwrightPath = path.dirname(require.resolve('playwright'));
async function install() {
    await retry_1.retry(() => installBrowsersWithProgressBar(playwrightPath));
}
install();

15 lib/vscode/build/azure-pipelines/common/installPlaywright.ts Normal file
@@ -0,0 +1,15 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as path from 'path';
import { retry } from './retry';
const { installBrowsersWithProgressBar } = require('playwright/lib/install/installer');
const playwrightPath = path.dirname(require.resolve('playwright'));

async function install() {
    await retry(() => installBrowsersWithProgressBar(playwrightPath));
}

install();
40
lib/vscode/build/azure-pipelines/common/listNodeModules.js
Normal file
40
lib/vscode/build/azure-pipelines/common/listNodeModules.js
Normal file
@ -0,0 +1,40 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
if (process.argv.length !== 3) {
|
||||
console.error('Usage: node listNodeModules.js OUTPUT_FILE');
|
||||
process.exit(-1);
|
||||
}
|
||||
const ROOT = path.join(__dirname, '../../../');
|
||||
function findNodeModulesFiles(location, inNodeModules, result) {
|
||||
const entries = fs.readdirSync(path.join(ROOT, location));
|
||||
for (const entry of entries) {
|
||||
const entryPath = `${location}/${entry}`;
|
||||
if (/(^\/out)|(^\/src$)|(^\/.git$)|(^\/.build$)/.test(entryPath)) {
|
||||
continue;
|
||||
}
|
||||
let stat;
|
||||
try {
|
||||
stat = fs.statSync(path.join(ROOT, entryPath));
|
||||
}
|
||||
catch (err) {
|
||||
continue;
|
||||
}
|
||||
if (stat.isDirectory()) {
|
||||
findNodeModulesFiles(entryPath, inNodeModules || (entry === 'node_modules'), result);
|
||||
}
|
||||
else {
|
||||
if (inNodeModules) {
|
||||
result.push(entryPath.substr(1));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
const result = [];
|
||||
findNodeModulesFiles('', false, result);
|
||||
fs.writeFileSync(process.argv[2], result.join('\n') + '\n');
|
46
lib/vscode/build/azure-pipelines/common/listNodeModules.ts
Normal file
46
lib/vscode/build/azure-pipelines/common/listNodeModules.ts
Normal file
@ -0,0 +1,46 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
if (process.argv.length !== 3) {
|
||||
console.error('Usage: node listNodeModules.js OUTPUT_FILE');
|
||||
process.exit(-1);
|
||||
}
|
||||
|
||||
const ROOT = path.join(__dirname, '../../../');
|
||||
|
||||
function findNodeModulesFiles(location: string, inNodeModules: boolean, result: string[]) {
|
||||
const entries = fs.readdirSync(path.join(ROOT, location));
|
||||
for (const entry of entries) {
|
||||
const entryPath = `${location}/${entry}`;
|
||||
|
||||
if (/(^\/out)|(^\/src$)|(^\/.git$)|(^\/.build$)/.test(entryPath)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let stat: fs.Stats;
|
||||
try {
|
||||
stat = fs.statSync(path.join(ROOT, entryPath));
|
||||
} catch (err) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
findNodeModulesFiles(entryPath, inNodeModules || (entry === 'node_modules'), result);
|
||||
} else {
|
||||
if (inNodeModules) {
|
||||
result.push(entryPath.substr(1));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const result: string[] = [];
|
||||
findNodeModulesFiles('', false, result);
|
||||
fs.writeFileSync(process.argv[2], result.join('\n') + '\n');
|
71
lib/vscode/build/azure-pipelines/common/publish-webview.js
Normal file
71
lib/vscode/build/azure-pipelines/common/publish-webview.js
Normal file
@ -0,0 +1,71 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const azure = require("azure-storage");
|
||||
const mime = require("mime");
|
||||
const minimist = require("minimist");
|
||||
const path_1 = require("path");
|
||||
const fileNames = [
|
||||
'fake.html',
|
||||
'host.js',
|
||||
'index.html',
|
||||
'main.js',
|
||||
'service-worker.js'
|
||||
];
|
||||
async function assertContainer(blobService, container) {
|
||||
await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
|
||||
}
|
||||
async function doesBlobExist(blobService, container, blobName) {
|
||||
const existsResult = await new Promise((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
|
||||
return existsResult.exists;
|
||||
}
|
||||
async function uploadBlob(blobService, container, blobName, file) {
|
||||
const blobOptions = {
|
||||
contentSettings: {
|
||||
contentType: mime.lookup(file),
|
||||
cacheControl: 'max-age=31536000, public'
|
||||
}
|
||||
};
|
||||
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
|
||||
}
|
||||
async function publish(commit, files) {
|
||||
console.log('Publishing...');
|
||||
console.log('Commit:', commit);
|
||||
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT'];
|
||||
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY'])
|
||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||
await assertContainer(blobService, commit);
|
||||
for (const file of files) {
|
||||
const blobName = path_1.basename(file);
|
||||
const blobExists = await doesBlobExist(blobService, commit, blobName);
|
||||
if (blobExists) {
|
||||
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
|
||||
continue;
|
||||
}
|
||||
console.log('Uploading blob to Azure storage...');
|
||||
await uploadBlob(blobService, commit, blobName, file);
|
||||
}
|
||||
console.log('Blobs successfully uploaded.');
|
||||
}
|
||||
function main() {
|
||||
const commit = process.env['BUILD_SOURCEVERSION'];
|
||||
if (!commit) {
|
||||
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
|
||||
return;
|
||||
}
|
||||
const opts = minimist(process.argv.slice(2));
|
||||
const [directory] = opts._;
|
||||
const files = fileNames.map(fileName => path_1.join(directory, fileName));
|
||||
publish(commit, files).catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
if (process.argv.length < 3) {
|
||||
console.error('Usage: node publish.js <directory>');
|
||||
process.exit(-1);
|
||||
}
|
||||
main();
|
50
lib/vscode/build/azure-pipelines/common/releaseBuild.js
Normal file
50
lib/vscode/build/azure-pipelines/common/releaseBuild.js
Normal file
@ -0,0 +1,50 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const cosmos_1 = require("@azure/cosmos");
|
||||
const retry_1 = require("./retry");
|
||||
function getEnv(name) {
|
||||
const result = process.env[name];
|
||||
if (typeof result === 'undefined') {
|
||||
throw new Error('Missing env: ' + name);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
function createDefaultConfig(quality) {
|
||||
return {
|
||||
id: quality,
|
||||
frozen: false
|
||||
};
|
||||
}
|
||||
async function getConfig(client, quality) {
|
||||
const query = `SELECT TOP 1 * FROM c WHERE c.id = "${quality}"`;
|
||||
const res = await client.database('builds').container('config').items.query(query).fetchAll();
|
||||
if (res.resources.length === 0) {
|
||||
return createDefaultConfig(quality);
|
||||
}
|
||||
return res.resources[0];
|
||||
}
|
||||
async function main() {
|
||||
const commit = getEnv('BUILD_SOURCEVERSION');
|
||||
const quality = getEnv('VSCODE_QUALITY');
|
||||
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const config = await getConfig(client, quality);
|
||||
console.log('Quality config:', config);
|
||||
if (config.frozen) {
|
||||
console.log(`Skipping release because quality ${quality} is frozen.`);
|
||||
return;
|
||||
}
|
||||
console.log(`Releasing build ${commit}...`);
|
||||
const scripts = client.database('builds').container(quality).scripts;
|
||||
await retry_1.retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
|
||||
}
|
||||
main().then(() => {
|
||||
console.log('Build successfully released');
|
||||
process.exit(0);
|
||||
}, err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
25 lib/vscode/build/azure-pipelines/common/retry.js Normal file
@@ -0,0 +1,25 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.retry = void 0;
async function retry(fn) {
    for (let run = 1; run <= 10; run++) {
        try {
            return await fn();
        }
        catch (err) {
            if (!/ECONNRESET/.test(err.message)) {
                throw err;
            }
            const millis = (Math.random() * 200) + (50 * Math.pow(1.5, run));
            console.log(`Failed with ECONNRESET, retrying in ${millis}ms...`);
            // maximum delay is 10th retry: ~3 seconds
            await new Promise(c => setTimeout(c, millis));
        }
    }
    throw new Error('Retried too many times');
}
exports.retry = retry;
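A short usage sketch for retry(): the other new scripts in this commit (createAsset.js, createBuild.js, installPlaywright.js) wrap their network calls this way, retrying only on ECONNRESET with jittered exponential backoff. The flakyUpload function and the relative import path are illustrative assumptions:

// Sketch: wrap a call that may fail with ECONNRESET, as the build scripts do.
import { retry } from './retry';

async function flakyUpload(): Promise<string> {
    // stand-in for a network call; real callers wrap Cosmos DB stored procedures
    // or the Playwright browser download
    if (Math.random() < 0.5) {
        throw new Error('read ECONNRESET');
    }
    return 'ok';
}

retry(flakyUpload).then(
    result => console.log('succeeded:', result),
    err => { console.error(err); process.exit(1); },
);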
87
lib/vscode/build/azure-pipelines/common/sync-mooncake.js
Normal file
87
lib/vscode/build/azure-pipelines/common/sync-mooncake.js
Normal file
@ -0,0 +1,87 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const url = require("url");
|
||||
const azure = require("azure-storage");
|
||||
const mime = require("mime");
|
||||
const cosmos_1 = require("@azure/cosmos");
|
||||
const retry_1 = require("./retry");
|
||||
function log(...args) {
|
||||
console.log(...[`[${new Date().toISOString()}]`, ...args]);
|
||||
}
|
||||
function error(...args) {
|
||||
console.error(...[`[${new Date().toISOString()}]`, ...args]);
|
||||
}
|
||||
if (process.argv.length < 3) {
|
||||
error('Usage: node sync-mooncake.js <quality>');
|
||||
process.exit(-1);
|
||||
}
|
||||
async function sync(commit, quality) {
|
||||
log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);
|
||||
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const container = client.database('builds').container(quality);
|
||||
const query = `SELECT TOP 1 * FROM c WHERE c.id = "${commit}"`;
|
||||
const res = await container.items.query(query, {}).fetchAll();
|
||||
if (res.resources.length !== 1) {
|
||||
throw new Error(`No builds found for ${commit}`);
|
||||
}
|
||||
const build = res.resources[0];
|
||||
log(`Found build for ${commit}, with ${build.assets.length} assets`);
|
||||
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
|
||||
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
|
||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
|
||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||
// mooncake is fussy and far away, this is needed!
|
||||
blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
|
||||
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
|
||||
for (const asset of build.assets) {
|
||||
try {
|
||||
const blobPath = url.parse(asset.url).path;
|
||||
if (!blobPath) {
|
||||
throw new Error(`Failed to parse URL: ${asset.url}`);
|
||||
}
|
||||
const blobName = blobPath.replace(/^\/\w+\//, '');
|
||||
log(`Found ${blobName}`);
|
||||
if (asset.mooncakeUrl) {
|
||||
log(` Already in Mooncake ✔️`);
|
||||
continue;
|
||||
}
|
||||
const readStream = blobService.createReadStream(quality, blobName, undefined);
|
||||
const blobOptions = {
|
||||
contentSettings: {
|
||||
contentType: mime.lookup(blobPath),
|
||||
cacheControl: 'max-age=31536000, public'
|
||||
}
|
||||
};
|
||||
const writeStream = mooncakeBlobService.createWriteStreamToBlockBlob(quality, blobName, blobOptions, undefined);
|
||||
log(` Uploading to Mooncake...`);
|
||||
await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
|
||||
log(` Updating build in DB...`);
|
||||
const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
|
||||
await retry_1.retry(() => container.scripts.storedProcedure('setAssetMooncakeUrl')
|
||||
.execute('', [commit, asset.platform, asset.type, mooncakeUrl]));
|
||||
log(` Done ✔️`);
|
||||
}
|
||||
catch (err) {
|
||||
error(err);
|
||||
}
|
||||
}
|
||||
log(`All done ✔️`);
|
||||
}
|
||||
function main() {
|
||||
const commit = process.env['BUILD_SOURCEVERSION'];
|
||||
if (!commit) {
|
||||
error('Skipping publish due to missing BUILD_SOURCEVERSION');
|
||||
return;
|
||||
}
|
||||
const quality = process.argv[2];
|
||||
sync(commit, quality).catch(err => {
|
||||
error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
main();
|
@ -1,79 +0,0 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.x"
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "vscode-build-cache"
|
||||
|
||||
- script: |
|
||||
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
||||
displayName: Install Dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "vscode-build-cache"
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn postinstall
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||
|
||||
- script: |
|
||||
yarn electron x64
|
||||
displayName: Download Electron
|
||||
|
||||
- script: |
|
||||
yarn monaco-compile-check
|
||||
displayName: Run Monaco Editor Checks
|
||||
|
||||
- script: |
|
||||
yarn valid-layers-check
|
||||
displayName: Run Valid Layers Checks
|
||||
|
||||
- script: |
|
||||
yarn compile
|
||||
displayName: Compile Sources
|
||||
|
||||
- script: |
|
||||
yarn download-builtin-extensions
|
||||
displayName: Download Built-in Extensions
|
||||
|
||||
- script: |
|
||||
./scripts/test.sh --tfs "Unit Tests"
|
||||
displayName: Run Unit Tests (Electron)
|
||||
|
||||
- script: |
|
||||
yarn test-browser --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
|
||||
displayName: Run Unit Tests (Browser)
|
||||
|
||||
- script: |
|
||||
./scripts/test-integration.sh --tfs "Integration Tests"
|
||||
displayName: Run Integration Tests (Electron)
|
||||
|
||||
- task: PublishPipelineArtifact@0
|
||||
inputs:
|
||||
artifactName: crash-dump-macos
|
||||
targetPath: .build/crashes
|
||||
displayName: "Publish Crash Reports"
|
||||
continueOnError: true
|
||||
condition: failed()
|
||||
|
||||
- task: PublishTestResults@2
|
||||
displayName: Publish Tests Results
|
||||
inputs:
|
||||
testResultsFiles: "*-results.xml"
|
||||
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
|
||||
condition: succeededOrFailed()
|
@ -1,10 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
|
||||
<true/>
|
||||
<key>com.apple.security.cs.disable-library-validation</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</plist>
|
@ -1,32 +1,7 @@
|
||||
steps:
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
echo -n $BUILD_SOURCEVERSION > .build/commit
|
||||
echo -n $VSCODE_QUALITY > .build/quality
|
||||
echo -n $ENABLE_TERRAPIN > .build/terrapin
|
||||
displayName: Prepare compilation cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
|
||||
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
|
||||
vstsFeed: "npm-vscode"
|
||||
platformIndependent: true
|
||||
alias: "Compilation"
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
exit 1
|
||||
displayName: Check RestoreCache
|
||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.x"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: "Azure Key Vault: Get Secrets"
|
||||
@ -34,9 +9,21 @@ steps:
|
||||
azureSubscription: "vscode-builds-subscription"
|
||||
KeyVaultName: vscode
|
||||
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: Compilation
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
displayName: Download compilation output
|
||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
|
||||
displayName: Extract compilation output
|
||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
cat << EOF > ~/.netrc
|
||||
machine github.com
|
||||
login vscode
|
||||
@ -55,40 +42,45 @@ steps:
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||
git fetch distro
|
||||
git merge $(node -p "require('./package.json').distro")
|
||||
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
displayName: Switch to Terrapin packages
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
|
||||
- script: |
|
||||
echo -n $(VSCODE_ARCH) > .build/arch
|
||||
mkdir -p .build
|
||||
node build/azure-pipelines/common/computeNodeModulesCacheKey.js $VSCODE_ARCH $ENABLE_TERRAPIN > .build/yarnlockhash
|
||||
displayName: Prepare yarn cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
|
||||
path: .build/node_modules_cache
|
||||
cacheHitVar: NODE_MODULES_RESTORED
|
||||
displayName: Restore node_modules cache
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npm install -g node-gyp@7.1.0
|
||||
tar -xzf .build/node_modules_cache/cache.tgz
|
||||
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Extract node_modules cache
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npm install -g node-gyp@latest
|
||||
node-gyp --version
|
||||
displayName: Update node-gyp
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
displayName: Switch to Terrapin packages
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export npm_config_arch=$(VSCODE_ARCH)
|
||||
export npm_config_node_gyp=$(which node-gyp)
|
||||
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
|
||||
export CHILD_CONCURRENCY="1"
|
||||
|
||||
for i in {1..3}; do # try 3 times, for Terrapin
|
||||
yarn --frozen-lockfile && break
|
||||
@ -98,25 +90,19 @@ steps:
|
||||
fi
|
||||
echo "Yarn failed $i, trying again..."
|
||||
done
|
||||
env:
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export npm_config_arch=$(VSCODE_ARCH)
|
||||
export npm_config_node_gyp=$(which node-gyp)
|
||||
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
|
||||
ls /Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/
|
||||
yarn postinstall
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||
mkdir -p .build/node_modules_cache
|
||||
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Create node_modules archive
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@ -126,10 +112,21 @@ steps:
|
||||
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
|
||||
ls /Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/
|
||||
yarn electron-rebuild
|
||||
# remove once https://github.com/prebuild/prebuild-install/pull/140 is merged and found in keytar
|
||||
cd ./node_modules/keytar
|
||||
node-gyp rebuild
|
||||
displayName: Rebuild native modules for ARM64
|
||||
condition: eq(variables['VSCODE_ARCH'], 'arm64')
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- download: current
|
||||
artifact: vscode-darwin-x64
|
||||
displayName: Download x64 artifact
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- download: current
|
||||
artifact: vscode-darwin-arm64
|
||||
displayName: Download arm64 artifact
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@ -141,6 +138,7 @@ steps:
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-darwin-$(VSCODE_ARCH)-min-ci
|
||||
displayName: Build
|
||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@ -148,14 +146,23 @@ steps:
|
||||
yarn gulp vscode-reh-darwin-min-ci
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-reh-web-darwin-min-ci
|
||||
displayName: Build reh
|
||||
displayName: Build Server
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn electron $(VSCODE_ARCH)
|
||||
displayName: Download Electron
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
unzip $(Pipeline.Workspace)/vscode-darwin-x64/VSCode-darwin-x64.zip -d $(agent.builddirectory)/vscode-x64
|
||||
unzip $(Pipeline.Workspace)/vscode-darwin-arm64/VSCode-darwin-arm64.zip -d $(agent.builddirectory)/vscode-arm64
|
||||
DEBUG=* node build/darwin/create-universal-app.js
|
||||
displayName: Create Universal App
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install"
|
||||
displayName: Download Electron and Playwright
|
||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@ -167,17 +174,26 @@ steps:
|
||||
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain
|
||||
VSCODE_ARCH="$(VSCODE_ARCH)" DEBUG=electron-osx-sign* node build/darwin/sign.js
|
||||
displayName: Set Hardened Entitlements
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
./scripts/test.sh --build --tfs "Unit Tests"
|
||||
displayName: Run unit tests (Electron)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn test-browser --build --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
|
||||
displayName: Run unit tests (Browser)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn --cwd test/integration/browser compile
|
||||
displayName: Compile integration tests
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
@ -191,13 +207,15 @@ steps:
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
|
||||
./scripts/test-integration.sh --build --tfs "Integration Tests"
|
||||
displayName: Run integration tests (Electron)
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
|
||||
./resources/server/test/test-web-integration.sh --browser webkit
|
||||
displayName: Run integration tests (Browser)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
@ -208,22 +226,29 @@ steps:
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
|
||||
./resources/server/test/test-remote-integration.sh
|
||||
displayName: Run remote integration tests (Electron)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn --cwd test/smoke compile
|
||||
displayName: Compile smoke tests
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
|
||||
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
||||
yarn smoketest --build "$APP_ROOT/$APP_NAME"
|
||||
continueOnError: true
|
||||
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME"
|
||||
timeoutInMinutes: 5
|
||||
displayName: Run smoke tests (Electron)
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
|
||||
yarn smoketest --web --headless
|
||||
continueOnError: true
|
||||
yarn smoketest-no-compile --web --headless
|
||||
timeoutInMinutes: 5
|
||||
displayName: Run smoke tests (Browser)
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
@ -240,12 +265,13 @@ steps:
|
||||
inputs:
|
||||
testResultsFiles: "*-results.xml"
|
||||
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
|
||||
condition: succeededOrFailed()
|
||||
condition: and(succeededOrFailed(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
pushd $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) && zip -r -X -y $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip * && popd
|
||||
displayName: Archive build
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||
inputs:
|
||||
@ -270,10 +296,12 @@ steps:
|
||||
]
|
||||
SessionTimeout: 60
|
||||
displayName: Codesign
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
zip -d $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip "*.pkg"
|
||||
displayName: Clean Archive
|
||||
displayName: Clean
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
|
||||
@ -281,6 +309,7 @@ steps:
|
||||
BUNDLE_IDENTIFIER=$(node -p "require(\"$APP_ROOT/$APP_NAME/Contents/Resources/app/product.json\").darwinBundleIdentifier")
|
||||
echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER"
|
||||
displayName: Export bundle identifier
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||
inputs:
|
||||
@ -305,6 +334,7 @@ steps:
|
||||
]
|
||||
SessionTimeout: 60
|
||||
displayName: Notarization
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@ -312,7 +342,7 @@ steps:
|
||||
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
||||
"$APP_ROOT/$APP_NAME/Contents/Resources/app/bin/code" --export-default-configuration=.build
|
||||
displayName: Verify start after signing (export configuration)
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
|
||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'arm64'), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@ -323,13 +353,29 @@ steps:
|
||||
VSCODE_ARCH="$(VSCODE_ARCH)" \
|
||||
./build/azure-pipelines/darwin/publish.sh
|
||||
displayName: Publish
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
|
||||
artifact: vscode-darwin-$(VSCODE_ARCH)
|
||||
displayName: Publish archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/vscode-server-darwin.zip
|
||||
artifact: vscode-server-darwin-$(VSCODE_ARCH)
|
||||
displayName: Publish server archive
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/vscode-server-darwin-web.zip
|
||||
artifact: vscode-server-darwin-$(VSCODE_ARCH)-web
|
||||
displayName: Publish web server archive
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
|
||||
VSCODE_ARCH="$(VSCODE_ARCH)" \
|
||||
yarn gulp upload-vscode-configuration
|
||||
displayName: Upload configuration (for Bing settings search)
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
continueOnError: true
|
||||
|
||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||
|
@ -5,6 +5,7 @@ set -e
|
||||
case $VSCODE_ARCH in
|
||||
x64) ASSET_ID="darwin" ;;
|
||||
arm64) ASSET_ID="darwin-arm64" ;;
|
||||
universal) ASSET_ID="darwin-universal" ;;
|
||||
esac
|
||||
|
||||
# publish the build
|
||||
|
@ -8,7 +8,7 @@ pr:
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: "Azure Key Vault: Get Secrets"
|
||||
|
@ -2,11 +2,12 @@ pool:
|
||||
vmImage: "Ubuntu-16.04"
|
||||
|
||||
trigger: none
|
||||
pr: none
|
||||
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: "Azure Key Vault: Get Secrets"
|
||||
|
@ -22,7 +22,7 @@ SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX-web"
|
||||
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX-web.tar.gz"
|
||||
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
|
||||
|
||||
rm -rf $ROOT/vscode-server-*.tar.*
|
||||
rm -rf $ROOT/vscode-server-*-web.tar.*
|
||||
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
|
||||
|
||||
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX-web" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
|
||||
|
@ -1,92 +0,0 @@
|
||||
steps:
|
||||
- script: |
|
||||
set -e
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0
|
||||
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
||||
sudo chmod +x /etc/init.d/xvfb
|
||||
sudo update-rc.d xvfb defaults
|
||||
sudo service xvfb start
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.x"
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "vscode-build-cache"
|
||||
|
||||
- script: |
|
||||
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
||||
displayName: Install Dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "vscode-build-cache"
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn postinstall
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||
|
||||
- script: |
|
||||
yarn electron x64
|
||||
displayName: Download Electron
|
||||
|
||||
- script: |
|
||||
yarn gulp hygiene
|
||||
displayName: Run Hygiene Checks
|
||||
|
||||
- script: |
|
||||
yarn monaco-compile-check
|
||||
displayName: Run Monaco Editor Checks
|
||||
|
||||
- script: |
|
||||
yarn valid-layers-check
|
||||
displayName: Run Valid Layers Checks
|
||||
|
||||
- script: |
|
||||
yarn compile
|
||||
displayName: Compile Sources
|
||||
|
||||
- script: |
|
||||
yarn download-builtin-extensions
|
||||
displayName: Download Built-in Extensions
|
||||
|
||||
- script: |
|
||||
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
|
||||
displayName: Run Unit Tests (Electron)
|
||||
|
||||
- script: |
|
||||
DISPLAY=:10 yarn test-browser --browser chromium --tfs "Browser Unit Tests"
|
||||
displayName: Run Unit Tests (Browser)
|
||||
|
||||
- script: |
|
||||
DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
|
||||
displayName: Run Integration Tests (Electron)
|
||||
|
||||
- task: PublishPipelineArtifact@0
|
||||
inputs:
|
||||
artifactName: crash-dump-linux
|
||||
targetPath: .build/crashes
|
||||
displayName: "Publish Crash Reports"
|
||||
continueOnError: true
|
||||
condition: failed()
|
||||
|
||||
- task: PublishTestResults@2
|
||||
displayName: Publish Tests Results
|
||||
inputs:
|
||||
testResultsFiles: "*-results.xml"
|
||||
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
|
||||
condition: succeededOrFailed()
|
@ -1,28 +1,7 @@
|
||||
steps:
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
echo -n $BUILD_SOURCEVERSION > .build/commit
|
||||
echo -n $VSCODE_QUALITY > .build/quality
|
||||
echo -n $ENABLE_TERRAPIN > .build/terrapin
|
||||
displayName: Prepare compilation cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
|
||||
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
|
||||
vstsFeed: "npm-vscode"
|
||||
platformIndependent: true
|
||||
alias: "Compilation"
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
exit 1
|
||||
displayName: Check RestoreCache
|
||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
@ -34,6 +13,17 @@ steps:
|
||||
azureSubscription: "vscode-builds-subscription"
|
||||
KeyVaultName: vscode
|
||||
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: Compilation
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
displayName: Download compilation output
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
|
||||
displayName: Extract compilation output
|
||||
|
||||
- task: Docker@1
|
||||
displayName: "Pull image"
|
||||
inputs:
|
||||
@ -45,7 +35,6 @@ steps:
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
|
||||
cat << EOF > ~/.netrc
|
||||
machine github.com
|
||||
login vscode
|
||||
@ -58,30 +47,35 @@ steps:
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||
git fetch distro
|
||||
git merge $(node -p "require('./package.json').distro")
|
||||
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
displayName: Switch to Terrapin packages
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
|
||||
- script: |
|
||||
echo -n "alpine" > .build/arch
|
||||
mkdir -p .build
|
||||
node build/azure-pipelines/common/computeNodeModulesCacheKey.js "alpine" $ENABLE_TERRAPIN > .build/yarnlockhash
|
||||
displayName: Prepare yarn cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
|
||||
path: .build/node_modules_cache
|
||||
cacheHitVar: NODE_MODULES_RESTORED
|
||||
displayName: Restore node_modules cache
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf .build/node_modules_cache/cache.tgz
|
||||
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Extract node_modules cache
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
displayName: Switch to Terrapin packages
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export CHILD_CONCURRENCY="1"
|
||||
for i in {1..3}; do # try 3 times, for Terrapin
|
||||
yarn --frozen-lockfile && break
|
||||
if [ $i -eq 3 ]; then
|
||||
@ -90,21 +84,19 @@ steps:
|
||||
fi
|
||||
echo "Yarn failed $i, trying again..."
|
||||
done
|
||||
env:
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn postinstall
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||
mkdir -p .build/node_modules_cache
|
||||
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Create node_modules archive
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@ -113,7 +105,7 @@ steps:
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
docker run -e VSCODE_QUALITY -e CHILD_CONCURRENCY=1 -v $(pwd):/root/vscode -v ~/.netrc:/root/.netrc vscodehub.azurecr.io/vscode-linux-build-agent:alpine /root/vscode/build/azure-pipelines/linux/alpine/install-dependencies.sh
|
||||
docker run -e VSCODE_QUALITY -v $(pwd):/root/vscode -v ~/.netrc:/root/.netrc vscodehub.azurecr.io/vscode-linux-build-agent:alpine /root/vscode/build/azure-pipelines/linux/alpine/install-dependencies.sh
|
||||
displayName: Prebuild
|
||||
|
||||
- script: |
|
||||
@ -129,6 +121,17 @@ steps:
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
./build/azure-pipelines/linux/alpine/publish.sh
|
||||
displayName: Publish
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/vscode-server-linux-alpine.tar.gz
|
||||
artifact: vscode-server-linux-alpine
|
||||
displayName: Publish server archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/vscode-server-linux-alpine-web.tar.gz
|
||||
artifact: vscode-server-linux-alpine-web
|
||||
displayName: Publish web server archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||
displayName: "Component Detection"
|
||||
|
@ -1,28 +1,7 @@
|
||||
steps:
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
echo -n $BUILD_SOURCEVERSION > .build/commit
|
||||
echo -n $VSCODE_QUALITY > .build/quality
|
||||
echo -n $ENABLE_TERRAPIN > .build/terrapin
|
||||
displayName: Prepare compilation cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
|
||||
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
|
||||
vstsFeed: "npm-vscode"
|
||||
platformIndependent: true
|
||||
alias: "Compilation"
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
exit 1
|
||||
displayName: Check RestoreCache
|
||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
@ -34,6 +13,17 @@ steps:
|
||||
azureSubscription: "vscode-builds-subscription"
|
||||
KeyVaultName: vscode
|
||||
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: Compilation
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
displayName: Download compilation output
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
|
||||
displayName: Extract compilation output
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
cat << EOF > ~/.netrc
|
||||
@ -48,31 +38,56 @@ steps:
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||
git fetch distro
|
||||
git merge $(node -p "require('./package.json').distro")
|
||||
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
displayName: Switch to Terrapin packages
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
|
||||
- script: |
|
||||
echo -n $(VSCODE_ARCH) > .build/arch
|
||||
mkdir -p .build
|
||||
node build/azure-pipelines/common/computeNodeModulesCacheKey.js $VSCODE_ARCH $ENABLE_TERRAPIN > .build/yarnlockhash
|
||||
displayName: Prepare yarn cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
|
||||
path: .build/node_modules_cache
|
||||
cacheHitVar: NODE_MODULES_RESTORED
|
||||
displayName: Restore node_modules cache
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf .build/node_modules_cache/cache.tgz
|
||||
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Extract node_modules cache
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npm install -g node-gyp@latest
|
||||
node-gyp --version
|
||||
displayName: Update node-gyp
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['VSCODE_ARCH'], 'x64'))
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
displayName: Switch to Terrapin packages
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export npm_config_arch=$(NPM_ARCH)
|
||||
export CHILD_CONCURRENCY="1"
|
||||
export npm_config_build_from_source=true
|
||||
|
||||
if [ -z "$CC" ] || [ -z "$CXX" ]; then
|
||||
export CC=$(which gcc-5)
|
||||
export CXX=$(which g++-5)
|
||||
fi
|
||||
|
||||
if [ "$VSCODE_ARCH" == "x64" ]; then
|
||||
export VSCODE_REMOTE_CC=$(which gcc-4.8)
|
||||
export VSCODE_REMOTE_CXX=$(which g++-4.8)
|
||||
export VSCODE_REMOTE_NODE_GYP=$(which node-gyp)
|
||||
fi
|
||||
|
||||
for i in {1..3}; do # try 3 times, for Terrapin
|
||||
yarn --frozen-lockfile && break
|
||||
if [ $i -eq 3 ]; then
|
||||
@ -81,21 +96,23 @@ steps:
|
||||
fi
|
||||
echo "Yarn failed $i, trying again..."
|
||||
done
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
# remove once https://github.com/prebuild/prebuild-install/pull/140 is merged and found in keytar
|
||||
cd ./node_modules/keytar
|
||||
npx node-gyp rebuild
|
||||
env:
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn postinstall
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||
mkdir -p .build/node_modules_cache
|
||||
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Create node_modules archive
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@ -106,28 +123,41 @@ steps:
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-linux-$(VSCODE_ARCH)-min-ci
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-reh-linux-$(VSCODE_ARCH)-min-ci
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-reh-web-linux-$(VSCODE_ARCH)-min-ci
|
||||
displayName: Build
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
service xvfb start
|
||||
displayName: Start xvfb
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-reh-linux-$(VSCODE_ARCH)-min-ci
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-reh-web-linux-$(VSCODE_ARCH)-min-ci
|
||||
displayName: Build Server
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install"
|
||||
displayName: Download Electron and Playwright
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
|
||||
displayName: Run unit tests (Electron)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
|
||||
displayName: Run unit tests (Browser)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn --cwd test/integration/browser compile
|
||||
displayName: Compile integration tests
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
@ -142,6 +172,7 @@ steps:
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
|
||||
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
|
||||
displayName: Run integration tests (Electron)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
@ -149,6 +180,7 @@ steps:
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
|
||||
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
|
||||
displayName: Run integration tests (Browser)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
@ -160,6 +192,7 @@ steps:
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
|
||||
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
|
||||
displayName: Run remote integration tests (Electron)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- task: PublishPipelineArtifact@0
|
||||
@ -182,17 +215,20 @@ steps:
|
||||
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-deb"
|
||||
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-rpm"
|
||||
displayName: Build deb, rpm packages
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn gulp "vscode-linux-$(VSCODE_ARCH)-prepare-snap"
|
||||
displayName: Prepare snap package
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
# needed for code signing
|
||||
- task: UseDotNet@2
|
||||
displayName: "Install .NET Core SDK 2.x"
|
||||
inputs:
|
||||
version: 2.x
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||
inputs:
|
||||
@ -212,6 +248,7 @@ steps:
|
||||
]
|
||||
SessionTimeout: 120
|
||||
displayName: Codesign rpm
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@ -221,12 +258,34 @@ steps:
|
||||
VSCODE_ARCH="$(VSCODE_ARCH)" \
|
||||
./build/azure-pipelines/linux/publish.sh
|
||||
displayName: Publish
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(DEB_PATH)
|
||||
artifact: vscode-linux-deb-$(VSCODE_ARCH)
|
||||
displayName: Publish deb package
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(RPM_PATH)
|
||||
artifact: vscode-linux-rpm-$(VSCODE_ARCH)
|
||||
displayName: Publish rpm package
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/vscode-server-linux-$(VSCODE_ARCH).tar.gz
|
||||
artifact: vscode-server-linux-$(VSCODE_ARCH)
|
||||
displayName: Publish server archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/vscode-server-linux-$(VSCODE_ARCH)-web.tar.gz
|
||||
artifact: vscode-server-linux-$(VSCODE_ARCH)-web
|
||||
displayName: Publish web server archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: PublishPipelineArtifact@0
|
||||
displayName: "Publish Pipeline Artifact"
|
||||
inputs:
|
||||
artifactName: "snap-$(VSCODE_ARCH)"
|
||||
targetPath: .build/linux/snap-tarball
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||
displayName: "Component Detection"
|
||||
|
@ -26,6 +26,17 @@ rm -rf $ROOT/vscode-server-*.tar.*
|
||||
|
||||
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
|
||||
|
||||
# Publish Remote Extension Host (Web)
|
||||
LEGACY_SERVER_BUILD_NAME="vscode-reh-web-$PLATFORM_LINUX"
|
||||
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX-web"
|
||||
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX-web.tar.gz"
|
||||
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
|
||||
|
||||
rm -rf $ROOT/vscode-server-*-web.tar.*
|
||||
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
|
||||
|
||||
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX-web" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
|
||||
|
||||
# Publish DEB
|
||||
case $VSCODE_ARCH in
|
||||
x64) DEB_ARCH="amd64" ;;
|
||||
@ -58,3 +69,7 @@ mkdir -p $REPO/.build/linux/snap-tarball
|
||||
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$VSCODE_ARCH.tar.gz"
|
||||
rm -rf $SNAP_TARBALL_PATH
|
||||
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
|
||||
|
||||
# Export DEB_PATH, RPM_PATH
|
||||
echo "##vso[task.setvariable variable=DEB_PATH]$DEB_PATH"
|
||||
echo "##vso[task.setvariable variable=RPM_PATH]$RPM_PATH"
|
||||
|
@ -1,7 +1,7 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
@ -54,3 +54,11 @@ steps:
|
||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
||||
node build/azure-pipelines/common/createAsset.js "linux-snap-$(VSCODE_ARCH)" package "$SNAP_FILENAME" "$SNAP_PATH"
|
||||
|
||||
# Export SNAP_PATH
|
||||
echo "##vso[task.setvariable variable=SNAP_PATH]$SNAP_PATH"
|
||||
|
||||
- publish: $(SNAP_PATH)
|
||||
artifact: vscode-linux-snap-$(VSCODE_ARCH)
|
||||
displayName: Publish snap package
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
58
lib/vscode/build/azure-pipelines/mixin.js
Normal file
@ -0,0 +1,58 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const json = require("gulp-json-editor");
|
||||
const buffer = require('gulp-buffer');
|
||||
const filter = require("gulp-filter");
|
||||
const es = require("event-stream");
|
||||
const vfs = require("vinyl-fs");
|
||||
const fancyLog = require("fancy-log");
|
||||
const ansiColors = require("ansi-colors");
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
function main() {
|
||||
const quality = process.env['VSCODE_QUALITY'];
|
||||
if (!quality) {
|
||||
console.log('Missing VSCODE_QUALITY, skipping mixin');
|
||||
return;
|
||||
}
|
||||
const productJsonFilter = filter(f => f.relative === 'product.json', { restore: true });
|
||||
fancyLog(ansiColors.blue('[mixin]'), `Mixing in sources:`);
|
||||
return vfs
|
||||
.src(`quality/${quality}/**`, { base: `quality/${quality}` })
|
||||
.pipe(filter(f => !f.isDirectory()))
|
||||
.pipe(productJsonFilter)
|
||||
.pipe(buffer())
|
||||
.pipe(json((o) => {
|
||||
const ossProduct = JSON.parse(fs.readFileSync(path.join(__dirname, '..', '..', 'product.json'), 'utf8'));
|
||||
let builtInExtensions = ossProduct.builtInExtensions;
|
||||
if (Array.isArray(o.builtInExtensions)) {
|
||||
fancyLog(ansiColors.blue('[mixin]'), 'Overwriting built-in extensions:', o.builtInExtensions.map(e => e.name));
|
||||
builtInExtensions = o.builtInExtensions;
|
||||
}
|
||||
else if (o.builtInExtensions) {
|
||||
const include = o.builtInExtensions['include'] || [];
|
||||
const exclude = o.builtInExtensions['exclude'] || [];
|
||||
fancyLog(ansiColors.blue('[mixin]'), 'OSS built-in extensions:', builtInExtensions.map(e => e.name));
|
||||
fancyLog(ansiColors.blue('[mixin]'), 'Including built-in extensions:', include.map(e => e.name));
|
||||
fancyLog(ansiColors.blue('[mixin]'), 'Excluding built-in extensions:', exclude);
|
||||
builtInExtensions = builtInExtensions.filter(ext => !include.find(e => e.name === ext.name) && !exclude.find(name => name === ext.name));
|
||||
builtInExtensions = [...builtInExtensions, ...include];
|
||||
fancyLog(ansiColors.blue('[mixin]'), 'Final built-in extensions:', builtInExtensions.map(e => e.name));
|
||||
}
|
||||
else {
|
||||
fancyLog(ansiColors.blue('[mixin]'), 'Inheriting OSS built-in extensions', builtInExtensions.map(e => e.name));
|
||||
}
|
||||
return Object.assign(Object.assign({ webBuiltInExtensions: ossProduct.webBuiltInExtensions }, o), { builtInExtensions });
|
||||
}))
|
||||
.pipe(productJsonFilter.restore)
|
||||
.pipe(es.mapSync(function (f) {
|
||||
fancyLog(ansiColors.blue('[mixin]'), f.relative, ansiColors.green('✔︎'));
|
||||
return f;
|
||||
}))
|
||||
.pipe(vfs.dest('.'));
|
||||
}
|
||||
main();
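// Illustrative sketch (not part of the diff): the include/exclude merge performed by the
// mixin above, in isolation. The extension names here are hypothetical.
const ossBuiltIn = [{ name: 'ms-vscode.node-debug' }, { name: 'ms-vscode.references-view' }];
const include = [{ name: 'ms-vscode.js-debug' }]; // quality's builtInExtensions.include
const exclude = ['ms-vscode.node-debug'];         // quality's builtInExtensions.exclude
const merged = [
    ...ossBuiltIn.filter(ext => !include.find(e => e.name === ext.name) && !exclude.find(name => name === ext.name)),
    ...include
];
console.log(merged.map(e => e.name)); // [ 'ms-vscode.references-view', 'ms-vscode.js-debug' ]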
|
@ -1,4 +1,3 @@
|
||||
trigger: none
|
||||
pr: none
|
||||
|
||||
schedules:
|
||||
@ -8,11 +7,108 @@ schedules:
|
||||
include:
|
||||
- master
|
||||
|
||||
parameters:
|
||||
- name: VSCODE_QUALITY
|
||||
displayName: Quality
|
||||
type: string
|
||||
default: insider
|
||||
values:
|
||||
- exploration
|
||||
- insider
|
||||
- stable
|
||||
- name: ENABLE_TERRAPIN
|
||||
displayName: "Enable Terrapin"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_WIN32
|
||||
displayName: "🎯 Windows x64"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_WIN32_32BIT
|
||||
displayName: "🎯 Windows ia32"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_WIN32_ARM64
|
||||
displayName: "🎯 Windows arm64"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_LINUX
|
||||
displayName: "🎯 Linux x64"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_LINUX_ARM64
|
||||
displayName: "🎯 Linux arm64"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_LINUX_ARMHF
|
||||
displayName: "🎯 Linux armhf"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_LINUX_ALPINE
|
||||
displayName: "🎯 Alpine Linux"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_MACOS
|
||||
displayName: "🎯 macOS x64"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_MACOS_ARM64
|
||||
displayName: "🎯 macOS arm64"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_MACOS_UNIVERSAL
|
||||
displayName: "🎯 macOS universal"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_WEB
|
||||
displayName: "🎯 Web"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_PUBLISH
|
||||
displayName: "Publish to builds.code.visualstudio.com"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_RELEASE
|
||||
displayName: "Release build if successful"
|
||||
type: boolean
|
||||
default: false
|
||||
- name: VSCODE_COMPILE_ONLY
|
||||
displayName: "Run Compile stage exclusively"
|
||||
type: boolean
|
||||
default: false
|
||||
- name: VSCODE_STEP_ON_IT
|
||||
displayName: "Skip tests"
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
variables:
|
||||
- name: ENABLE_TERRAPIN
|
||||
value: ${{ eq(parameters.ENABLE_TERRAPIN, true) }}
|
||||
- name: VSCODE_QUALITY
|
||||
value: ${{ parameters.VSCODE_QUALITY }}
|
||||
- name: VSCODE_BUILD_STAGE_WINDOWS
|
||||
value: ${{ or(eq(parameters.VSCODE_BUILD_WIN32, true), eq(parameters.VSCODE_BUILD_WIN32_32BIT, true), eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}
|
||||
- name: VSCODE_BUILD_STAGE_LINUX
|
||||
value: ${{ or(eq(parameters.VSCODE_BUILD_LINUX, true), eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true), eq(parameters.VSCODE_BUILD_LINUX_ARM64, true), eq(parameters.VSCODE_BUILD_LINUX_ALPINE, true), eq(parameters.VSCODE_BUILD_WEB, true)) }}
|
||||
- name: VSCODE_BUILD_STAGE_MACOS
|
||||
value: ${{ or(eq(parameters.VSCODE_BUILD_MACOS, true), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true)) }}
|
||||
- name: VSCODE_CIBUILD
|
||||
value: ${{ in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI') }}
|
||||
- name: VSCODE_PUBLISH
|
||||
value: ${{ and(eq(parameters.VSCODE_PUBLISH, true), eq(variables.VSCODE_CIBUILD, false)) }}
|
||||
- name: VSCODE_SCHEDULEDBUILD
|
||||
value: ${{ eq(variables['Build.Reason'], 'Schedule') }}
|
||||
- name: VSCODE_STEP_ON_IT
|
||||
value: ${{ eq(parameters.VSCODE_STEP_ON_IT, true) }}
|
||||
- name: VSCODE_BUILD_MACOS_UNIVERSAL
|
||||
value: ${{ and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_BUILD_MACOS, true), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true), eq(parameters.VSCODE_BUILD_MACOS_UNIVERSAL, true)) }}
|
||||
|
||||
resources:
|
||||
containers:
|
||||
- container: vscode-x64
|
||||
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
|
||||
image: vscodehub.azurecr.io/vscode-linux-build-agent:bionic-x64
|
||||
endpoint: VSCodeHub
|
||||
options: --user 0:0
|
||||
- container: vscode-arm64
|
||||
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-arm64
|
||||
endpoint: VSCodeHub
|
||||
@ -26,177 +122,189 @@ stages:
|
||||
- stage: Compile
|
||||
jobs:
|
||||
- job: Compile
|
||||
pool:
|
||||
vmImage: "Ubuntu-16.04"
|
||||
container: vscode-x64
|
||||
pool: compile
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: product-compile.yml
|
||||
|
||||
- stage: Windows
|
||||
dependsOn:
|
||||
- Compile
|
||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
|
||||
pool:
|
||||
vmImage: VS2017-Win2016
|
||||
jobs:
|
||||
- job: Windows
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: win32/product-build-win32.yml
|
||||
- ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true)) }}:
|
||||
- stage: Windows
|
||||
dependsOn:
|
||||
- Compile
|
||||
pool:
|
||||
vmImage: VS2017-Win2016
|
||||
jobs:
|
||||
|
||||
- job: Windows32
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true'))
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: ia32
|
||||
steps:
|
||||
- template: win32/product-build-win32.yml
|
||||
- ${{ if eq(parameters.VSCODE_BUILD_WIN32, true) }}:
|
||||
- job: Windows
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: win32/product-build-win32.yml
|
||||
|
||||
- job: WindowsARM64
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32_ARM64'], 'true'))
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
steps:
|
||||
- template: win32/product-build-win32.yml
|
||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_WIN32_32BIT, true)) }}:
|
||||
- job: Windows32
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: ia32
|
||||
steps:
|
||||
- template: win32/product-build-win32.yml
|
||||
|
||||
- stage: Linux
|
||||
dependsOn:
|
||||
- Compile
|
||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
|
||||
pool:
|
||||
vmImage: "Ubuntu-16.04"
|
||||
jobs:
|
||||
- job: Linux
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
|
||||
container: vscode-x64
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
NPM_ARCH: x64
|
||||
steps:
|
||||
- template: linux/product-build-linux.yml
|
||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}:
|
||||
- job: WindowsARM64
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
steps:
|
||||
- template: win32/product-build-win32.yml
|
||||
|
||||
- job: LinuxSnap
|
||||
dependsOn:
|
||||
- ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_LINUX'], true)) }}:
|
||||
- stage: Linux
|
||||
dependsOn:
|
||||
- Compile
|
||||
pool:
|
||||
vmImage: "Ubuntu-18.04"
|
||||
jobs:
|
||||
|
||||
- ${{ if eq(parameters.VSCODE_BUILD_LINUX, true) }}:
|
||||
- job: Linux
|
||||
container: vscode-x64
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
NPM_ARCH: x64
|
||||
steps:
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX, true)) }}:
|
||||
- job: LinuxSnap
|
||||
dependsOn:
|
||||
- Linux
|
||||
container: snapcraft
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: linux/snap-build-linux.yml
|
||||
|
||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true)) }}:
|
||||
- job: LinuxArmhf
|
||||
container: vscode-armhf
|
||||
variables:
|
||||
VSCODE_ARCH: armhf
|
||||
NPM_ARCH: armv7l
|
||||
steps:
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
# TODO@joaomoreno: We don't ship ARM snaps for now
|
||||
- ${{ if and(false, eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true)) }}:
|
||||
- job: LinuxSnapArmhf
|
||||
dependsOn:
|
||||
- LinuxArmhf
|
||||
container: snapcraft
|
||||
variables:
|
||||
VSCODE_ARCH: armhf
|
||||
steps:
|
||||
- template: linux/snap-build-linux.yml
|
||||
|
||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARM64, true)) }}:
|
||||
- job: LinuxArm64
|
||||
container: vscode-arm64
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
NPM_ARCH: arm64
|
||||
steps:
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
# TODO@joaomoreno: We don't ship ARM snaps for now
|
||||
- ${{ if and(false, eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARM64, true)) }}:
|
||||
- job: LinuxSnapArm64
|
||||
dependsOn:
|
||||
- LinuxArm64
|
||||
container: snapcraft
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
steps:
|
||||
- template: linux/snap-build-linux.yml
|
||||
|
||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ALPINE, true)) }}:
|
||||
- job: LinuxAlpine
|
||||
steps:
|
||||
- template: linux/product-build-alpine.yml
|
||||
|
||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_WEB, true)) }}:
|
||||
- job: LinuxWeb
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: web/product-build-web.yml
|
||||
|
||||
- ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_MACOS'], true)) }}:
|
||||
- stage: macOS
|
||||
dependsOn:
|
||||
- Compile
|
||||
pool:
|
||||
vmImage: macOS-latest
|
||||
jobs:
|
||||
|
||||
- ${{ if eq(parameters.VSCODE_BUILD_MACOS, true) }}:
|
||||
- job: macOS
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: darwin/product-build-darwin.yml
|
||||
|
||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true)) }}:
|
||||
- job: macOSARM64
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
steps:
|
||||
- template: darwin/product-build-darwin.yml
|
||||
|
||||
- ${{ if eq(variables['VSCODE_BUILD_MACOS_UNIVERSAL'], true) }}:
|
||||
- job: macOSUniversal
|
||||
dependsOn:
|
||||
- macOS
|
||||
- macOSARM64
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: universal
|
||||
steps:
|
||||
- template: darwin/product-build-darwin.yml
|
||||
|
||||
- ${{ if and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_COMPILE_ONLY, false)) }}:
|
||||
- stage: Mooncake
|
||||
dependsOn:
|
||||
- ${{ if eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true) }}:
|
||||
- Windows
|
||||
- ${{ if eq(variables['VSCODE_BUILD_STAGE_LINUX'], true) }}:
|
||||
- Linux
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
|
||||
container: snapcraft
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: linux/snap-build-linux.yml
|
||||
- ${{ if eq(variables['VSCODE_BUILD_STAGE_MACOS'], true) }}:
|
||||
- macOS
|
||||
condition: succeededOrFailed()
|
||||
pool:
|
||||
vmImage: "Ubuntu-18.04"
|
||||
jobs:
|
||||
- job: SyncMooncake
|
||||
displayName: Sync Mooncake
|
||||
steps:
|
||||
- template: sync-mooncake.yml
|
||||
|
||||
- job: LinuxArmhf
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
|
||||
container: vscode-armhf
|
||||
variables:
|
||||
VSCODE_ARCH: armhf
|
||||
NPM_ARCH: armv7l
|
||||
steps:
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
- job: LinuxSnapArmhf
|
||||
dependsOn:
|
||||
- LinuxArmhf
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
|
||||
container: snapcraft
|
||||
variables:
|
||||
VSCODE_ARCH: armhf
|
||||
steps:
|
||||
- template: linux/snap-build-linux.yml
|
||||
|
||||
- job: LinuxArm64
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
|
||||
container: vscode-arm64
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
NPM_ARCH: arm64
|
||||
steps:
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
- job: LinuxSnapArm64
|
||||
dependsOn:
|
||||
- LinuxArm64
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
|
||||
container: snapcraft
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
steps:
|
||||
- template: linux/snap-build-linux.yml
|
||||
|
||||
- job: LinuxAlpine
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
|
||||
steps:
|
||||
- template: linux/product-build-alpine.yml
|
||||
|
||||
- job: LinuxWeb
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'))
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: web/product-build-web.yml
|
||||
|
||||
- stage: macOS
|
||||
dependsOn:
|
||||
- Compile
|
||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
|
||||
pool:
|
||||
vmImage: macOS-latest
|
||||
jobs:
|
||||
- job: macOS
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: darwin/product-build-darwin.yml
|
||||
|
||||
- stage: macOSARM64
|
||||
dependsOn:
|
||||
- Compile
|
||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
|
||||
pool:
|
||||
vmImage: macOS-latest
|
||||
jobs:
|
||||
- job: macOSARM64
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS_ARM64'], 'true'))
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
steps:
|
||||
- template: darwin/product-build-darwin.yml
|
||||
|
||||
- stage: Mooncake
|
||||
dependsOn:
|
||||
- Windows
|
||||
- Linux
|
||||
- macOS
|
||||
- macOSARM64
|
||||
condition: and(succeededOrFailed(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
|
||||
pool:
|
||||
vmImage: "Ubuntu-16.04"
|
||||
jobs:
|
||||
- job: SyncMooncake
|
||||
displayName: Sync Mooncake
|
||||
steps:
|
||||
- template: sync-mooncake.yml
|
||||
|
||||
- stage: Publish
|
||||
dependsOn:
|
||||
- Windows
|
||||
- Linux
|
||||
- macOS
|
||||
- macOSARM64
|
||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), or(eq(variables['VSCODE_RELEASE'], 'true'), and(or(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['VSCODE_QUALITY'], 'exploration')), eq(variables['Build.Reason'], 'Schedule'))))
|
||||
pool:
|
||||
vmImage: "Ubuntu-16.04"
|
||||
jobs:
|
||||
- job: BuildService
|
||||
displayName: Build Service
|
||||
steps:
|
||||
- template: release.yml
|
||||
- ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), or(eq(parameters.VSCODE_RELEASE, true), and(in(parameters.VSCODE_QUALITY, 'insider', 'exploration'), eq(variables['VSCODE_SCHEDULEDBUILD'], true)))) }}:
|
||||
- stage: Release
|
||||
dependsOn:
|
||||
- ${{ if eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true) }}:
|
||||
- Windows
|
||||
- ${{ if eq(variables['VSCODE_BUILD_STAGE_LINUX'], true) }}:
|
||||
- Linux
|
||||
- ${{ if eq(variables['VSCODE_BUILD_STAGE_MACOS'], true) }}:
|
||||
- macOS
|
||||
pool:
|
||||
vmImage: "Ubuntu-18.04"
|
||||
jobs:
|
||||
- job: ReleaseBuild
|
||||
displayName: Release Build
|
||||
steps:
|
||||
- template: release.yml
|
||||
|
@ -1,36 +1,17 @@
|
||||
steps:
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
echo -n $BUILD_SOURCEVERSION > .build/commit
|
||||
echo -n $VSCODE_QUALITY > .build/quality
|
||||
echo -n $ENABLE_TERRAPIN > .build/terrapin
|
||||
displayName: Prepare compilation cache flag
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
|
||||
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
|
||||
vstsFeed: "npm-vscode"
|
||||
platformIndependent: true
|
||||
alias: "Compilation"
|
||||
dryRun: true
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
versionSpec: "12.x"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.x"
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: "Azure Key Vault: Get Secrets"
|
||||
inputs:
|
||||
azureSubscription: "vscode-builds-subscription"
|
||||
KeyVaultName: vscode
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@ -43,36 +24,46 @@ steps:
|
||||
git config user.email "vscode@microsoft.com"
|
||||
git config user.name "VSCode"
|
||||
displayName: Prepare tooling
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||
git fetch distro
|
||||
git merge $(node -p "require('./package.json').distro")
|
||||
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
node build/azure-pipelines/common/computeNodeModulesCacheKey.js $VSCODE_ARCH $ENABLE_TERRAPIN > .build/yarnlockhash
|
||||
displayName: Prepare yarn cache flags
|
||||
|
||||
# using `genericNodeModules` instead of `nodeModules` here to avoid sharing the cache with builds running inside containers
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
key: 'genericNodeModules | $(Agent.OS) | .build/yarnlockhash'
|
||||
path: .build/node_modules_cache
|
||||
cacheHitVar: NODE_MODULES_RESTORED
|
||||
displayName: Restore node_modules cache
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf .build/node_modules_cache/cache.tgz
|
||||
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Extract node_modules cache
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
displayName: Switch to Terrapin packages
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
|
||||
- script: |
|
||||
echo -n $(VSCODE_ARCH) > .build/arch
|
||||
displayName: Prepare yarn cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
displayName: Switch to Terrapin packages
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
sudo apt update -y
|
||||
sudo apt install -y build-essential pkg-config libx11-dev libx11-xcb-dev libxkbfile-dev libsecret-1-dev libnotify-bin
|
||||
displayName: Install build tools
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export CHILD_CONCURRENCY="1"
|
||||
for i in {1..3}; do # try 3 times, for Terrapin
|
||||
yarn --frozen-lockfile && break
|
||||
if [ $i -eq 3 ]; then
|
||||
@ -81,67 +72,50 @@ steps:
|
||||
fi
|
||||
echo "Yarn failed $i, trying again..."
|
||||
done
|
||||
env:
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn postinstall
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
|
||||
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||
mkdir -p .build/node_modules_cache
|
||||
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Create node_modules archive
|
||||
|
||||
# Mixin must run before optimize, because the CSS loader will
|
||||
# inline small SVGs
|
||||
# Mixin must run before optimize, because the CSS loader will inline small SVGs
|
||||
- script: |
|
||||
set -e
|
||||
node build/azure-pipelines/mixin
|
||||
displayName: Mix in quality
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn gulp hygiene
|
||||
yarn monaco-compile-check
|
||||
yarn valid-layers-check
|
||||
displayName: Run hygiene, monaco compile & valid layers checks
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -
|
||||
./build/azure-pipelines/common/extract-telemetry.sh
|
||||
displayName: Extract Telemetry
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
|
||||
./build/azure-pipelines/common/publish-webview.sh
|
||||
displayName: Publish Webview
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn gulp compile-build
|
||||
yarn gulp compile-extensions-build
|
||||
yarn gulp minify-vscode
|
||||
yarn gulp minify-vscode-reh
|
||||
yarn gulp minify-vscode-reh-web
|
||||
displayName: Compile
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
yarn npm-run-all -lp core-ci extensions-ci hygiene eslint valid-layers-check
|
||||
displayName: Compile & Hygiene
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
|
||||
node build/azure-pipelines/upload-sourcemaps
|
||||
displayName: Upload sourcemaps
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -
|
||||
./build/azure-pipelines/common/extract-telemetry.sh
|
||||
displayName: Extract Telemetry
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
|
||||
./build/azure-pipelines/common/publish-webview.sh
|
||||
displayName: Publish Webview
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@ -149,13 +123,16 @@ steps:
|
||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||
node build/azure-pipelines/common/createBuild.js $VERSION
|
||||
displayName: Create build
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
# we gotta tarball everything in order to preserve file permissions
|
||||
- script: |
|
||||
set -e
|
||||
tar -czf $(Build.ArtifactStagingDirectory)/compilation.tar.gz .build out-*
|
||||
displayName: Compress compilation artifact
|
||||
|
||||
- task: PublishPipelineArtifact@1
|
||||
inputs:
|
||||
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
|
||||
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
|
||||
vstsFeed: "npm-vscode"
|
||||
platformIndependent: true
|
||||
alias: "Compilation"
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
targetPath: $(Build.ArtifactStagingDirectory)/compilation.tar.gz
|
||||
artifactName: Compilation
|
||||
displayName: Publish compilation artifact
|
||||
|
@ -1,2 +0,0 @@
|
||||
node_modules/
|
||||
*.js
|
@ -0,0 +1,36 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const cp = require("child_process");
|
||||
let tag = '';
|
||||
try {
|
||||
tag = cp
|
||||
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
|
||||
.toString()
|
||||
.trim();
|
||||
if (!isValidTag(tag)) {
|
||||
throw Error(`Invalid tag ${tag}`);
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
console.error(err);
|
||||
console.error('Failed to update types');
|
||||
process.exit(1);
|
||||
}
|
||||
function isValidTag(t) {
|
||||
if (t.split('.').length !== 3) {
|
||||
return false;
|
||||
}
|
||||
const [major, minor, bug] = t.split('.');
|
||||
// Only release for tags like 1.34.0
|
||||
if (bug !== '0') {
|
||||
return false;
|
||||
}
|
||||
if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
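// Illustrative check (not part of the diff): expected behaviour of isValidTag above.
// Only x.y.0 tags trigger a types update; patch and non-numeric tags are skipped.
console.assert(isValidTag('1.53.0') === true);
console.assert(isValidTag('1.53.2') === false);
console.assert(isValidTag('insider') === false);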
|
@ -9,7 +9,7 @@ pr: none
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
|
@ -0,0 +1,72 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs = require("fs");
|
||||
const cp = require("child_process");
|
||||
const path = require("path");
|
||||
let tag = '';
|
||||
try {
|
||||
tag = cp
|
||||
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
|
||||
.toString()
|
||||
.trim();
|
||||
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
|
||||
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
|
||||
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
|
||||
updateDTSFile(outPath, tag);
|
||||
console.log(`Done updating vscode.d.ts at ${outPath}`);
|
||||
}
|
||||
catch (err) {
|
||||
console.error(err);
|
||||
console.error('Failed to update types');
|
||||
process.exit(1);
|
||||
}
|
||||
function updateDTSFile(outPath, tag) {
|
||||
const oldContent = fs.readFileSync(outPath, 'utf-8');
|
||||
const newContent = getNewFileContent(oldContent, tag);
|
||||
fs.writeFileSync(outPath, newContent);
|
||||
}
|
||||
function repeat(str, times) {
|
||||
const result = new Array(times);
|
||||
for (let i = 0; i < times; i++) {
|
||||
result[i] = str;
|
||||
}
|
||||
return result.join('');
|
||||
}
|
||||
function convertTabsToSpaces(str) {
|
||||
return str.replace(/\t/gm, value => repeat(' ', value.length));
|
||||
}
|
||||
function getNewFileContent(content, tag) {
|
||||
const oldheader = [
|
||||
`/*---------------------------------------------------------------------------------------------`,
|
||||
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
||||
` * Licensed under the MIT License. See License.txt in the project root for license information.`,
|
||||
` *--------------------------------------------------------------------------------------------*/`
|
||||
].join('\n');
|
||||
return convertTabsToSpaces(getNewFileHeader(tag) + content.slice(oldheader.length));
|
||||
}
|
||||
function getNewFileHeader(tag) {
|
||||
const [major, minor] = tag.split('.');
|
||||
const shorttag = `${major}.${minor}`;
|
||||
const header = [
|
||||
`// Type definitions for Visual Studio Code ${shorttag}`,
|
||||
`// Project: https://github.com/microsoft/vscode`,
|
||||
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/microsoft>`,
|
||||
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
|
||||
``,
|
||||
`/*---------------------------------------------------------------------------------------------`,
|
||||
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
||||
` * Licensed under the MIT License.`,
|
||||
` * See https://github.com/microsoft/vscode/blob/master/LICENSE.txt for license information.`,
|
||||
` *--------------------------------------------------------------------------------------------*/`,
|
||||
``,
|
||||
`/**`,
|
||||
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
|
||||
` * See https://code.visualstudio.com/api for more information`,
|
||||
` */`
|
||||
].join('\n');
|
||||
return header;
|
||||
}
|
@ -1,7 +1,7 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
|
35
lib/vscode/build/azure-pipelines/upload-cdn.js
Normal file
@ -0,0 +1,35 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const path = require("path");
|
||||
const es = require("event-stream");
|
||||
const vfs = require("vinyl-fs");
|
||||
const util = require("../lib/util");
|
||||
const filter = require("gulp-filter");
|
||||
const gzip = require("gulp-gzip");
|
||||
const azure = require('gulp-azure-storage');
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
const commit = util.getVersion(root);
|
||||
function main() {
|
||||
return vfs.src('**', { cwd: '../vscode-web', base: '../vscode-web', dot: true })
|
||||
.pipe(filter(f => !f.isDirectory()))
|
||||
.pipe(gzip({ append: false }))
|
||||
.pipe(es.through(function (data) {
|
||||
console.log('Uploading CDN file:', data.relative); // debug
|
||||
this.emit('data', data);
|
||||
}))
|
||||
.pipe(azure.upload({
|
||||
account: process.env.AZURE_STORAGE_ACCOUNT,
|
||||
key: process.env.AZURE_STORAGE_ACCESS_KEY,
|
||||
container: process.env.VSCODE_QUALITY,
|
||||
prefix: commit + '/',
|
||||
contentSettings: {
|
||||
contentEncoding: 'gzip',
|
||||
cacheControl: 'max-age=31536000, public'
|
||||
}
|
||||
}));
|
||||
}
|
||||
main();
|
55
lib/vscode/build/azure-pipelines/upload-sourcemaps.js
Normal file
@ -0,0 +1,55 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const path = require("path");
|
||||
const es = require("event-stream");
|
||||
const vfs = require("vinyl-fs");
|
||||
const util = require("../lib/util");
|
||||
// @ts-ignore
|
||||
const deps = require("../lib/dependencies");
|
||||
const azure = require('gulp-azure-storage');
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
const commit = util.getVersion(root);
|
||||
// optionally allow to pass in explicit base/maps to upload
|
||||
const [, , base, maps] = process.argv;
|
||||
function src(base, maps = `${base}/**/*.map`) {
|
||||
return vfs.src(maps, { base })
|
||||
.pipe(es.mapSync((f) => {
|
||||
f.path = `${f.base}/core/${f.relative}`;
|
||||
return f;
|
||||
}));
|
||||
}
|
||||
function main() {
|
||||
const sources = [];
|
||||
// vscode client maps (default)
|
||||
if (!base) {
|
||||
const vs = src('out-vscode-min'); // client source-maps only
|
||||
sources.push(vs);
|
||||
const productionDependencies = deps.getProductionDependencies(root);
|
||||
const productionDependenciesSrc = productionDependencies.map(d => path.relative(root, d.path)).map(d => `./${d}/**/*.map`);
|
||||
const nodeModules = vfs.src(productionDependenciesSrc, { base: '.' })
|
||||
.pipe(util.cleanNodeModules(path.join(root, 'build', '.moduleignore')));
|
||||
sources.push(nodeModules);
|
||||
const extensionsOut = vfs.src(['.build/extensions/**/*.js.map', '!**/node_modules/**'], { base: '.build' });
|
||||
sources.push(extensionsOut);
|
||||
}
|
||||
// specific client base/maps
|
||||
else {
|
||||
sources.push(src(base, maps));
|
||||
}
|
||||
return es.merge(...sources)
|
||||
.pipe(es.through(function (data) {
|
||||
console.log('Uploading Sourcemap', data.relative); // debug
|
||||
this.emit('data', data);
|
||||
}))
|
||||
.pipe(azure.upload({
|
||||
account: process.env.AZURE_STORAGE_ACCOUNT,
|
||||
key: process.env.AZURE_STORAGE_ACCESS_KEY,
|
||||
container: 'sourcemaps',
|
||||
prefix: commit + '/'
|
||||
}));
|
||||
}
|
||||
main();
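// Usage sketch (illustrative, not part of the diff): the pipeline runs this script with the
// storage key in the environment; an explicit base/maps pair may optionally be passed, e.g.
//   AZURE_STORAGE_ACCOUNT=<account> AZURE_STORAGE_ACCESS_KEY=<key> \
//     node build/azure-pipelines/upload-sourcemaps out-vscode-min 'out-vscode-min/**/*.map'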
|
@ -11,7 +11,7 @@ import * as Vinyl from 'vinyl';
|
||||
import * as vfs from 'vinyl-fs';
|
||||
import * as util from '../lib/util';
|
||||
// @ts-ignore
|
||||
import * as deps from '../dependencies';
|
||||
import * as deps from '../lib/dependencies';
|
||||
const azure = require('gulp-azure-storage');
|
||||
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
|
@ -1,28 +1,7 @@
|
||||
steps:
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
echo -n $BUILD_SOURCEVERSION > .build/commit
|
||||
echo -n $VSCODE_QUALITY > .build/quality
|
||||
echo -n $ENABLE_TERRAPIN > .build/terrapin
|
||||
displayName: Prepare compilation cache flag
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
|
||||
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
|
||||
vstsFeed: "npm-vscode"
|
||||
platformIndependent: true
|
||||
alias: "Compilation"
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
exit 1
|
||||
displayName: Check RestoreCache
|
||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
@ -34,6 +13,17 @@ steps:
|
||||
azureSubscription: "vscode-builds-subscription"
|
||||
KeyVaultName: vscode
|
||||
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: Compilation
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
displayName: Download compilation output
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
|
||||
displayName: Extract compilation output
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
cat << EOF > ~/.netrc
|
||||
@ -48,30 +38,35 @@ steps:
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||
git fetch distro
|
||||
git merge $(node -p "require('./package.json').distro")
|
||||
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
displayName: Switch to Terrapin packages
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
mkdir -p .build
|
||||
node build/azure-pipelines/common/computeNodeModulesCacheKey.js "web" $ENABLE_TERRAPIN > .build/yarnlockhash
|
||||
displayName: Prepare yarn cache flags
|
||||
|
||||
- script: |
|
||||
echo -n "web" > .build/arch
|
||||
displayName: Prepare yarn cache flag
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
|
||||
path: .build/node_modules_cache
|
||||
cacheHitVar: NODE_MODULES_RESTORED
|
||||
displayName: Restore node_modules cache
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf .build/node_modules_cache/cache.tgz
|
||||
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Extract node_modules cache
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
displayName: Switch to Terrapin packages
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export CHILD_CONCURRENCY="1"
|
||||
for i in {1..3}; do # try 3 times, for Terrapin
|
||||
yarn --frozen-lockfile && break
|
||||
if [ $i -eq 3 ]; then
|
||||
@ -80,21 +75,19 @@ steps:
|
||||
fi
|
||||
echo "Yarn failed $i, trying again..."
|
||||
done
|
||||
env:
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn postinstall
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||
mkdir -p .build/node_modules_cache
|
||||
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Create node_modules archive
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@ -130,3 +123,8 @@ steps:
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
./build/azure-pipelines/web/publish.sh
|
||||
displayName: Publish
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/vscode-web.tar.gz
|
||||
artifact: vscode-web-standalone
|
||||
displayName: Publish web archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
@ -1,6 +1,10 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<configuration>
|
||||
<packageSources>
|
||||
<add key="ESRP" value="https://microsoft.pkgs.visualstudio.com/_packaging/ESRP/nuget/v3/index.json" />
|
||||
</packageSources>
|
||||
</configuration>
|
||||
<packageSources>
|
||||
<clear />
|
||||
<add key="ESRP" value="https://microsoft.pkgs.visualstudio.com/_packaging/ESRP/nuget/v3/index.json" />
|
||||
</packageSources>
|
||||
<disabledPackageSources>
|
||||
<clear />
|
||||
</disabledPackageSources>
|
||||
</configuration>
|
||||
|
@ -1,87 +0,0 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.x"
|
||||
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: "2.x"
|
||||
addToPath: true
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "vscode-build-cache"
|
||||
|
||||
- powershell: |
|
||||
yarn --frozen-lockfile
|
||||
env:
|
||||
CHILD_CONCURRENCY: "1"
|
||||
displayName: Install Dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "vscode-build-cache"
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { yarn postinstall }
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||
|
||||
- powershell: |
|
||||
yarn electron
|
||||
displayName: Download Electron
|
||||
|
||||
- powershell: |
|
||||
yarn monaco-compile-check
|
||||
displayName: Run Monaco Editor Checks
|
||||
|
||||
- script: |
|
||||
yarn valid-layers-check
|
||||
displayName: Run Valid Layers Checks
|
||||
|
||||
- powershell: |
|
||||
yarn compile
|
||||
displayName: Compile Sources
|
||||
|
||||
- powershell: |
|
||||
yarn download-builtin-extensions
|
||||
displayName: Download Built-in Extensions
|
||||
|
||||
- powershell: |
|
||||
.\scripts\test.bat --tfs "Unit Tests"
|
||||
displayName: Run Unit Tests (Electron)
|
||||
|
||||
- powershell: |
|
||||
yarn test-browser --browser chromium --browser firefox --tfs "Browser Unit Tests"
|
||||
displayName: Run Unit Tests (Browser)
|
||||
|
||||
- powershell: |
|
||||
.\scripts\test-integration.bat --tfs "Integration Tests"
|
||||
displayName: Run Integration Tests (Electron)
|
||||
|
||||
- task: PublishPipelineArtifact@0
|
||||
displayName: "Publish Crash Reports"
|
||||
inputs:
|
||||
artifactName: crash-dump-windows
|
||||
targetPath: .build\crashes
|
||||
continueOnError: true
|
||||
condition: failed()
|
||||
|
||||
- task: PublishTestResults@2
|
||||
displayName: Publish Tests Results
|
||||
inputs:
|
||||
testResultsFiles: "*-results.xml"
|
||||
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
|
||||
condition: succeededOrFailed()
|
@ -3,7 +3,7 @@ $ErrorActionPreference = "Stop"
|
||||
|
||||
$CertBytes = [System.Convert]::FromBase64String($CertBase64)
|
||||
$CertCollection = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2Collection
|
||||
$CertCollection.Import($CertBytes, $null, [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::Exportable)
|
||||
$CertCollection.Import($CertBytes, $null, [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::Exportable -bxor [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::PersistKeySet)
|
||||
|
||||
$CertStore = New-Object System.Security.Cryptography.X509Certificates.X509Store("My","LocalMachine")
|
||||
$CertStore.Open("ReadWrite")
|
||||
|
@ -1,28 +1,7 @@
|
||||
steps:
|
||||
- powershell: |
|
||||
mkdir .build -ea 0
|
||||
"$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
|
||||
"$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
|
||||
"$env:ENABLE_TERRAPIN" | Out-File -Encoding ascii -NoNewLine .build\terrapin
|
||||
displayName: Prepare compilation cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
|
||||
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
|
||||
vstsFeed: "npm-vscode"
|
||||
platformIndependent: true
|
||||
alias: "Compilation"
|
||||
|
||||
- powershell: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
exit 1
|
||||
displayName: Check RestoreCache
|
||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
@ -39,6 +18,18 @@ steps:
|
||||
azureSubscription: "vscode-builds-subscription"
|
||||
KeyVaultName: vscode
|
||||
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: Compilation
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
displayName: Download compilation output
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { tar --force-local -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz }
|
||||
displayName: Extract compilation output
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
@ -51,26 +42,34 @@ steps:
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git" }
|
||||
exec { git fetch distro }
|
||||
exec { git merge $(node -p "require('./package.json').distro") }
|
||||
exec { git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro") }
|
||||
displayName: Merge distro
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
displayName: Switch to Terrapin packages
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
|
||||
- powershell: |
|
||||
"$(VSCODE_ARCH)" | Out-File -Encoding ascii -NoNewLine .build\arch
|
||||
"$env:ENABLE_TERRAPIN" | Out-File -Encoding ascii -NoNewLine .build\terrapin
|
||||
node build/azure-pipelines/common/computeNodeModulesCacheKey.js > .build/yarnlockhash
|
||||
displayName: Prepare yarn cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
key: 'nodeModules | $(Agent.OS) | .build/arch, .build/terrapin, .build/yarnlockhash'
|
||||
path: .build/node_modules_cache
|
||||
cacheHitVar: NODE_MODULES_RESTORED
|
||||
displayName: Restore node_modules cache
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { 7z.exe x .build/node_modules_cache/cache.7z -aos }
|
||||
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Extract node_modules cache
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
displayName: Switch to Terrapin packages
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
@ -79,22 +78,20 @@ steps:
|
||||
$env:npm_config_arch="$(VSCODE_ARCH)"
|
||||
$env:CHILD_CONCURRENCY="1"
|
||||
retry { exec { yarn --frozen-lockfile } }
|
||||
env:
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { yarn postinstall }
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||
exec { node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt }
|
||||
exec { mkdir -Force .build/node_modules_cache }
|
||||
exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt }
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Create node_modules archive
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
@ -107,11 +104,18 @@ steps:
|
||||
$ErrorActionPreference = "Stop"
|
||||
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
|
||||
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-min-ci" }
|
||||
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-code-helper" }
|
||||
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-inno-updater" }
|
||||
echo "##vso[task.setvariable variable=CodeSigningFolderPath]$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)"
|
||||
displayName: Build
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
|
||||
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-code-helper" }
|
||||
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-inno-updater" }
|
||||
displayName: Prepare Package
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
@ -122,12 +126,21 @@ steps:
|
||||
displayName: Build Server
|
||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
|
||||
exec { yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install" }
|
||||
displayName: Download Electron and Playwright
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { yarn electron $(VSCODE_ARCH) }
|
||||
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
|
||||
displayName: Run unit tests (Electron)
|
||||
timeoutInMinutes: 7
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- powershell: |
|
||||
@ -135,6 +148,14 @@ steps:
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { yarn test-browser --build --browser chromium --browser firefox --tfs "Browser Unit Tests" }
|
||||
displayName: Run unit tests (Browser)
|
||||
timeoutInMinutes: 7
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { yarn --cwd test/integration/browser compile }
|
||||
displayName: Compile integration tests
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- powershell: |
|
||||
@ -148,6 +169,7 @@ steps:
|
||||
$AppNameShort = $AppProductJson.nameShort
|
||||
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
|
||||
displayName: Run integration tests (Electron)
|
||||
timeoutInMinutes: 10
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- powershell: |
|
||||
@ -155,6 +177,7 @@ steps:
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-web-integration.bat --browser firefox }
|
||||
displayName: Run integration tests (Browser)
|
||||
timeoutInMinutes: 7
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- powershell: |
|
||||
@ -165,6 +188,7 @@ steps:
|
||||
$AppNameShort = $AppProductJson.nameShort
|
||||
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-remote-integration.bat }
|
||||
displayName: Run remote integration tests (Electron)
|
||||
timeoutInMinutes: 7
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- task: PublishPipelineArtifact@0
|
||||
@ -180,7 +204,7 @@ steps:
|
||||
inputs:
|
||||
testResultsFiles: "*-results.xml"
|
||||
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
|
||||
condition: and(succeededOrFailed(), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
condition: and(succeededOrFailed(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||
inputs:
|
||||
@ -236,6 +260,7 @@ steps:
|
||||
}
|
||||
]
|
||||
SessionTimeout: 120
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: NuGetCommand@2
|
||||
displayName: Install ESRPClient.exe
|
||||
@ -245,11 +270,13 @@ steps:
|
||||
nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
|
||||
externalFeedCredentials: "ESRP Nuget"
|
||||
restoreDirectory: packages
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: ESRPImportCertTask@1
|
||||
displayName: Import ESRP Request Signing Certificate
|
||||
inputs:
|
||||
ESRP: "ESRP CodeSign"
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: PowerShell@2
|
||||
inputs:
|
||||
@ -257,6 +284,7 @@ steps:
|
||||
filePath: .\build\azure-pipelines\win32\import-esrp-auth-cert.ps1
|
||||
arguments: "$(ESRP-SSL-AADAuth)"
|
||||
displayName: Import ESRP Auth Certificate
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
@ -266,6 +294,32 @@ steps:
|
||||
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
|
||||
.\build\azure-pipelines\win32\publish.ps1
|
||||
displayName: Publish
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip
|
||||
artifact: vscode-win32-$(VSCODE_ARCH)
|
||||
displayName: Publish archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\system-setup\VSCodeSetup.exe
|
||||
artifact: vscode-win32-$(VSCODE_ARCH)-setup
|
||||
displayName: Publish system setup
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\user-setup\VSCodeSetup.exe
|
||||
artifact: vscode-win32-$(VSCODE_ARCH)-user-setup
|
||||
displayName: Publish user setup
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(System.DefaultWorkingDirectory)\.build\vscode-server-win32-$(VSCODE_ARCH).zip
|
||||
artifact: vscode-server-win32-$(VSCODE_ARCH)
|
||||
displayName: Publish server archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- publish: $(System.DefaultWorkingDirectory)\.build\vscode-server-win32-$(VSCODE_ARCH)-web.zip
|
||||
artifact: vscode-server-win32-$(VSCODE_ARCH)-web
|
||||
displayName: Publish web server archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||
displayName: "Component Detection"
|
||||
|
@ -6,8 +6,7 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
const { remote } = require('electron');
|
||||
const dialog = remote.dialog;
|
||||
const { ipcRenderer } = require('electron');
|
||||
|
||||
const builtInExtensionsPath = path.join(__dirname, '..', '..', 'product.json');
|
||||
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
|
||||
@ -84,17 +83,13 @@ function render(el, state) {
|
||||
}
|
||||
|
||||
const localInput = renderOption(form, `local-${ext.name}`, 'Local', 'local', !!local);
|
||||
localInput.onchange = function () {
|
||||
const result = dialog.showOpenDialog(remote.getCurrentWindow(), {
|
||||
title: 'Choose Folder',
|
||||
properties: ['openDirectory']
|
||||
});
|
||||
localInput.onchange = async function () {
|
||||
const result = await ipcRenderer.invoke('pickdir');
|
||||
|
||||
if (result && result.length >= 1) {
|
||||
control[ext.name] = result[0];
|
||||
if (result) {
|
||||
control[ext.name] = result;
|
||||
setState({ builtin, control });
|
||||
}
|
||||
|
||||
setState({ builtin, control });
|
||||
};
|
||||
|
||||
if (local) {
|
||||
|
@ -3,12 +3,25 @@
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

const { app, BrowserWindow } = require('electron');
const { app, BrowserWindow, ipcMain, dialog } = require('electron');
const url = require('url');
const path = require('path');

let window = null;

ipcMain.handle('pickdir', async () => {
    const result = await dialog.showOpenDialog(window, {
        title: 'Choose Folder',
        properties: ['openDirectory']
    });

    if (result.canceled || result.filePaths.length < 1) {
        return undefined;
    }

    return result.filePaths[0];
});

app.once('ready', () => {
    window = new BrowserWindow({ width: 800, height: 600, webPreferences: { nodeIntegration: true, webviewTag: true, enableWebSQL: false, nativeWindowOpen: true } });
    window.setMenuBarVisibility(false);

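For context, a hedged sketch (not part of this commit) of how a renderer script would call the new 'pickdir' channel registered above; `chooseLocalFolder` is a hypothetical helper name, and the real caller is the `localInput.onchange` handler shown earlier in this diff.

// Hedged sketch (assumption): renderer-side counterpart of the 'pickdir'
// handler above. `chooseLocalFolder` is a hypothetical name for illustration.
const { ipcRenderer } = require('electron');

async function chooseLocalFolder() {
    const dir = await ipcRenderer.invoke('pickdir'); // absolute path or undefined
    if (dir) {
        console.log('selected folder:', dir);
    }
    return dir;
}
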
58
lib/vscode/build/darwin/create-universal-app.js
Normal file
@ -0,0 +1,58 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const vscode_universal_1 = require("vscode-universal");
|
||||
const fs = require("fs-extra");
|
||||
const path = require("path");
|
||||
const plist = require("plist");
|
||||
const product = require("../../product.json");
|
||||
async function main() {
|
||||
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
|
||||
const arch = process.env['VSCODE_ARCH'];
|
||||
if (!buildDir) {
|
||||
throw new Error('$AGENT_BUILDDIRECTORY not set');
|
||||
}
|
||||
const appName = product.nameLong + '.app';
|
||||
const x64AppPath = path.join(buildDir, 'vscode-x64', appName);
|
||||
const arm64AppPath = path.join(buildDir, 'vscode-arm64', appName);
|
||||
const x64AsarPath = path.join(x64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
|
||||
const arm64AsarPath = path.join(arm64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
|
||||
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
|
||||
const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
|
||||
const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
|
||||
await vscode_universal_1.makeUniversalApp({
|
||||
x64AppPath,
|
||||
arm64AppPath,
|
||||
x64AsarPath,
|
||||
arm64AsarPath,
|
||||
filesToSkip: [
|
||||
'product.json',
|
||||
'Credits.rtf',
|
||||
'CodeResources',
|
||||
'fsevents.node',
|
||||
'.npmrc'
|
||||
],
|
||||
outAppPath,
|
||||
force: true
|
||||
});
|
||||
let productJson = await fs.readJson(productJsonPath);
|
||||
Object.assign(productJson, {
|
||||
darwinUniversalAssetId: 'darwin-universal'
|
||||
});
|
||||
await fs.writeJson(productJsonPath, productJson);
|
||||
let infoPlistString = await fs.readFile(infoPlistPath, 'utf8');
|
||||
let infoPlistJson = plist.parse(infoPlistString);
|
||||
Object.assign(infoPlistJson, {
|
||||
LSRequiresNativeExecution: true
|
||||
});
|
||||
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
|
||||
}
|
||||
if (require.main === module) {
|
||||
main().catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
66
lib/vscode/build/darwin/create-universal-app.ts
Normal file
@ -0,0 +1,66 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import { makeUniversalApp } from 'vscode-universal';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import * as plist from 'plist';
|
||||
import * as product from '../../product.json';
|
||||
|
||||
async function main() {
|
||||
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
|
||||
const arch = process.env['VSCODE_ARCH'];
|
||||
|
||||
if (!buildDir) {
|
||||
throw new Error('$AGENT_BUILDDIRECTORY not set');
|
||||
}
|
||||
|
||||
const appName = product.nameLong + '.app';
|
||||
const x64AppPath = path.join(buildDir, 'vscode-x64', appName);
|
||||
const arm64AppPath = path.join(buildDir, 'vscode-arm64', appName);
|
||||
const x64AsarPath = path.join(x64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
|
||||
const arm64AsarPath = path.join(arm64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
|
||||
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
|
||||
const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
|
||||
const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
|
||||
|
||||
await makeUniversalApp({
|
||||
x64AppPath,
|
||||
arm64AppPath,
|
||||
x64AsarPath,
|
||||
arm64AsarPath,
|
||||
filesToSkip: [
|
||||
'product.json',
|
||||
'Credits.rtf',
|
||||
'CodeResources',
|
||||
'fsevents.node',
|
||||
'.npmrc'
|
||||
],
|
||||
outAppPath,
|
||||
force: true
|
||||
});
|
||||
|
||||
let productJson = await fs.readJson(productJsonPath);
|
||||
Object.assign(productJson, {
|
||||
darwinUniversalAssetId: 'darwin-universal'
|
||||
});
|
||||
await fs.writeJson(productJsonPath, productJson);
|
||||
|
||||
let infoPlistString = await fs.readFile(infoPlistPath, 'utf8');
|
||||
let infoPlistJson = plist.parse(infoPlistString);
|
||||
Object.assign(infoPlistJson, {
|
||||
LSRequiresNativeExecution: true
|
||||
});
|
||||
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
main().catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
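A small verification sketch (an assumption, not part of the build) that checks the universal app produced by the script above carries the patched product.json and Info.plist values:

// Hedged sketch (assumption): confirm the fields written by create-universal-app.
const fs = require('fs-extra');
const path = require('path');
const plist = require('plist');

async function verifyUniversalApp(outAppPath) {
    const productJson = await fs.readJson(path.join(outAppPath, 'Contents', 'Resources', 'app', 'product.json'));
    const infoPlist = plist.parse(await fs.readFile(path.join(outAppPath, 'Contents', 'Info.plist'), 'utf8'));
    return productJson.darwinUniversalAssetId === 'darwin-universal'
        && infoPlist.LSRequiresNativeExecution === true;
}
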
58
lib/vscode/build/darwin/sign.js
Normal file
@ -0,0 +1,58 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const codesign = require("electron-osx-sign");
|
||||
const path = require("path");
|
||||
const util = require("../lib/util");
|
||||
const product = require("../../product.json");
|
||||
async function main() {
|
||||
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
|
||||
const tempDir = process.env['AGENT_TEMPDIRECTORY'];
|
||||
const arch = process.env['VSCODE_ARCH'];
|
||||
if (!buildDir) {
|
||||
throw new Error('$AGENT_BUILDDIRECTORY not set');
|
||||
}
|
||||
if (!tempDir) {
|
||||
throw new Error('$AGENT_TEMPDIRECTORY not set');
|
||||
}
|
||||
const baseDir = path.dirname(__dirname);
|
||||
const appRoot = path.join(buildDir, `VSCode-darwin-${arch}`);
|
||||
const appName = product.nameLong + '.app';
|
||||
const appFrameworkPath = path.join(appRoot, appName, 'Contents', 'Frameworks');
|
||||
const helperAppBaseName = product.nameShort;
|
||||
const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app';
|
||||
const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app';
|
||||
const defaultOpts = {
|
||||
app: path.join(appRoot, appName),
|
||||
platform: 'darwin',
|
||||
entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'app-entitlements.plist'),
|
||||
'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'app-entitlements.plist'),
|
||||
hardenedRuntime: true,
|
||||
'pre-auto-entitlements': false,
|
||||
'pre-embed-provisioning-profile': false,
|
||||
keychain: path.join(tempDir, 'buildagent.keychain'),
|
||||
version: util.getElectronVersion(),
|
||||
identity: '99FM488X57',
|
||||
'gatekeeper-assess': false
|
||||
};
|
||||
const appOpts = Object.assign(Object.assign({}, defaultOpts), {
|
||||
// TODO(deepak1556): Incorrectly declared type in electron-osx-sign
|
||||
ignore: (filePath) => {
|
||||
return filePath.includes(gpuHelperAppName) ||
|
||||
filePath.includes(rendererHelperAppName);
|
||||
} });
|
||||
const gpuHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, gpuHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist') });
|
||||
const rendererHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, rendererHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist') });
|
||||
await codesign.signAsync(gpuHelperOpts);
|
||||
await codesign.signAsync(rendererHelperOpts);
|
||||
await codesign.signAsync(appOpts);
|
||||
}
|
||||
if (require.main === module) {
|
||||
main().catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
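The `ignore` option above skips the GPU and Renderer helper apps so they can be signed separately with their own entitlements; a hedged sketch of the predicate's behaviour follows, with helper names that are illustrative only (the real ones derive from `product.nameShort`).

// Hedged sketch (assumption): helper app names below are illustrative.
const gpuHelperAppName = 'Code - OSS Helper (GPU).app';
const rendererHelperAppName = 'Code - OSS Helper (Renderer).app';

const ignore = (filePath) =>
    filePath.includes(gpuHelperAppName) || filePath.includes(rendererHelperAppName);

console.log(ignore('/tmp/Code - OSS.app/Contents/Frameworks/Code - OSS Helper (GPU).app/Contents/MacOS/helper')); // true
console.log(ignore('/tmp/Code - OSS.app/Contents/MacOS/Electron')); // false
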
@ -29,7 +29,6 @@ async function main(): Promise<void> {
|
||||
const appFrameworkPath = path.join(appRoot, appName, 'Contents', 'Frameworks');
|
||||
const helperAppBaseName = product.nameShort;
|
||||
const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app';
|
||||
const pluginHelperAppName = helperAppBaseName + ' Helper (Plugin).app';
|
||||
const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app';
|
||||
|
||||
const defaultOpts: codesign.SignOptions = {
|
||||
@ -51,7 +50,6 @@ async function main(): Promise<void> {
|
||||
// TODO(deepak1556): Incorrectly declared type in electron-osx-sign
|
||||
ignore: (filePath: string) => {
|
||||
return filePath.includes(gpuHelperAppName) ||
|
||||
filePath.includes(pluginHelperAppName) ||
|
||||
filePath.includes(rendererHelperAppName);
|
||||
}
|
||||
};
|
||||
@ -63,13 +61,6 @@ async function main(): Promise<void> {
|
||||
'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'),
|
||||
};
|
||||
|
||||
const pluginHelperOpts: codesign.SignOptions = {
|
||||
...defaultOpts,
|
||||
app: path.join(appFrameworkPath, pluginHelperAppName),
|
||||
entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist'),
|
||||
'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist'),
|
||||
};
|
||||
|
||||
const rendererHelperOpts: codesign.SignOptions = {
|
||||
...defaultOpts,
|
||||
app: path.join(appFrameworkPath, rendererHelperAppName),
|
||||
@ -78,7 +69,6 @@ async function main(): Promise<void> {
|
||||
};
|
||||
|
||||
await codesign.signAsync(gpuHelperOpts);
|
||||
await codesign.signAsync(pluginHelperOpts);
|
||||
await codesign.signAsync(rendererHelperOpts);
|
||||
await codesign.signAsync(appOpts as any);
|
||||
}
|
||||
|
36
lib/vscode/build/eslint.js
Normal file
@ -0,0 +1,36 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
const es = require('event-stream');
|
||||
const vfs = require('vinyl-fs');
|
||||
const { jsHygieneFilter, tsHygieneFilter } = require('./filters');
|
||||
|
||||
function eslint() {
|
||||
const gulpeslint = require('gulp-eslint');
|
||||
return vfs
|
||||
.src([...jsHygieneFilter, ...tsHygieneFilter], { base: '.', follow: true, allowEmpty: true })
|
||||
.pipe(
|
||||
gulpeslint({
|
||||
configFile: '.eslintrc.json',
|
||||
rulePaths: ['./build/lib/eslint'],
|
||||
})
|
||||
)
|
||||
.pipe(gulpeslint.formatEach('compact'))
|
||||
.pipe(
|
||||
gulpeslint.results((results) => {
|
||||
if (results.warningCount > 0 || results.errorCount > 0) {
|
||||
throw new Error('eslint failed with warnings and/or errors');
|
||||
}
|
||||
})
|
||||
).pipe(es.through(function () { /* noop, important for the stream to end */ }));
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
eslint().on('error', (err) => {
|
||||
console.error();
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
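A hedged usage sketch (an assumption, not part of this commit) for running the standalone lint entry point above from the repository root and propagating its exit status:

// Hedged usage sketch (assumption): shell out to build/eslint.js and fail the
// calling process if linting fails.
const { spawnSync } = require('child_process');

const result = spawnSync(process.execPath, ['build/eslint.js'], { stdio: 'inherit' });
process.exit(result.status === null ? 1 : result.status);
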
207
lib/vscode/build/ext.js
Normal file
@ -0,0 +1,207 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
const cp = require('child_process');
|
||||
const os = require('os');
|
||||
const mkdirp = require('mkdirp');
|
||||
const product = require('../product.json');
|
||||
const root = path.resolve(path.join(__dirname, '..', '..'));
|
||||
const exists = (path) => fs.stat(path).then(() => true, () => false);
|
||||
|
||||
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
|
||||
|
||||
async function readControlFile() {
|
||||
try {
|
||||
return JSON.parse(await fs.readFile(controlFilePath, 'utf8'));
|
||||
} catch (err) {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
async function writeControlFile(control) {
|
||||
await mkdirp(path.dirname(controlFilePath));
|
||||
await fs.writeFile(controlFilePath, JSON.stringify(control, null, ' '));
|
||||
}
|
||||
|
||||
async function exec(cmd, args, opts = {}) {
|
||||
return new Promise((c, e) => {
|
||||
const child = cp.spawn(cmd, args, { stdio: 'inherit', env: process.env, ...opts });
|
||||
child.on('close', code => code === 0 ? c() : e(`Returned ${code}`));
|
||||
});
|
||||
}
|
||||
|
||||
function getFolderPath(extDesc) {
    const folder = extDesc.repo.replace(/.*\//, '');
    return path.join(root, folder);
}
|
||||
|
||||
async function getExtensionType(folderPath) {
|
||||
const pkg = JSON.parse(await fs.readFile(path.join(folderPath, 'package.json'), 'utf8'));
|
||||
|
||||
if (pkg['contributes']['themes'] || pkg['contributes']['iconThemes']) {
|
||||
return 'theme';
|
||||
} else if (pkg['contributes']['grammars']) {
|
||||
return 'grammar';
|
||||
} else {
|
||||
return 'misc';
|
||||
}
|
||||
}
|
||||
|
||||
async function initExtension(extDesc) {
|
||||
const folderPath = getFolderPath(extDesc);
|
||||
|
||||
if (!await exists(folderPath)) {
|
||||
console.log(`⏳ git clone: ${extDesc.name}`);
|
||||
await exec('git', ['clone', `${extDesc.repo}.git`], { cwd: root });
|
||||
}
|
||||
|
||||
const type = await getExtensionType(folderPath);
|
||||
return { path: folderPath, type, ...extDesc };
|
||||
}
|
||||
|
||||
async function createWorkspace(type, extensions) {
|
||||
const workspaceName = `vscode-${type}-extensions.code-workspace`;
|
||||
const workspacePath = path.join(root, workspaceName);
|
||||
const workspace = { folders: extensions.map(ext => ({ path: path.basename(ext.path) })) };
|
||||
|
||||
if (!await exists(workspacePath)) {
|
||||
console.log(`✅ create workspace: ${workspaceName}`);
|
||||
}
|
||||
|
||||
await fs.writeFile(workspacePath, JSON.stringify(workspace, undefined, ' '));
|
||||
}
|
||||
|
||||
async function init() {
|
||||
const extensions = [];
|
||||
|
||||
for (const extDesc of product.builtInExtensions) {
|
||||
extensions.push(await initExtension(extDesc));
|
||||
}
|
||||
|
||||
await createWorkspace('all', extensions);
|
||||
|
||||
const byType = extensions
|
||||
.reduce((m, e) => m.set(e.type, [...(m.get(e.type) || []), e]), new Map());
|
||||
|
||||
for (const [type, extensions] of byType) {
|
||||
await createWorkspace(type, extensions);
|
||||
}
|
||||
|
||||
return byType;
|
||||
}
|
||||
|
||||
async function status() {
|
||||
const byType = await init();
|
||||
const control = await readControlFile();
|
||||
|
||||
for (const [type, extensions] of byType) {
|
||||
console.log(`${type} (${extensions.length} extensions):`);
|
||||
|
||||
const maxWidth = Math.max(...extensions.map(e => e.name.length));
|
||||
for (const ext of extensions) {
|
||||
console.log(` ${ext.name.padEnd(maxWidth, ' ')} ➡ ${control[ext.name]}`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`total: ${product.builtInExtensions.length} extensions`);
|
||||
}
|
||||
|
||||
async function each([cmd, ...args], opts) {
|
||||
await init();
|
||||
|
||||
for (const extDesc of product.builtInExtensions) {
|
||||
const folderPath = getFolderPath(extDesc);
|
||||
|
||||
if (opts.type) {
|
||||
const type = await getExtensionType(folderPath);
|
||||
|
||||
if (type !== opts.type) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`👉 ${extDesc.name}`);
|
||||
await exec(cmd, args, { cwd: folderPath });
|
||||
}
|
||||
}
|
||||
|
||||
async function _link(extensions, opts, fn) {
|
||||
await init();
|
||||
|
||||
const control = await readControlFile();
|
||||
|
||||
for (const extDesc of product.builtInExtensions) {
|
||||
if (extensions.length > 0 && extensions.indexOf(extDesc.name) === -1) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (opts.type) {
|
||||
const folderPath = getFolderPath(extDesc);
|
||||
const type = await getExtensionType(folderPath);
|
||||
|
||||
if (type !== opts.type) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
await fn(control, extDesc);
|
||||
}
|
||||
|
||||
await writeControlFile(control);
|
||||
}
|
||||
|
||||
async function link(extensions, opts) {
|
||||
await _link(extensions, opts, async (control, extDesc) => {
|
||||
const ext = await initExtension(extDesc);
|
||||
control[extDesc.name] = ext.path;
|
||||
console.log(`👉 link: ${extDesc.name} ➡ ${ext.path}`);
|
||||
});
|
||||
}
|
||||
|
||||
async function unlink(extensions, opts) {
|
||||
await _link(extensions, opts, async (control, extDesc) => {
|
||||
control[extDesc.name] = 'marketplace';
|
||||
console.log(`👉 unlink: ${extDesc.name}`);
|
||||
});
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
const { program } = require('commander');
|
||||
|
||||
program.version('0.0.1');
|
||||
|
||||
program
|
||||
.command('init')
|
||||
.description('Initialize workspace with built-in extensions')
|
||||
.action(init);
|
||||
|
||||
program
|
||||
.command('status')
|
||||
.description('Print extension status')
|
||||
.action(status);
|
||||
|
||||
program
|
||||
.command('each <command...>')
|
||||
.option('-t, --type <type>', 'Specific type only')
|
||||
.description('Run a command in each extension repository')
|
||||
.allowUnknownOption()
|
||||
.action(each);
|
||||
|
||||
program
|
||||
.command('link [extensions...]')
|
||||
.option('-t, --type <type>', 'Specific type only')
|
||||
.description('Link with code-oss')
|
||||
.action(link);
|
||||
|
||||
program
|
||||
.command('unlink [extensions...]')
|
||||
.option('-t, --type <type>', 'Specific type only')
|
||||
.description('Unlink from code-oss')
|
||||
.action(unlink);
|
||||
|
||||
program.parseAsync(process.argv);
|
||||
}
|
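A hedged usage sketch (not part of this commit) that drives the commander-based CLI defined above; the subcommand names come from the `program` definitions it registers.

// Hedged usage sketch (assumption): print the link status of every built-in
// extension via the CLI above ('status', 'each', 'link', 'unlink' are its subcommands).
const { execFileSync } = require('child_process');

execFileSync(process.execPath, ['build/ext.js', 'status'], { stdio: 'inherit' });
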
151
lib/vscode/build/filters.js
Normal file
@ -0,0 +1,151 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
/**
|
||||
* Hygiene works by creating cascading subsets of all our files and
|
||||
* passing them through a sequence of checks. Here are the current subsets,
|
||||
* named according to the checks performed on them. Each subset contains
|
||||
* the following one, as described in mathematical notation:
|
||||
*
|
||||
* all ⊃ eol ⊇ indentation ⊃ copyright ⊃ typescript
|
||||
*/
|
||||
|
||||
module.exports.all = [
|
||||
'*',
|
||||
'build/**/*',
|
||||
'extensions/**/*',
|
||||
'scripts/**/*',
|
||||
'src/**/*',
|
||||
'test/**/*',
|
||||
'!out*/**',
|
||||
'!test/**/out/**',
|
||||
'!**/node_modules/**',
|
||||
];
|
||||
|
||||
module.exports.indentationFilter = [
|
||||
'**',
|
||||
|
||||
// except specific files
|
||||
'!**/ThirdPartyNotices.txt',
|
||||
'!**/LICENSE.{txt,rtf}',
|
||||
'!LICENSES.chromium.html',
|
||||
'!**/LICENSE',
|
||||
'!src/vs/nls.js',
|
||||
'!src/vs/nls.build.js',
|
||||
'!src/vs/css.js',
|
||||
'!src/vs/css.build.js',
|
||||
'!src/vs/loader.js',
|
||||
'!src/vs/base/common/insane/insane.js',
|
||||
'!src/vs/base/common/marked/marked.js',
|
||||
'!src/vs/base/common/semver/semver.js',
|
||||
'!src/vs/base/node/terminateProcess.sh',
|
||||
'!src/vs/base/node/cpuUsage.sh',
|
||||
'!test/unit/assert.js',
|
||||
'!resources/linux/snap/electron-launch',
|
||||
|
||||
// except specific folders
|
||||
'!test/automation/out/**',
|
||||
'!test/monaco/out/**',
|
||||
'!test/smoke/out/**',
|
||||
'!extensions/typescript-language-features/test-workspace/**',
|
||||
'!extensions/vscode-api-tests/testWorkspace/**',
|
||||
'!extensions/vscode-api-tests/testWorkspace2/**',
|
||||
'!build/monaco/**',
|
||||
'!build/win32/**',
|
||||
|
||||
// except multiple specific files
|
||||
'!**/package.json',
|
||||
'!**/yarn.lock',
|
||||
'!**/yarn-error.log',
|
||||
|
||||
// except multiple specific folders
|
||||
'!**/codicon/**',
|
||||
'!**/fixtures/**',
|
||||
'!**/lib/**',
|
||||
'!extensions/**/dist/**',
|
||||
'!extensions/**/out/**',
|
||||
'!extensions/**/snippets/**',
|
||||
'!extensions/**/syntaxes/**',
|
||||
'!extensions/**/themes/**',
|
||||
'!extensions/**/colorize-fixtures/**',
|
||||
|
||||
// except specific file types
|
||||
'!src/vs/*/**/*.d.ts',
|
||||
'!src/typings/**/*.d.ts',
|
||||
'!extensions/**/*.d.ts',
|
||||
'!**/*.{svg,exe,png,bmp,jpg,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns,plist}',
|
||||
'!build/{lib,download,darwin}/**/*.js',
|
||||
'!build/**/*.sh',
|
||||
'!build/azure-pipelines/**/*.js',
|
||||
'!build/azure-pipelines/**/*.config',
|
||||
'!**/Dockerfile',
|
||||
'!**/Dockerfile.*',
|
||||
'!**/*.Dockerfile',
|
||||
'!**/*.dockerfile',
|
||||
'!extensions/markdown-language-features/media/*.js',
|
||||
'!extensions/simple-browser/media/*.js',
|
||||
];
|
||||
|
||||
module.exports.copyrightFilter = [
|
||||
'**',
|
||||
'!**/*.desktop',
|
||||
'!**/*.json',
|
||||
'!**/*.html',
|
||||
'!**/*.template',
|
||||
'!**/*.md',
|
||||
'!**/*.bat',
|
||||
'!**/*.cmd',
|
||||
'!**/*.ico',
|
||||
'!**/*.icns',
|
||||
'!**/*.xml',
|
||||
'!**/*.sh',
|
||||
'!**/*.txt',
|
||||
'!**/*.xpm',
|
||||
'!**/*.opts',
|
||||
'!**/*.disabled',
|
||||
'!**/*.code-workspace',
|
||||
'!**/*.js.map',
|
||||
'!build/**/*.init',
|
||||
'!resources/linux/snap/snapcraft.yaml',
|
||||
'!resources/win32/bin/code.js',
|
||||
'!resources/web/code-web.js',
|
||||
'!resources/completions/**',
|
||||
'!extensions/configuration-editing/build/inline-allOf.ts',
|
||||
'!extensions/markdown-language-features/media/highlight.css',
|
||||
'!extensions/html-language-features/server/src/modes/typescript/*',
|
||||
'!extensions/*/server/bin/*',
|
||||
'!src/vs/editor/test/node/classification/typescript-test.ts',
|
||||
];
|
||||
|
||||
module.exports.jsHygieneFilter = [
|
||||
'src/**/*.js',
|
||||
'build/gulpfile.*.js',
|
||||
'!src/vs/loader.js',
|
||||
'!src/vs/css.js',
|
||||
'!src/vs/nls.js',
|
||||
'!src/vs/css.build.js',
|
||||
'!src/vs/nls.build.js',
|
||||
'!src/**/insane.js',
|
||||
'!src/**/marked.js',
|
||||
'!src/**/semver.js',
|
||||
'!**/test/**',
|
||||
];
|
||||
|
||||
module.exports.tsHygieneFilter = [
|
||||
'src/**/*.ts',
|
||||
'test/**/*.ts',
|
||||
'extensions/**/*.ts',
|
||||
'!src/vs/*/**/*.d.ts',
|
||||
'!src/typings/**/*.d.ts',
|
||||
'!extensions/**/*.d.ts',
|
||||
'!**/fixtures/**',
|
||||
'!**/typings/**',
|
||||
'!**/node_modules/**',
|
||||
'!extensions/**/colorize-fixtures/**',
|
||||
'!extensions/vscode-api-tests/testWorkspace/**',
|
||||
'!extensions/vscode-api-tests/testWorkspace2/**',
|
||||
'!extensions/**/*.test.ts',
|
||||
'!extensions/html-language-features/server/lib/jquery.d.ts',
|
||||
];
|
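A hedged sketch (assuming the snippet lives at the lib/vscode root; not part of this commit) of consuming one of the exported glob lists with vinyl-fs, the same pattern build/eslint.js uses with jsHygieneFilter and tsHygieneFilter:

// Hedged sketch (assumption): list the files selected by indentationFilter.
const vfs = require('vinyl-fs');
const { indentationFilter } = require('./build/filters');

vfs.src(indentationFilter, { base: '.', follow: true, allowEmpty: true })
    .on('data', (file) => console.log(file.relative));
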
@ -11,6 +11,11 @@ const task = require('./lib/task');
|
||||
const compilation = require('./lib/compilation');
|
||||
|
||||
// Full compile, including nls and inline sources in sourcemaps, for build
|
||||
const compileBuildTask = task.define('compile-build', task.series(util.rimraf('out-build'), compilation.compileTask('src', 'out-build', true)));
|
||||
const compileBuildTask = task.define('compile-build',
|
||||
task.series(
|
||||
util.rimraf('out-build'),
|
||||
compilation.compileTask('src', 'out-build', true)
|
||||
)
|
||||
);
|
||||
gulp.task(compileBuildTask);
|
||||
exports.compileBuildTask = compileBuildTask;
|
||||
exports.compileBuildTask = compileBuildTask;
|
||||
|
@ -16,8 +16,6 @@ const cp = require('child_process');
|
||||
const compilation = require('./lib/compilation');
|
||||
const monacoapi = require('./lib/monaco-api');
|
||||
const fs = require('fs');
|
||||
const webpack = require('webpack');
|
||||
const webpackGulp = require('webpack-stream');
|
||||
|
||||
let root = path.dirname(__dirname);
|
||||
let sha1 = util.getVersion(root);
|
||||
@ -369,6 +367,9 @@ gulp.task('editor-distro',
|
||||
);
|
||||
|
||||
const bundleEditorESMTask = task.define('editor-esm-bundle-webpack', () => {
|
||||
const webpack = require('webpack');
|
||||
const webpackGulp = require('webpack-stream');
|
||||
|
||||
const result = es.through();
|
||||
|
||||
const webpackConfigPath = path.join(root, 'build/monaco/monaco.webpack.config.js');
|
||||
|
@ -9,17 +9,13 @@ require('events').EventEmitter.defaultMaxListeners = 100;
|
||||
const gulp = require('gulp');
|
||||
const path = require('path');
|
||||
const nodeUtil = require('util');
|
||||
const tsb = require('gulp-tsb');
|
||||
const es = require('event-stream');
|
||||
const filter = require('gulp-filter');
|
||||
const webpack = require('webpack');
|
||||
const util = require('./lib/util');
|
||||
const task = require('./lib/task');
|
||||
const watcher = require('./lib/watch');
|
||||
const createReporter = require('./lib/reporter').createReporter;
|
||||
const glob = require('glob');
|
||||
const sourcemaps = require('gulp-sourcemaps');
|
||||
const nlsDev = require('vscode-nls-dev');
|
||||
const root = path.dirname(__dirname);
|
||||
const commit = util.getVersion(root);
|
||||
const plumber = require('gulp-plumber');
|
||||
@ -29,10 +25,49 @@ const ext = require('./lib/extensions');
|
||||
|
||||
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
|
||||
|
||||
const compilations = glob.sync('**/tsconfig.json', {
|
||||
cwd: extensionsPath,
|
||||
ignore: ['**/out/**', '**/node_modules/**']
|
||||
});
|
||||
// To save 250ms for each gulp startup, we are caching the result here
|
||||
// const compilations = glob.sync('**/tsconfig.json', {
|
||||
// cwd: extensionsPath,
|
||||
// ignore: ['**/out/**', '**/node_modules/**']
|
||||
// });
|
||||
const compilations = [
|
||||
'configuration-editing/build/tsconfig.json',
|
||||
'configuration-editing/tsconfig.json',
|
||||
'css-language-features/client/tsconfig.json',
|
||||
'css-language-features/server/tsconfig.json',
|
||||
'debug-auto-launch/tsconfig.json',
|
||||
'debug-server-ready/tsconfig.json',
|
||||
'emmet/tsconfig.json',
|
||||
'extension-editing/tsconfig.json',
|
||||
'git-ui/tsconfig.json',
|
||||
'git/tsconfig.json',
|
||||
'github-authentication/tsconfig.json',
|
||||
'github/tsconfig.json',
|
||||
'grunt/tsconfig.json',
|
||||
'gulp/tsconfig.json',
|
||||
'html-language-features/client/tsconfig.json',
|
||||
'html-language-features/server/tsconfig.json',
|
||||
'image-preview/tsconfig.json',
|
||||
'jake/tsconfig.json',
|
||||
'json-language-features/client/tsconfig.json',
|
||||
'json-language-features/server/tsconfig.json',
|
||||
'markdown-language-features/preview-src/tsconfig.json',
|
||||
'markdown-language-features/tsconfig.json',
|
||||
'merge-conflict/tsconfig.json',
|
||||
'microsoft-authentication/tsconfig.json',
|
||||
'npm/tsconfig.json',
|
||||
'php-language-features/tsconfig.json',
|
||||
'search-result/tsconfig.json',
|
||||
'simple-browser/tsconfig.json',
|
||||
'testing-editor-contributions/tsconfig.json',
|
||||
'typescript-language-features/test-workspace/tsconfig.json',
|
||||
'typescript-language-features/tsconfig.json',
|
||||
'vscode-api-tests/tsconfig.json',
|
||||
'vscode-colorize-tests/tsconfig.json',
|
||||
'vscode-custom-editor-tests/tsconfig.json',
|
||||
'vscode-notebook-tests/tsconfig.json',
|
||||
'vscode-test-resolver/tsconfig.json'
|
||||
];
|
||||
|
||||
const getBaseUrl = out => `https://ticino.blob.core.windows.net/sourcemaps/${commit}/${out}`;
|
||||
|
||||
@ -64,6 +99,10 @@ const tasks = compilations.map(function (tsconfigFile) {
|
||||
}
|
||||
|
||||
function createPipeline(build, emitError) {
|
||||
const nlsDev = require('vscode-nls-dev');
|
||||
const tsb = require('gulp-tsb');
|
||||
const sourcemaps = require('gulp-sourcemaps');
|
||||
|
||||
const reporter = createReporter('extensions');
|
||||
|
||||
overrideOptions.inlineSources = Boolean(build);
|
||||
@ -170,6 +209,8 @@ const compileExtensionsBuildTask = task.define('compile-extensions-build', task.
|
||||
));
|
||||
|
||||
gulp.task(compileExtensionsBuildTask);
|
||||
gulp.task(task.define('extensions-ci', task.series(compileExtensionsBuildTask)));
|
||||
|
||||
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
|
||||
|
||||
const compileWebExtensionsTask = task.define('compile-web', () => buildWebExtensions(false));
|
||||
@ -181,6 +222,7 @@ gulp.task(watchWebExtensionsTask);
|
||||
exports.watchWebExtensionsTask = watchWebExtensionsTask;
|
||||
|
||||
async function buildWebExtensions(isWatch) {
|
||||
const webpack = require('webpack');
|
||||
|
||||
const webpackConfigLocations = await nodeUtil.promisify(glob)(
|
||||
path.join(extensionsPath, '**', 'extension-browser.webpack.config.js'),
|
||||
|
@ -4,52 +4,33 @@
*--------------------------------------------------------------------------------------------*/

const gulp = require('gulp');
const filter = require('gulp-filter');
const es = require('event-stream');
const gulpeslint = require('gulp-eslint');
const vfs = require('vinyl-fs');
const path = require('path');
const task = require('./lib/task');
const { all, jsHygieneFilter, tsHygieneFilter, hygiene } = require('./hygiene');

gulp.task('eslint', () => {
return vfs
.src(all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(jsHygieneFilter.concat(tsHygieneFilter)))
.pipe(
gulpeslint({
configFile: '.eslintrc.json',
rulePaths: ['./build/lib/eslint'],
})
)
.pipe(gulpeslint.formatEach('compact'))
.pipe(
gulpeslint.results((results) => {
if (results.warningCount > 0 || results.errorCount > 0) {
throw new Error('eslint failed with warnings and/or errors');
}
})
);
});
const { hygiene } = require('./hygiene');

function checkPackageJSON(actualPath) {
const actual = require(path.join(__dirname, '..', actualPath));
const rootPackageJSON = require('../package.json');
const checkIncluded = (set1, set2) => {
for (let depName in set1) {
const depVersion = set1[depName];
const rootDepVersion = set2[depName];
if (!rootDepVersion) {
// missing in root is allowed
continue;
}
if (depVersion !== rootDepVersion) {
this.emit(
'error',
`The dependency ${depName} in '${actualPath}' (${depVersion}) is different than in the root package.json (${rootDepVersion})`
);
}
}
};

for (let depName in actual.dependencies) {
const depVersion = actual.dependencies[depName];
const rootDepVersion = rootPackageJSON.dependencies[depName];
if (!rootDepVersion) {
// missing in root is allowed
continue;
}
if (depVersion !== rootDepVersion) {
this.emit(
'error',
`The dependency ${depName} in '${actualPath}' (${depVersion}) is different than in the root package.json (${rootDepVersion})`
);
}
}
checkIncluded(actual.dependencies, rootPackageJSON.dependencies);
checkIncluded(actual.devDependencies, rootPackageJSON.devDependencies);
}

const checkPackageJSONTask = task.define('check-package-json', () => {
@ -57,12 +38,11 @@ const checkPackageJSONTask = task.define('check-package-json', () => {
es.through(function () {
checkPackageJSON.call(this, 'remote/package.json');
checkPackageJSON.call(this, 'remote/web/package.json');
checkPackageJSON.call(this, 'build/package.json');
})
);
});
gulp.task(checkPackageJSONTask);

gulp.task(
'hygiene',
task.series(checkPackageJSONTask, () => hygiene())
);
const hygieneTask = task.define('hygiene', task.series(checkPackageJSONTask, () => hygiene(undefined, false)));
gulp.task(hygieneTask);
41
lib/vscode/build/gulpfile.js
Normal file
@ -0,0 +1,41 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

'use strict';

// Increase max listeners for event emitters
require('events').EventEmitter.defaultMaxListeners = 100;

const gulp = require('gulp');
const util = require('./lib/util');
const task = require('./lib/task');
const compilation = require('./lib/compilation');
const { monacoTypecheckTask/* , monacoTypecheckWatchTask */ } = require('./gulpfile.editor');
const { compileExtensionsTask, watchExtensionsTask } = require('./gulpfile.extensions');

// Fast compile for development time
const compileClientTask = task.define('compile-client', task.series(util.rimraf('out'), compilation.compileTask('src', 'out', false)));
gulp.task(compileClientTask);

const watchClientTask = task.define('watch-client', task.series(util.rimraf('out'), compilation.watchTask('out', false)));
gulp.task(watchClientTask);

// All
const compileTask = task.define('compile', task.parallel(monacoTypecheckTask, compileClientTask, compileExtensionsTask));
gulp.task(compileTask);

gulp.task(task.define('watch', task.parallel(/* monacoTypecheckWatchTask, */ watchClientTask, watchExtensionsTask)));

// Default
gulp.task('default', compileTask);

process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
process.exit(1);
});

// Load all the gulpfiles only if running tasks other than the editor tasks
require('glob').sync('gulpfile.*.js', { cwd: __dirname })
.forEach(f => require(`./${f}`));
@ -14,10 +14,8 @@ const task = require('./lib/task');
const vfs = require('vinyl-fs');
const flatmap = require('gulp-flatmap');
const gunzip = require('gulp-gunzip');
const untar = require('gulp-untar');
const File = require('vinyl');
const fs = require('fs');
const remote = require('gulp-remote-retry-src');
const rename = require('gulp-rename');
const filter = require('gulp-filter');
const cp = require('child_process');
@ -71,13 +69,17 @@ BUILD_TARGETS.forEach(({ platform, arch }) => {
}));
});

const defaultNodeTask = gulp.task(`node-${process.platform}-${process.arch}`);
const arch = process.platform === 'darwin' ? 'x64' : process.arch;
const defaultNodeTask = gulp.task(`node-${process.platform}-${arch}`);

if (defaultNodeTask) {
gulp.task(task.define('node', defaultNodeTask));
}

function nodejs(platform, arch) {
const remote = require('gulp-remote-retry-src');
const untar = require('gulp-untar');

if (arch === 'ia32') {
arch = 'x86';
}
@ -11,13 +11,10 @@ const os = require('os');
const cp = require('child_process');
const path = require('path');
const es = require('event-stream');
const azure = require('gulp-azure-storage');
const electron = require('gulp-atom-electron');
const vfs = require('vinyl-fs');
const rename = require('gulp-rename');
const replace = require('gulp-replace');
const filter = require('gulp-filter');
const json = require('gulp-json-editor');
const _ = require('underscore');
const util = require('./lib/util');
const task = require('./lib/task');
@ -29,15 +26,13 @@ const packageJson = require('../package.json');
const product = require('../product.json');
const crypto = require('crypto');
const i18n = require('./lib/i18n');
const deps = require('./dependencies');
const { getProductionDependencies } = require('./lib/dependencies');
const { config } = require('./lib/electron');
const createAsar = require('./lib/asar').createAsar;
const minimist = require('minimist');
const { compileBuildTask } = require('./gulpfile.compile');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');

const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));

// Build
const vscodeEntryPoints = _.flatten([
buildfile.entrypoint('vs/workbench/workbench.desktop.main'),
@ -80,7 +75,6 @@ const vscodeResources = [
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
'out-build/vs/code/electron-sandbox/issue/issueReporter.js',
'out-build/vs/code/electron-sandbox/processExplorer/processExplorer.js',
'out-build/vs/code/electron-sandbox/proxy/auth.js',
'!**/test/**'
];

@ -105,6 +99,16 @@ const minifyVSCodeTask = task.define('minify-vscode', task.series(
));
gulp.task(minifyVSCodeTask);

const core = task.define('core-ci', task.series(
gulp.task('compile-build'),
task.parallel(
gulp.task('minify-vscode'),
gulp.task('minify-vscode-reh'),
gulp.task('minify-vscode-reh-web'),
)
));
gulp.task(core);

/**
 * Compute checksums for some files.
 *
@ -146,6 +150,9 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
platform = platform || process.platform;

return () => {
const electron = require('gulp-atom-electron');
const json = require('gulp-json-editor');

const out = sourceFolderName;

const checksums = computeChecksums(out, [
@ -161,7 +168,16 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }))
.pipe(util.setExecutableBit(['**/*.sh']));

const extensions = gulp.src('.build/extensions/**', { base: '.build', dot: true });
const platformSpecificBuiltInExtensionsExclusions = product.builtInExtensions.filter(ext => {
if (!ext.platforms) {
return false;
}

const set = new Set(ext.platforms);
return !set.has(platform);
}).map(ext => `!.build/extensions/${ext.name}/**`);

const extensions = gulp.src(['.build/extensions/**', ...platformSpecificBuiltInExtensionsExclusions], { base: '.build', dot: true });

const sources = es.merge(src, extensions)
.pipe(filter(['**', '!**/*.js.map'], { dot: true }));
@ -203,10 +219,11 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op

const jsFilter = util.filter(data => !data.isDirectory() && /\.js$/.test(data.path));
const root = path.resolve(path.join(__dirname, '..'));
const productionDependencies = getProductionDependencies(root);
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]));

const deps = gulp.src(dependenciesSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json', '!**/yarn.lock', '!**/*.js.map']))
.pipe(filter(['**', `!**/${config.version}/**`, '!**/bin/darwin-arm64-85/**', '!**/package-lock.json', '!**/yarn.lock', '!**/*.js.map']))
.pipe(util.cleanNodeModules(path.join(__dirname, '.moduleignore')))
.pipe(jsFilter)
.pipe(util.rewriteSourceMappingURL(sourceMappingURLBase))
@ -505,6 +522,8 @@ gulp.task(task.define(
task.series(
generateVSCodeConfigurationTask,
() => {
const azure = require('gulp-azure-storage');

if (!shouldSetupSettingsSearch()) {
const branch = process.env.BUILD_SOURCEBRANCH;
console.log(`Only runs on master and release branches, not ${branch}`);
@ -95,10 +95,6 @@ function prepareDebPackage(arch) {

const postinst = gulp.src('resources/linux/debian/postinst.template', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ARCHITECTURE@@', debArch))
.pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@'))
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(replace('@@REPOSITORY_NAME@@', arch === 'x64' ? 'vscode' : 'code'))
.pipe(rename('DEBIAN/postinst'));

const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, workspaceMime, icon, bash_completion, zsh_completion, code);
@ -5,156 +5,12 @@
|
||||
|
||||
const filter = require('gulp-filter');
|
||||
const es = require('event-stream');
|
||||
const gulpeslint = require('gulp-eslint');
|
||||
const tsfmt = require('typescript-formatter');
|
||||
const VinylFile = require('vinyl');
|
||||
const vfs = require('vinyl-fs');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const pall = require('p-all');
|
||||
|
||||
/**
|
||||
* Hygiene works by creating cascading subsets of all our files and
|
||||
* passing them through a sequence of checks. Here are the current subsets,
|
||||
* named according to the checks performed on them. Each subset contains
|
||||
* the following one, as described in mathematical notation:
|
||||
*
|
||||
* all ⊃ eol ⊇ indentation ⊃ copyright ⊃ typescript
|
||||
*/
|
||||
|
||||
const all = [
|
||||
'*',
|
||||
'build/**/*',
|
||||
'extensions/**/*',
|
||||
'scripts/**/*',
|
||||
'src/**/*',
|
||||
'test/**/*',
|
||||
'!test/**/out/**',
|
||||
'!**/node_modules/**',
|
||||
];
|
||||
module.exports.all = all;
|
||||
|
||||
const indentationFilter = [
|
||||
'**',
|
||||
|
||||
// except specific files
|
||||
'!**/ThirdPartyNotices.txt',
|
||||
'!**/LICENSE.{txt,rtf}',
|
||||
'!LICENSES.chromium.html',
|
||||
'!**/LICENSE',
|
||||
'!src/vs/nls.js',
|
||||
'!src/vs/nls.build.js',
|
||||
'!src/vs/css.js',
|
||||
'!src/vs/css.build.js',
|
||||
'!src/vs/loader.js',
|
||||
'!src/vs/base/common/insane/insane.js',
|
||||
'!src/vs/base/common/marked/marked.js',
|
||||
'!src/vs/base/common/semver/semver.js',
|
||||
'!src/vs/base/node/terminateProcess.sh',
|
||||
'!src/vs/base/node/cpuUsage.sh',
|
||||
'!test/unit/assert.js',
|
||||
'!resources/linux/snap/electron-launch',
|
||||
|
||||
// except specific folders
|
||||
'!test/automation/out/**',
|
||||
'!test/smoke/out/**',
|
||||
'!extensions/typescript-language-features/test-workspace/**',
|
||||
'!extensions/vscode-api-tests/testWorkspace/**',
|
||||
'!extensions/vscode-api-tests/testWorkspace2/**',
|
||||
'!build/monaco/**',
|
||||
'!build/win32/**',
|
||||
|
||||
// except multiple specific files
|
||||
'!**/package.json',
|
||||
'!**/yarn.lock',
|
||||
'!**/yarn-error.log',
|
||||
|
||||
// except multiple specific folders
|
||||
'!**/codicon/**',
|
||||
'!**/fixtures/**',
|
||||
'!**/lib/**',
|
||||
'!extensions/**/out/**',
|
||||
'!extensions/**/snippets/**',
|
||||
'!extensions/**/syntaxes/**',
|
||||
'!extensions/**/themes/**',
|
||||
'!extensions/**/colorize-fixtures/**',
|
||||
|
||||
// except specific file types
|
||||
'!src/vs/*/**/*.d.ts',
|
||||
'!src/typings/**/*.d.ts',
|
||||
'!extensions/**/*.d.ts',
|
||||
'!**/*.{svg,exe,png,bmp,jpg,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns,plist}',
|
||||
'!build/{lib,download,darwin}/**/*.js',
|
||||
'!build/**/*.sh',
|
||||
'!build/azure-pipelines/**/*.js',
|
||||
'!build/azure-pipelines/**/*.config',
|
||||
'!**/Dockerfile',
|
||||
'!**/Dockerfile.*',
|
||||
'!**/*.Dockerfile',
|
||||
'!**/*.dockerfile',
|
||||
'!extensions/markdown-language-features/media/*.js',
|
||||
];
|
||||
|
||||
const copyrightFilter = [
|
||||
'**',
|
||||
'!**/*.desktop',
|
||||
'!**/*.json',
|
||||
'!**/*.html',
|
||||
'!**/*.template',
|
||||
'!**/*.md',
|
||||
'!**/*.bat',
|
||||
'!**/*.cmd',
|
||||
'!**/*.ico',
|
||||
'!**/*.icns',
|
||||
'!**/*.xml',
|
||||
'!**/*.sh',
|
||||
'!**/*.txt',
|
||||
'!**/*.xpm',
|
||||
'!**/*.opts',
|
||||
'!**/*.disabled',
|
||||
'!**/*.code-workspace',
|
||||
'!**/*.js.map',
|
||||
'!build/**/*.init',
|
||||
'!resources/linux/snap/snapcraft.yaml',
|
||||
'!resources/win32/bin/code.js',
|
||||
'!resources/web/code-web.js',
|
||||
'!resources/completions/**',
|
||||
'!extensions/configuration-editing/build/inline-allOf.ts',
|
||||
'!extensions/markdown-language-features/media/highlight.css',
|
||||
'!extensions/html-language-features/server/src/modes/typescript/*',
|
||||
'!extensions/*/server/bin/*',
|
||||
'!src/vs/editor/test/node/classification/typescript-test.ts',
|
||||
];
|
||||
|
||||
const jsHygieneFilter = [
|
||||
'src/**/*.js',
|
||||
'build/gulpfile.*.js',
|
||||
'!src/vs/loader.js',
|
||||
'!src/vs/css.js',
|
||||
'!src/vs/nls.js',
|
||||
'!src/vs/css.build.js',
|
||||
'!src/vs/nls.build.js',
|
||||
'!src/**/insane.js',
|
||||
'!src/**/marked.js',
|
||||
'!src/**/semver.js',
|
||||
'!**/test/**',
|
||||
];
|
||||
module.exports.jsHygieneFilter = jsHygieneFilter;
|
||||
|
||||
const tsHygieneFilter = [
|
||||
'src/**/*.ts',
|
||||
'test/**/*.ts',
|
||||
'extensions/**/*.ts',
|
||||
'!**/fixtures/**',
|
||||
'!**/typings/**',
|
||||
'!**/node_modules/**',
|
||||
'!extensions/typescript-basics/test/colorize-fixtures/**',
|
||||
'!extensions/vscode-api-tests/testWorkspace/**',
|
||||
'!extensions/vscode-api-tests/testWorkspace2/**',
|
||||
'!extensions/**/*.test.ts',
|
||||
'!extensions/html-language-features/server/lib/jquery.d.ts',
|
||||
];
|
||||
module.exports.tsHygieneFilter = tsHygieneFilter;
|
||||
const { all, copyrightFilter, indentationFilter, jsHygieneFilter, tsHygieneFilter } = require('./filters');
|
||||
|
||||
const copyrightHeaderLines = [
|
||||
'/*---------------------------------------------------------------------------------------------',
|
||||
@ -163,7 +19,10 @@ const copyrightHeaderLines = [
|
||||
' *--------------------------------------------------------------------------------------------*/',
|
||||
];
|
||||
|
||||
function hygiene(some) {
|
||||
function hygiene(some, linting = true) {
|
||||
const gulpeslint = require('gulp-eslint');
|
||||
const tsfmt = require('typescript-formatter');
|
||||
|
||||
let errorCount = 0;
|
||||
|
||||
const productJson = es.through(function (file) {
|
||||
@ -273,26 +132,32 @@ function hygiene(some) {
|
||||
.pipe(filter(copyrightFilter))
|
||||
.pipe(copyrights);
|
||||
|
||||
const typescript = result.pipe(filter(tsHygieneFilter)).pipe(formatting);
|
||||
const streams = [
|
||||
result.pipe(filter(tsHygieneFilter)).pipe(formatting)
|
||||
];
|
||||
|
||||
const javascript = result
|
||||
.pipe(filter(jsHygieneFilter.concat(tsHygieneFilter)))
|
||||
.pipe(
|
||||
gulpeslint({
|
||||
configFile: '.eslintrc.json',
|
||||
rulePaths: ['./build/lib/eslint'],
|
||||
})
|
||||
)
|
||||
.pipe(gulpeslint.formatEach('compact'))
|
||||
.pipe(
|
||||
gulpeslint.results((results) => {
|
||||
errorCount += results.warningCount;
|
||||
errorCount += results.errorCount;
|
||||
})
|
||||
if (linting) {
|
||||
streams.push(
|
||||
result
|
||||
.pipe(filter([...jsHygieneFilter, ...tsHygieneFilter]))
|
||||
.pipe(
|
||||
gulpeslint({
|
||||
configFile: '.eslintrc.json',
|
||||
rulePaths: ['./build/lib/eslint'],
|
||||
})
|
||||
)
|
||||
.pipe(gulpeslint.formatEach('compact'))
|
||||
.pipe(
|
||||
gulpeslint.results((results) => {
|
||||
errorCount += results.warningCount;
|
||||
errorCount += results.errorCount;
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
let count = 0;
|
||||
return es.merge(typescript, javascript).pipe(
|
||||
return es.merge(...streams).pipe(
|
||||
es.through(
|
||||
function (data) {
|
||||
count++;
|
||||
@ -337,7 +202,7 @@ function createGitIndexVinyls(paths) {
|
||||
}
|
||||
|
||||
cp.exec(
|
||||
`git show :${relativePath}`,
|
||||
process.platform === 'win32' ? `git show :${relativePath}` : `git show ':${relativePath}'`,
|
||||
{ maxBuffer: 2000 * 1024, encoding: 'buffer' },
|
||||
(err, out) => {
|
||||
if (err) {
|
||||
|
118
lib/vscode/build/lib/asar.js
Normal file
@ -0,0 +1,118 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createAsar = void 0;
|
||||
const path = require("path");
|
||||
const es = require("event-stream");
|
||||
const pickle = require('chromium-pickle-js');
|
||||
const Filesystem = require('asar/lib/filesystem');
|
||||
const VinylFile = require("vinyl");
|
||||
const minimatch = require("minimatch");
|
||||
function createAsar(folderPath, unpackGlobs, destFilename) {
|
||||
const shouldUnpackFile = (file) => {
|
||||
for (let i = 0; i < unpackGlobs.length; i++) {
|
||||
if (minimatch(file.relative, unpackGlobs[i])) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const filesystem = new Filesystem(folderPath);
|
||||
const out = [];
|
||||
// Keep track of pending inserts
|
||||
let pendingInserts = 0;
|
||||
let onFileInserted = () => { pendingInserts--; };
|
||||
// Do not insert the same directory twice
|
||||
const seenDir = {};
|
||||
const insertDirectoryRecursive = (dir) => {
|
||||
if (seenDir[dir]) {
|
||||
return;
|
||||
}
|
||||
let lastSlash = dir.lastIndexOf('/');
|
||||
if (lastSlash === -1) {
|
||||
lastSlash = dir.lastIndexOf('\\');
|
||||
}
|
||||
if (lastSlash !== -1) {
|
||||
insertDirectoryRecursive(dir.substring(0, lastSlash));
|
||||
}
|
||||
seenDir[dir] = true;
|
||||
filesystem.insertDirectory(dir);
|
||||
};
|
||||
const insertDirectoryForFile = (file) => {
|
||||
let lastSlash = file.lastIndexOf('/');
|
||||
if (lastSlash === -1) {
|
||||
lastSlash = file.lastIndexOf('\\');
|
||||
}
|
||||
if (lastSlash !== -1) {
|
||||
insertDirectoryRecursive(file.substring(0, lastSlash));
|
||||
}
|
||||
};
|
||||
const insertFile = (relativePath, stat, shouldUnpack) => {
|
||||
insertDirectoryForFile(relativePath);
|
||||
pendingInserts++;
|
||||
// Do not pass `onFileInserted` directly because it gets overwritten below.
|
||||
// Create a closure capturing `onFileInserted`.
|
||||
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}).then(() => onFileInserted(), () => onFileInserted());
|
||||
};
|
||||
return es.through(function (file) {
|
||||
if (file.stat.isDirectory()) {
|
||||
return;
|
||||
}
|
||||
if (!file.stat.isFile()) {
|
||||
throw new Error(`unknown item in stream!`);
|
||||
}
|
||||
const shouldUnpack = shouldUnpackFile(file);
|
||||
insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
|
||||
if (shouldUnpack) {
|
||||
// The file goes outside of xx.asar, in a folder xx.asar.unpacked
|
||||
const relative = path.relative(folderPath, file.path);
|
||||
this.queue(new VinylFile({
|
||||
base: '.',
|
||||
path: path.join(destFilename + '.unpacked', relative),
|
||||
stat: file.stat,
|
||||
contents: file.contents
|
||||
}));
|
||||
}
|
||||
else {
|
||||
// The file goes inside of xx.asar
|
||||
out.push(file.contents);
|
||||
}
|
||||
}, function () {
|
||||
let finish = () => {
|
||||
{
|
||||
const headerPickle = pickle.createEmpty();
|
||||
headerPickle.writeString(JSON.stringify(filesystem.header));
|
||||
const headerBuf = headerPickle.toBuffer();
|
||||
const sizePickle = pickle.createEmpty();
|
||||
sizePickle.writeUInt32(headerBuf.length);
|
||||
const sizeBuf = sizePickle.toBuffer();
|
||||
out.unshift(headerBuf);
|
||||
out.unshift(sizeBuf);
|
||||
}
|
||||
const contents = Buffer.concat(out);
|
||||
out.length = 0;
|
||||
this.queue(new VinylFile({
|
||||
base: '.',
|
||||
path: destFilename,
|
||||
contents: contents
|
||||
}));
|
||||
this.queue(null);
|
||||
};
|
||||
// Call finish() only when all file inserts have finished...
|
||||
if (pendingInserts === 0) {
|
||||
finish();
|
||||
}
|
||||
else {
|
||||
onFileInserted = () => {
|
||||
pendingInserts--;
|
||||
if (pendingInserts === 0) {
|
||||
finish();
|
||||
}
|
||||
};
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.createAsar = createAsar;
|
120
lib/vscode/build/lib/builtInExtensions.js
Normal file
@ -0,0 +1,120 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getBuiltInExtensions = void 0;
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const os = require("os");
|
||||
const rimraf = require("rimraf");
|
||||
const es = require("event-stream");
|
||||
const rename = require("gulp-rename");
|
||||
const vfs = require("vinyl-fs");
|
||||
const ext = require("./extensions");
|
||||
const fancyLog = require("fancy-log");
|
||||
const ansiColors = require("ansi-colors");
|
||||
const mkdirp = require('mkdirp');
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
|
||||
const builtInExtensions = productjson.builtInExtensions;
|
||||
const webBuiltInExtensions = productjson.webBuiltInExtensions;
|
||||
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
|
||||
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];
|
||||
function log(...messages) {
|
||||
if (ENABLE_LOGGING) {
|
||||
fancyLog(...messages);
|
||||
}
|
||||
}
|
||||
function getExtensionPath(extension) {
|
||||
return path.join(root, '.build', 'builtInExtensions', extension.name);
|
||||
}
|
||||
function isUpToDate(extension) {
|
||||
const packagePath = path.join(getExtensionPath(extension), 'package.json');
|
||||
if (!fs.existsSync(packagePath)) {
|
||||
return false;
|
||||
}
|
||||
const packageContents = fs.readFileSync(packagePath, { encoding: 'utf8' });
|
||||
try {
|
||||
const diskVersion = JSON.parse(packageContents).version;
|
||||
return (diskVersion === extension.version);
|
||||
}
|
||||
catch (err) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
function syncMarketplaceExtension(extension) {
|
||||
if (isUpToDate(extension)) {
|
||||
log(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
|
||||
return es.readArray([]);
|
||||
}
|
||||
rimraf.sync(getExtensionPath(extension));
|
||||
return ext.fromMarketplace(extension.name, extension.version, extension.metadata)
|
||||
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
|
||||
.pipe(vfs.dest('.build/builtInExtensions'))
|
||||
.on('end', () => log(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
|
||||
}
|
||||
function syncExtension(extension, controlState) {
|
||||
if (extension.platforms) {
|
||||
const platforms = new Set(extension.platforms);
|
||||
if (!platforms.has(process.platform)) {
|
||||
log(ansiColors.gray('[skip]'), `${extension.name}@${extension.version}: Platform '${process.platform}' not supported: [${extension.platforms}]`, ansiColors.green('✔︎'));
|
||||
return es.readArray([]);
|
||||
}
|
||||
}
|
||||
switch (controlState) {
|
||||
case 'disabled':
|
||||
log(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
|
||||
return es.readArray([]);
|
||||
case 'marketplace':
|
||||
return syncMarketplaceExtension(extension);
|
||||
default:
|
||||
if (!fs.existsSync(controlState)) {
|
||||
log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
|
||||
return es.readArray([]);
|
||||
}
|
||||
else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
|
||||
log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
|
||||
return es.readArray([]);
|
||||
}
|
||||
log(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
|
||||
return es.readArray([]);
|
||||
}
|
||||
}
|
||||
function readControlFile() {
|
||||
try {
|
||||
return JSON.parse(fs.readFileSync(controlFilePath, 'utf8'));
|
||||
}
|
||||
catch (err) {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
function writeControlFile(control) {
|
||||
mkdirp.sync(path.dirname(controlFilePath));
|
||||
fs.writeFileSync(controlFilePath, JSON.stringify(control, null, 2));
|
||||
}
|
||||
function getBuiltInExtensions() {
|
||||
log('Synchronizing built-in extensions...');
|
||||
log(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);
|
||||
const control = readControlFile();
|
||||
const streams = [];
|
||||
for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
|
||||
let controlState = control[extension.name] || 'marketplace';
|
||||
control[extension.name] = controlState;
|
||||
streams.push(syncExtension(extension, controlState));
|
||||
}
|
||||
writeControlFile(control);
|
||||
return new Promise((resolve, reject) => {
|
||||
es.merge(streams)
|
||||
.on('error', reject)
|
||||
.on('end', resolve);
|
||||
});
|
||||
}
|
||||
exports.getBuiltInExtensions = getBuiltInExtensions;
|
||||
if (require.main === module) {
|
||||
getBuiltInExtensions().then(() => process.exit(0)).catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
@ -21,6 +21,7 @@ interface IExtensionDefinition {
name: string;
version: string;
repo: string;
platforms?: string[];
metadata: {
id: string;
publisherId: {
@ -82,6 +83,15 @@ function syncMarketplaceExtension(extension: IExtensionDefinition): Stream {
}

function syncExtension(extension: IExtensionDefinition, controlState: 'disabled' | 'marketplace'): Stream {
if (extension.platforms) {
const platforms = new Set(extension.platforms);

if (!platforms.has(process.platform)) {
log(ansiColors.gray('[skip]'), `${extension.name}@${extension.version}: Platform '${process.platform}' not supported: [${extension.platforms}]`, ansiColors.green('✔︎'));
return es.readArray([]);
}
}

switch (controlState) {
case 'disabled':
log(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
464
lib/vscode/build/lib/bundle.js
Normal file
@ -0,0 +1,464 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.bundle = void 0;
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const vm = require("vm");
|
||||
/**
|
||||
* Bundle `entryPoints` given config `config`.
|
||||
*/
|
||||
function bundle(entryPoints, config, callback) {
|
||||
const entryPointsMap = {};
|
||||
entryPoints.forEach((module) => {
|
||||
entryPointsMap[module.name] = module;
|
||||
});
|
||||
const allMentionedModulesMap = {};
|
||||
entryPoints.forEach((module) => {
|
||||
allMentionedModulesMap[module.name] = true;
|
||||
(module.include || []).forEach(function (includedModule) {
|
||||
allMentionedModulesMap[includedModule] = true;
|
||||
});
|
||||
(module.exclude || []).forEach(function (excludedModule) {
|
||||
allMentionedModulesMap[excludedModule] = true;
|
||||
});
|
||||
});
|
||||
const code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
|
||||
const r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
|
||||
const loaderModule = { exports: {} };
|
||||
r.call({}, require, loaderModule, loaderModule.exports);
|
||||
const loader = loaderModule.exports;
|
||||
config.isBuild = true;
|
||||
config.paths = config.paths || {};
|
||||
if (!config.paths['vs/nls']) {
|
||||
config.paths['vs/nls'] = 'out-build/vs/nls.build';
|
||||
}
|
||||
if (!config.paths['vs/css']) {
|
||||
config.paths['vs/css'] = 'out-build/vs/css.build';
|
||||
}
|
||||
loader.config(config);
|
||||
loader(['require'], (localRequire) => {
|
||||
const resolvePath = (path) => {
|
||||
const r = localRequire.toUrl(path);
|
||||
if (!/\.js/.test(r)) {
|
||||
return r + '.js';
|
||||
}
|
||||
return r;
|
||||
};
|
||||
for (const moduleId in entryPointsMap) {
|
||||
const entryPoint = entryPointsMap[moduleId];
|
||||
if (entryPoint.append) {
|
||||
entryPoint.append = entryPoint.append.map(resolvePath);
|
||||
}
|
||||
if (entryPoint.prepend) {
|
||||
entryPoint.prepend = entryPoint.prepend.map(resolvePath);
|
||||
}
|
||||
}
|
||||
});
|
||||
loader(Object.keys(allMentionedModulesMap), () => {
|
||||
const modules = loader.getBuildInfo();
|
||||
const partialResult = emitEntryPoints(modules, entryPointsMap);
|
||||
const cssInlinedResources = loader('vs/css').getInlinedResources();
|
||||
callback(null, {
|
||||
files: partialResult.files,
|
||||
cssInlinedResources: cssInlinedResources,
|
||||
bundleData: partialResult.bundleData
|
||||
});
|
||||
}, (err) => callback(err, null));
|
||||
}
|
||||
exports.bundle = bundle;
|
||||
function emitEntryPoints(modules, entryPoints) {
|
||||
const modulesMap = {};
|
||||
modules.forEach((m) => {
|
||||
modulesMap[m.id] = m;
|
||||
});
|
||||
const modulesGraph = {};
|
||||
modules.forEach((m) => {
|
||||
modulesGraph[m.id] = m.dependencies;
|
||||
});
|
||||
const sortedModules = topologicalSort(modulesGraph);
|
||||
let result = [];
|
||||
const usedPlugins = {};
|
||||
const bundleData = {
|
||||
graph: modulesGraph,
|
||||
bundles: {}
|
||||
};
|
||||
Object.keys(entryPoints).forEach((moduleToBundle) => {
|
||||
const info = entryPoints[moduleToBundle];
|
||||
const rootNodes = [moduleToBundle].concat(info.include || []);
|
||||
const allDependencies = visit(rootNodes, modulesGraph);
|
||||
const excludes = ['require', 'exports', 'module'].concat(info.exclude || []);
|
||||
excludes.forEach((excludeRoot) => {
|
||||
const allExcludes = visit([excludeRoot], modulesGraph);
|
||||
Object.keys(allExcludes).forEach((exclude) => {
|
||||
delete allDependencies[exclude];
|
||||
});
|
||||
});
|
||||
const includedModules = sortedModules.filter((module) => {
|
||||
return allDependencies[module];
|
||||
});
|
||||
bundleData.bundles[moduleToBundle] = includedModules;
|
||||
const res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend || [], info.append || [], info.dest);
|
||||
result = result.concat(res.files);
|
||||
for (const pluginName in res.usedPlugins) {
|
||||
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
|
||||
}
|
||||
});
|
||||
Object.keys(usedPlugins).forEach((pluginName) => {
|
||||
const plugin = usedPlugins[pluginName];
|
||||
if (typeof plugin.finishBuild === 'function') {
|
||||
const write = (filename, contents) => {
|
||||
result.push({
|
||||
dest: filename,
|
||||
sources: [{
|
||||
path: null,
|
||||
contents: contents
|
||||
}]
|
||||
});
|
||||
};
|
||||
plugin.finishBuild(write);
|
||||
}
|
||||
});
|
||||
return {
|
||||
// TODO@TS 2.1.2
|
||||
files: extractStrings(removeDuplicateTSBoilerplate(result)),
|
||||
bundleData: bundleData
|
||||
};
|
||||
}
|
||||
function extractStrings(destFiles) {
|
||||
const parseDefineCall = (moduleMatch, depsMatch) => {
|
||||
const module = moduleMatch.replace(/^"|"$/g, '');
|
||||
let deps = depsMatch.split(',');
|
||||
deps = deps.map((dep) => {
|
||||
dep = dep.trim();
|
||||
dep = dep.replace(/^"|"$/g, '');
|
||||
dep = dep.replace(/^'|'$/g, '');
|
||||
let prefix = null;
|
||||
let _path = null;
|
||||
const pieces = dep.split('!');
|
||||
if (pieces.length > 1) {
|
||||
prefix = pieces[0] + '!';
|
||||
_path = pieces[1];
|
||||
}
|
||||
else {
|
||||
prefix = '';
|
||||
_path = pieces[0];
|
||||
}
|
||||
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
|
||||
const res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
|
||||
return prefix + res;
|
||||
}
|
||||
return prefix + _path;
|
||||
});
|
||||
return {
|
||||
module: module,
|
||||
deps: deps
|
||||
};
|
||||
};
|
||||
destFiles.forEach((destFile) => {
|
||||
if (!/\.js$/.test(destFile.dest)) {
|
||||
return;
|
||||
}
|
||||
if (/\.nls\.js$/.test(destFile.dest)) {
|
||||
return;
|
||||
}
|
||||
// Do one pass to record the usage counts for each module id
|
||||
const useCounts = {};
|
||||
destFile.sources.forEach((source) => {
|
||||
const matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
|
||||
if (!matches) {
|
||||
return;
|
||||
}
|
||||
const defineCall = parseDefineCall(matches[1], matches[2]);
|
||||
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
|
||||
defineCall.deps.forEach((dep) => {
|
||||
useCounts[dep] = (useCounts[dep] || 0) + 1;
|
||||
});
|
||||
});
|
||||
const sortedByUseModules = Object.keys(useCounts);
|
||||
sortedByUseModules.sort((a, b) => {
|
||||
return useCounts[b] - useCounts[a];
|
||||
});
|
||||
const replacementMap = {};
|
||||
sortedByUseModules.forEach((module, index) => {
|
||||
replacementMap[module] = index;
|
||||
});
|
||||
destFile.sources.forEach((source) => {
|
||||
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => {
|
||||
const defineCall = parseDefineCall(moduleMatch, depsMatch);
|
||||
return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`;
|
||||
});
|
||||
});
|
||||
destFile.sources.unshift({
|
||||
path: null,
|
||||
contents: [
|
||||
'(function() {',
|
||||
`var __m = ${JSON.stringify(sortedByUseModules)};`,
|
||||
`var __M = function(deps) {`,
|
||||
` var result = [];`,
|
||||
` for (var i = 0, len = deps.length; i < len; i++) {`,
|
||||
` result[i] = __m[deps[i]];`,
|
||||
` }`,
|
||||
` return result;`,
|
||||
`};`
|
||||
].join('\n')
|
||||
});
|
||||
destFile.sources.push({
|
||||
path: null,
|
||||
contents: '}).call(this);'
|
||||
});
|
||||
});
|
||||
return destFiles;
|
||||
}
|
||||
function removeDuplicateTSBoilerplate(destFiles) {
|
||||
// Taken from typescript compiler => emitFiles
|
||||
const BOILERPLATE = [
|
||||
{ start: /^var __extends/, end: /^}\)\(\);$/ },
|
||||
{ start: /^var __assign/, end: /^};$/ },
|
||||
{ start: /^var __decorate/, end: /^};$/ },
|
||||
{ start: /^var __metadata/, end: /^};$/ },
|
||||
{ start: /^var __param/, end: /^};$/ },
|
||||
{ start: /^var __awaiter/, end: /^};$/ },
|
||||
{ start: /^var __generator/, end: /^};$/ },
|
||||
];
|
||||
destFiles.forEach((destFile) => {
|
||||
const SEEN_BOILERPLATE = [];
|
||||
destFile.sources.forEach((source) => {
|
||||
const lines = source.contents.split(/\r\n|\n|\r/);
|
||||
const newLines = [];
|
||||
let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE;
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
if (IS_REMOVING_BOILERPLATE) {
|
||||
newLines.push('');
|
||||
if (END_BOILERPLATE.test(line)) {
|
||||
IS_REMOVING_BOILERPLATE = false;
|
||||
}
|
||||
}
|
||||
else {
|
||||
for (let j = 0; j < BOILERPLATE.length; j++) {
|
||||
const boilerplate = BOILERPLATE[j];
|
||||
if (boilerplate.start.test(line)) {
|
||||
if (SEEN_BOILERPLATE[j]) {
|
||||
IS_REMOVING_BOILERPLATE = true;
|
||||
END_BOILERPLATE = boilerplate.end;
|
||||
}
|
||||
else {
|
||||
SEEN_BOILERPLATE[j] = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (IS_REMOVING_BOILERPLATE) {
|
||||
newLines.push('');
|
||||
}
|
||||
else {
|
||||
newLines.push(line);
|
||||
}
|
||||
}
|
||||
}
|
||||
source.contents = newLines.join('\n');
|
||||
});
|
||||
});
|
||||
return destFiles;
|
||||
}
|
||||
function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend, append, dest) {
|
||||
if (!dest) {
|
||||
dest = entryPoint + '.js';
|
||||
}
|
||||
const mainResult = {
|
||||
sources: [],
|
||||
dest: dest
|
||||
}, results = [mainResult];
|
||||
const usedPlugins = {};
|
||||
const getLoaderPlugin = (pluginName) => {
|
||||
if (!usedPlugins[pluginName]) {
|
||||
usedPlugins[pluginName] = modulesMap[pluginName].exports;
|
||||
}
|
||||
return usedPlugins[pluginName];
|
||||
};
|
||||
includedModules.forEach((c) => {
|
||||
const bangIndex = c.indexOf('!');
|
||||
if (bangIndex >= 0) {
|
||||
const pluginName = c.substr(0, bangIndex);
|
||||
const plugin = getLoaderPlugin(pluginName);
|
||||
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
|
||||
return;
|
||||
}
|
||||
const module = modulesMap[c];
|
||||
if (module.path === 'empty:') {
|
||||
return;
|
||||
}
|
||||
const contents = readFileAndRemoveBOM(module.path);
|
||||
if (module.shim) {
|
||||
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
|
||||
}
|
||||
else {
|
||||
mainResult.sources.push(emitNamedModule(c, module.defineLocation, module.path, contents));
|
||||
}
|
||||
});
|
||||
Object.keys(usedPlugins).forEach((pluginName) => {
|
||||
const plugin = usedPlugins[pluginName];
|
||||
if (typeof plugin.writeFile === 'function') {
|
||||
const req = (() => {
|
||||
throw new Error('no-no!');
|
||||
});
|
||||
req.toUrl = something => something;
|
||||
const write = (filename, contents) => {
|
||||
results.push({
|
||||
dest: filename,
|
||||
sources: [{
|
||||
path: null,
|
||||
contents: contents
|
||||
}]
|
||||
});
|
||||
};
|
||||
plugin.writeFile(pluginName, entryPoint, req, write, {});
|
||||
}
|
||||
});
|
||||
const toIFile = (path) => {
|
||||
const contents = readFileAndRemoveBOM(path);
|
||||
return {
|
||||
path: path,
|
||||
contents: contents
|
||||
};
|
||||
};
|
||||
const toPrepend = (prepend || []).map(toIFile);
|
||||
const toAppend = (append || []).map(toIFile);
|
||||
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
|
||||
return {
|
||||
files: results,
|
||||
usedPlugins: usedPlugins
|
||||
};
|
||||
}
|
||||
function readFileAndRemoveBOM(path) {
|
||||
const BOM_CHAR_CODE = 65279;
|
||||
let contents = fs.readFileSync(path, 'utf8');
|
||||
// Remove BOM
|
||||
if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
|
||||
contents = contents.substring(1);
|
||||
}
|
||||
return contents;
|
||||
}
|
||||
function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
|
||||
let result = '';
|
||||
if (typeof plugin.write === 'function') {
|
||||
const write = ((what) => {
|
||||
result += what;
|
||||
});
|
||||
write.getEntryPoint = () => {
|
||||
return entryPoint;
|
||||
};
|
||||
write.asModule = (moduleId, code) => {
|
||||
code = code.replace(/^define\(/, 'define("' + moduleId + '",');
|
||||
result += code;
|
||||
};
|
||||
plugin.write(pluginName, moduleName, write);
|
||||
}
|
||||
return {
|
||||
path: null,
|
||||
contents: result
|
||||
};
|
||||
}
|
||||
function emitNamedModule(moduleId, defineCallPosition, path, contents) {
|
||||
// `defineCallPosition` is the position in code: |define()
|
||||
const defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
|
||||
// `parensOffset` is the position in code: define|()
|
||||
const parensOffset = contents.indexOf('(', defineCallOffset);
|
||||
const insertStr = '"' + moduleId + '", ';
|
||||
return {
|
||||
path: path,
|
||||
contents: contents.substr(0, parensOffset + 1) + insertStr + contents.substr(parensOffset + 1)
|
||||
};
|
||||
}
|
||||
function emitShimmedModule(moduleId, myDeps, factory, path, contents) {
|
||||
const strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
|
||||
const strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
|
||||
return {
|
||||
path: path,
|
||||
contents: contents + '\n;\n' + strDefine
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Convert a position (line:col) to (offset) in string `str`
|
||||
*/
|
||||
function positionToOffset(str, desiredLine, desiredCol) {
|
||||
if (desiredLine === 1) {
|
||||
return desiredCol - 1;
|
||||
}
|
||||
let line = 1;
|
||||
let lastNewLineOffset = -1;
|
||||
do {
|
||||
if (desiredLine === line) {
|
||||
return lastNewLineOffset + 1 + desiredCol - 1;
|
||||
}
|
||||
lastNewLineOffset = str.indexOf('\n', lastNewLineOffset + 1);
|
||||
line++;
|
||||
} while (lastNewLineOffset >= 0);
|
||||
return -1;
|
||||
}
|
||||
/**
|
||||
* Return a set of reachable nodes in `graph` starting from `rootNodes`
|
||||
*/
|
||||
function visit(rootNodes, graph) {
|
||||
const result = {};
|
||||
const queue = rootNodes;
|
||||
rootNodes.forEach((node) => {
|
||||
result[node] = true;
|
||||
});
|
||||
while (queue.length > 0) {
|
||||
const el = queue.shift();
|
||||
const myEdges = graph[el] || [];
|
||||
myEdges.forEach((toNode) => {
|
||||
if (!result[toNode]) {
|
||||
result[toNode] = true;
|
||||
queue.push(toNode);
|
||||
}
|
||||
});
|
||||
}
|
||||
return result;
|
||||
}
|
||||
/**
|
||||
* Perform a topological sort on `graph`
|
||||
*/
|
||||
function topologicalSort(graph) {
|
||||
const allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {};
|
||||
Object.keys(graph).forEach((fromNode) => {
|
||||
allNodes[fromNode] = true;
|
||||
outgoingEdgeCount[fromNode] = graph[fromNode].length;
|
||||
graph[fromNode].forEach((toNode) => {
|
||||
allNodes[toNode] = true;
|
||||
outgoingEdgeCount[toNode] = outgoingEdgeCount[toNode] || 0;
|
||||
inverseEdges[toNode] = inverseEdges[toNode] || [];
|
||||
inverseEdges[toNode].push(fromNode);
|
||||
});
|
||||
});
|
||||
// https://en.wikipedia.org/wiki/Topological_sorting
|
||||
const S = [], L = [];
|
||||
Object.keys(allNodes).forEach((node) => {
|
||||
if (outgoingEdgeCount[node] === 0) {
|
||||
delete outgoingEdgeCount[node];
|
||||
S.push(node);
|
||||
}
|
||||
});
|
||||
while (S.length > 0) {
|
||||
// Ensure the exact same order all the time with the same inputs
|
||||
S.sort();
|
||||
const n = S.shift();
|
||||
L.push(n);
|
||||
const myInverseEdges = inverseEdges[n] || [];
|
||||
myInverseEdges.forEach((m) => {
|
||||
outgoingEdgeCount[m]--;
|
||||
if (outgoingEdgeCount[m] === 0) {
|
||||
delete outgoingEdgeCount[m];
|
||||
S.push(m);
|
||||
}
|
||||
});
|
||||
}
|
||||
if (Object.keys(outgoingEdgeCount).length > 0) {
|
||||
throw new Error('Cannot do topological sort on cyclic graph, remaining nodes: ' + Object.keys(outgoingEdgeCount));
|
||||
}
|
||||
return L;
|
||||
}
|
174
lib/vscode/build/lib/compilation.js
Normal file
@ -0,0 +1,174 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.watchTask = exports.compileTask = void 0;
|
||||
const es = require("event-stream");
|
||||
const fs = require("fs");
|
||||
const gulp = require("gulp");
|
||||
const path = require("path");
|
||||
const monacodts = require("./monaco-api");
|
||||
const nls = require("./nls");
|
||||
const reporter_1 = require("./reporter");
|
||||
const util = require("./util");
|
||||
const fancyLog = require("fancy-log");
|
||||
const ansiColors = require("ansi-colors");
|
||||
const os = require("os");
|
||||
const watch = require('./watch');
|
||||
const reporter = reporter_1.createReporter();
|
||||
function getTypeScriptCompilerOptions(src) {
|
||||
const rootDir = path.join(__dirname, `../../${src}`);
|
||||
let options = {};
|
||||
options.verbose = false;
|
||||
options.sourceMap = true;
|
||||
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
|
||||
options.sourceMap = false;
|
||||
}
|
||||
options.rootDir = rootDir;
|
||||
options.baseUrl = rootDir;
|
||||
options.sourceRoot = util.toFileUri(rootDir);
|
||||
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1;
|
||||
return options;
|
||||
}
|
||||
function createCompile(src, build, emitError) {
|
||||
const tsb = require('gulp-tsb');
|
||||
const sourcemaps = require('gulp-sourcemaps');
|
||||
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
|
||||
const overrideOptions = Object.assign(Object.assign({}, getTypeScriptCompilerOptions(src)), { inlineSources: Boolean(build) });
|
||||
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
|
||||
function pipeline(token) {
|
||||
const bom = require('gulp-bom');
|
||||
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
|
||||
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
|
||||
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
|
||||
const input = es.through();
|
||||
const output = input
|
||||
.pipe(utf8Filter)
|
||||
.pipe(bom()) // this is required to preserve BOM in test files that lose it otherwise
|
||||
.pipe(utf8Filter.restore)
|
||||
.pipe(tsFilter)
|
||||
.pipe(util.loadSourcemaps())
|
||||
.pipe(compilation(token))
|
||||
.pipe(noDeclarationsFilter)
|
||||
.pipe(build ? nls.nls() : es.through())
|
||||
.pipe(noDeclarationsFilter.restore)
|
||||
.pipe(sourcemaps.write('.', {
|
||||
addComment: false,
|
||||
includeContent: !!build,
|
||||
sourceRoot: overrideOptions.sourceRoot
|
||||
}))
|
||||
.pipe(tsFilter.restore)
|
||||
.pipe(reporter.end(!!emitError));
|
||||
return es.duplex(input, output);
|
||||
}
|
||||
pipeline.tsProjectSrc = () => {
|
||||
return compilation.src({ base: src });
|
||||
};
|
||||
return pipeline;
|
||||
}
|
||||
function compileTask(src, out, build) {
|
||||
return function () {
|
||||
if (os.totalmem() < 4000000000) {
|
||||
throw new Error('compilation requires 4GB of RAM');
|
||||
}
|
||||
const compile = createCompile(src, build, true);
|
||||
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
|
||||
let generator = new MonacoGenerator(false);
|
||||
if (src === 'src') {
|
||||
generator.execute();
|
||||
}
|
||||
return srcPipe
|
||||
.pipe(generator.stream)
|
||||
.pipe(compile())
|
||||
.pipe(gulp.dest(out));
|
||||
};
|
||||
}
|
||||
exports.compileTask = compileTask;
|
||||
function watchTask(out, build) {
|
||||
return function () {
|
||||
const compile = createCompile('src', build);
|
||||
const src = gulp.src('src/**', { base: 'src' });
|
||||
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
|
||||
let generator = new MonacoGenerator(true);
|
||||
generator.execute();
|
||||
return watchSrc
|
||||
.pipe(generator.stream)
|
||||
.pipe(util.incremental(compile, src, true))
|
||||
.pipe(gulp.dest(out));
|
||||
};
|
||||
}
|
||||
exports.watchTask = watchTask;
|
||||
const REPO_SRC_FOLDER = path.join(__dirname, '../../src');
|
||||
class MonacoGenerator {
|
||||
constructor(isWatch) {
|
||||
this._executeSoonTimer = null;
|
||||
this._isWatch = isWatch;
|
||||
this.stream = es.through();
|
||||
this._watchedFiles = {};
|
||||
let onWillReadFile = (moduleId, filePath) => {
|
||||
if (!this._isWatch) {
|
||||
return;
|
||||
}
|
||||
if (this._watchedFiles[filePath]) {
|
||||
return;
|
||||
}
|
||||
this._watchedFiles[filePath] = true;
|
||||
fs.watchFile(filePath, () => {
|
||||
this._declarationResolver.invalidateCache(moduleId);
|
||||
this._executeSoon();
|
||||
});
|
||||
};
|
||||
this._fsProvider = new class extends monacodts.FSProvider {
|
||||
readFileSync(moduleId, filePath) {
|
||||
onWillReadFile(moduleId, filePath);
|
||||
return super.readFileSync(moduleId, filePath);
|
||||
}
|
||||
};
|
||||
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
|
||||
if (this._isWatch) {
|
||||
fs.watchFile(monacodts.RECIPE_PATH, () => {
|
||||
this._executeSoon();
|
||||
});
|
||||
}
|
||||
}
|
||||
_executeSoon() {
|
||||
if (this._executeSoonTimer !== null) {
|
||||
clearTimeout(this._executeSoonTimer);
|
||||
this._executeSoonTimer = null;
|
||||
}
|
||||
this._executeSoonTimer = setTimeout(() => {
|
||||
this._executeSoonTimer = null;
|
||||
this.execute();
|
||||
}, 20);
|
||||
}
|
||||
_run() {
|
||||
let r = monacodts.run3(this._declarationResolver);
|
||||
if (!r && !this._isWatch) {
|
||||
// The build must always be able to generate the monaco.d.ts
|
||||
throw new Error(`monaco.d.ts generation error - Cannot continue`);
|
||||
}
|
||||
return r;
|
||||
}
|
||||
_log(message, ...rest) {
|
||||
fancyLog(ansiColors.cyan('[monaco.d.ts]'), message, ...rest);
|
||||
}
|
||||
execute() {
|
||||
const startTime = Date.now();
|
||||
const result = this._run();
|
||||
if (!result) {
|
||||
// nothing really changed
|
||||
return;
|
||||
}
|
||||
if (result.isTheSame) {
|
||||
return;
|
||||
}
|
||||
fs.writeFileSync(result.filePath, result.content);
|
||||
fs.writeFileSync(path.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums);
|
||||
this._log(`monaco.d.ts is changed - total time took ${Date.now() - startTime} ms`);
|
||||
if (!this._isWatch) {
|
||||
this.stream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
|
||||
}
|
||||
}
|
||||
}
|
@ -8,9 +8,6 @@
import * as es from 'event-stream';
import * as fs from 'fs';
import * as gulp from 'gulp';
import * as bom from 'gulp-bom';
import * as sourcemaps from 'gulp-sourcemaps';
import * as tsb from 'gulp-tsb';
import * as path from 'path';
import * as monacodts from './monaco-api';
import * as nls from './nls';
@ -41,12 +38,17 @@ function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions {
}

function createCompile(src: string, build: boolean, emitError?: boolean) {
const tsb = require('gulp-tsb') as typeof import('gulp-tsb');
const sourcemaps = require('gulp-sourcemaps') as typeof import('gulp-sourcemaps');


const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) };

const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));

function pipeline(token?: util.ICancellationToken) {
const bom = require('gulp-bom') as typeof import('gulp-bom');

const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
@ -61,7 +63,7 @@ function createCompile(src: string, build: boolean, emitError?: boolean) {
.pipe(util.loadSourcemaps())
.pipe(compilation(token))
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(build ? nls.nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
addComment: false,
60
lib/vscode/build/lib/dependencies.js
Normal file
@ -0,0 +1,60 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getProductionDependencies = void 0;
|
||||
const path = require("path");
|
||||
const cp = require("child_process");
|
||||
const _ = require("underscore");
|
||||
const parseSemver = require('parse-semver');
|
||||
function asYarnDependency(prefix, tree) {
|
||||
let parseResult;
|
||||
try {
|
||||
parseResult = parseSemver(tree.name);
|
||||
}
|
||||
catch (err) {
|
||||
err.message += `: ${tree.name}`;
|
||||
console.warn(`Could not parse semver: ${tree.name}`);
|
||||
return null;
|
||||
}
|
||||
// not an actual dependency in disk
|
||||
if (parseResult.version !== parseResult.range) {
|
||||
return null;
|
||||
}
|
||||
const name = parseResult.name;
|
||||
const version = parseResult.version;
|
||||
const dependencyPath = path.join(prefix, name);
|
||||
const children = [];
|
||||
for (const child of (tree.children || [])) {
|
||||
const dep = asYarnDependency(path.join(prefix, name, 'node_modules'), child);
|
||||
if (dep) {
|
||||
children.push(dep);
|
||||
}
|
||||
}
|
||||
return { name, version, path: dependencyPath, children };
|
||||
}
|
||||
function getYarnProductionDependencies(cwd) {
|
||||
const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: Object.assign(Object.assign({}, process.env), { NODE_ENV: 'production' }), stdio: [null, null, 'inherit'] });
|
||||
const match = /^{"type":"tree".*$/m.exec(raw);
|
||||
if (!match || match.length !== 1) {
|
||||
throw new Error('Could not parse result of `yarn list --json`');
|
||||
}
|
||||
const trees = JSON.parse(match[0]).data.trees;
|
||||
return trees
|
||||
.map(tree => asYarnDependency(path.join(cwd, 'node_modules'), tree))
|
||||
.filter((dep) => !!dep);
|
||||
}
|
||||
function getProductionDependencies(cwd) {
|
||||
const result = [];
|
||||
const deps = getYarnProductionDependencies(cwd);
|
||||
const flatten = (dep) => { result.push({ name: dep.name, version: dep.version, path: dep.path }); dep.children.forEach(flatten); };
|
||||
deps.forEach(flatten);
|
||||
return _.uniq(result);
|
||||
}
|
||||
exports.getProductionDependencies = getProductionDependencies;
|
||||
if (require.main === module) {
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
console.log(JSON.stringify(getProductionDependencies(root), null, ' '));
|
||||
}
|
@ -5,12 +5,27 @@

'use strict';

const path = require('path');
import * as path from 'path';
import * as cp from 'child_process';
import * as _ from 'underscore';
const parseSemver = require('parse-semver');
const cp = require('child_process');
const _ = require('underscore');

function asYarnDependency(prefix, tree) {
interface Tree {
readonly name: string;
readonly children?: Tree[];
}

interface FlatDependency {
readonly name: string;
readonly version: string;
readonly path: string;
}

interface Dependency extends FlatDependency {
readonly children: Dependency[];
}

function asYarnDependency(prefix: string, tree: Tree): Dependency | null {
let parseResult;

try {
@ -42,7 +57,7 @@ function asYarnDependency(prefix, tree) {
return { name, version, path: dependencyPath, children };
}

function getYarnProductionDependencies(cwd) {
function getYarnProductionDependencies(cwd: string): Dependency[] {
const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'inherit'] });
const match = /^{"type":"tree".*$/m.exec(raw);

@ -50,25 +65,22 @@ function getYarnProductionDependencies(cwd) {
throw new Error('Could not parse result of `yarn list --json`');
}

const trees = JSON.parse(match[0]).data.trees;
const trees = JSON.parse(match[0]).data.trees as Tree[];

return trees
.map(tree => asYarnDependency(path.join(cwd, 'node_modules'), tree))
.filter(dep => !!dep);
.filter<Dependency>((dep): dep is Dependency => !!dep);
}

function getProductionDependencies(cwd) {
const result = [];
export function getProductionDependencies(cwd: string): FlatDependency[] {
const result: FlatDependency[] = [];
const deps = getYarnProductionDependencies(cwd);
const flatten = dep => { result.push({ name: dep.name, version: dep.version, path: dep.path }); dep.children.forEach(flatten); };
const flatten = (dep: Dependency) => { result.push({ name: dep.name, version: dep.version, path: dep.path }); dep.children.forEach(flatten); };
deps.forEach(flatten);

return _.uniq(result);
}

module.exports.getProductionDependencies = getProductionDependencies;

if (require.main === module) {
const root = path.dirname(__dirname);
const root = path.dirname(path.dirname(__dirname));
console.log(JSON.stringify(getProductionDependencies(root), null, ' '));
}
111
lib/vscode/build/lib/electron.js
Normal file
@ -0,0 +1,111 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.config = void 0;
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const vfs = require("vinyl-fs");
|
||||
const filter = require("gulp-filter");
|
||||
const _ = require("underscore");
|
||||
const util = require("./util");
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
|
||||
const commit = util.getVersion(root);
|
||||
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
|
||||
function darwinBundleDocumentType(extensions, icon) {
|
||||
return {
|
||||
name: product.nameLong + ' document',
|
||||
role: 'Editor',
|
||||
ostypes: ['TEXT', 'utxt', 'TUTX', '****'],
|
||||
extensions: extensions,
|
||||
iconFile: icon
|
||||
};
|
||||
}
|
||||
exports.config = {
|
||||
version: util.getElectronVersion(),
|
||||
productAppName: product.nameLong,
|
||||
companyName: 'Microsoft Corporation',
|
||||
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
|
||||
darwinIcon: 'resources/darwin/code.icns',
|
||||
darwinBundleIdentifier: product.darwinBundleIdentifier,
|
||||
darwinApplicationCategoryType: 'public.app-category.developer-tools',
|
||||
darwinHelpBookFolder: 'VS Code HelpBook',
|
||||
darwinHelpBookName: 'VS Code HelpBook',
|
||||
darwinBundleDocumentTypes: [
|
||||
darwinBundleDocumentType(['bat', 'cmd'], 'resources/darwin/bat.icns'),
|
||||
darwinBundleDocumentType(['bowerrc'], 'resources/darwin/bower.icns'),
|
||||
darwinBundleDocumentType(['c', 'h'], 'resources/darwin/c.icns'),
|
||||
darwinBundleDocumentType(['config', 'editorconfig', 'gitattributes', 'gitconfig', 'gitignore', 'ini'], 'resources/darwin/config.icns'),
|
||||
darwinBundleDocumentType(['cc', 'cpp', 'cxx', 'c++', 'hh', 'hpp', 'hxx', 'h++'], 'resources/darwin/cpp.icns'),
|
||||
darwinBundleDocumentType(['cs', 'csx'], 'resources/darwin/csharp.icns'),
|
||||
darwinBundleDocumentType(['css'], 'resources/darwin/css.icns'),
|
||||
darwinBundleDocumentType(['go'], 'resources/darwin/go.icns'),
|
||||
darwinBundleDocumentType(['asp', 'aspx', 'cshtml', 'htm', 'html', 'jshtm', 'jsp', 'phtml', 'shtml'], 'resources/darwin/html.icns'),
|
||||
darwinBundleDocumentType(['jade'], 'resources/darwin/jade.icns'),
|
||||
darwinBundleDocumentType(['jav', 'java'], 'resources/darwin/java.icns'),
|
||||
darwinBundleDocumentType(['js', 'jscsrc', 'jshintrc', 'mjs', 'cjs'], 'resources/darwin/javascript.icns'),
|
||||
darwinBundleDocumentType(['json'], 'resources/darwin/json.icns'),
|
||||
darwinBundleDocumentType(['less'], 'resources/darwin/less.icns'),
|
||||
darwinBundleDocumentType(['markdown', 'md', 'mdoc', 'mdown', 'mdtext', 'mdtxt', 'mdwn', 'mkd', 'mkdn'], 'resources/darwin/markdown.icns'),
|
||||
darwinBundleDocumentType(['php'], 'resources/darwin/php.icns'),
|
||||
darwinBundleDocumentType(['ps1', 'psd1', 'psm1'], 'resources/darwin/powershell.icns'),
|
||||
darwinBundleDocumentType(['py'], 'resources/darwin/python.icns'),
|
||||
darwinBundleDocumentType(['gemspec', 'rb'], 'resources/darwin/ruby.icns'),
|
||||
darwinBundleDocumentType(['scss'], 'resources/darwin/sass.icns'),
|
||||
darwinBundleDocumentType(['bash', 'bash_login', 'bash_logout', 'bash_profile', 'bashrc', 'profile', 'rhistory', 'rprofile', 'sh', 'zlogin', 'zlogout', 'zprofile', 'zsh', 'zshenv', 'zshrc'], 'resources/darwin/shell.icns'),
|
||||
darwinBundleDocumentType(['sql'], 'resources/darwin/sql.icns'),
|
||||
darwinBundleDocumentType(['ts'], 'resources/darwin/typescript.icns'),
|
||||
darwinBundleDocumentType(['tsx', 'jsx'], 'resources/darwin/react.icns'),
|
||||
darwinBundleDocumentType(['vue'], 'resources/darwin/vue.icns'),
|
||||
darwinBundleDocumentType(['ascx', 'csproj', 'dtd', 'wxi', 'wxl', 'wxs', 'xml', 'xaml'], 'resources/darwin/xml.icns'),
|
||||
darwinBundleDocumentType(['eyaml', 'eyml', 'yaml', 'yml'], 'resources/darwin/yaml.icns'),
|
||||
darwinBundleDocumentType(['clj', 'cljs', 'cljx', 'clojure', 'code-workspace', 'coffee', 'containerfile', 'ctp', 'dockerfile', 'dot', 'edn', 'fs', 'fsi', 'fsscript', 'fsx', 'handlebars', 'hbs', 'lua', 'm', 'makefile', 'ml', 'mli', 'pl', 'pl6', 'pm', 'pm6', 'pod', 'pp', 'properties', 'psgi', 'pug', 'r', 'rs', 'rt', 'svg', 'svgz', 't', 'txt', 'vb', 'xcodeproj', 'xcworkspace'], 'resources/darwin/default.icns')
|
||||
],
|
||||
darwinBundleURLTypes: [{
|
||||
role: 'Viewer',
|
||||
name: product.nameLong,
|
||||
urlSchemes: [product.urlProtocol]
|
||||
}],
|
||||
darwinForceDarkModeSupport: true,
|
||||
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
|
||||
linuxExecutableName: product.applicationName,
|
||||
winIcon: 'resources/win32/code.ico',
|
||||
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
|
||||
repo: product.electronRepository || undefined
|
||||
};
|
||||
function getElectron(arch) {
|
||||
return () => {
|
||||
const electron = require('gulp-atom-electron');
|
||||
const json = require('gulp-json-editor');
|
||||
const electronOpts = _.extend({}, exports.config, {
|
||||
platform: process.platform,
|
||||
arch: arch === 'armhf' ? 'arm' : arch,
|
||||
ffmpegChromium: true,
|
||||
keepDefaultApp: true
|
||||
});
|
||||
return vfs.src('package.json')
|
||||
.pipe(json({ name: product.nameShort }))
|
||||
.pipe(electron(electronOpts))
|
||||
.pipe(filter(['**', '!**/app/package.json']))
|
||||
.pipe(vfs.dest('.build/electron'));
|
||||
};
|
||||
}
|
||||
async function main(arch = process.arch) {
|
||||
const version = util.getElectronVersion();
|
||||
const electronPath = path.join(root, '.build', 'electron');
|
||||
const versionFile = path.join(electronPath, 'version');
|
||||
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
|
||||
if (!isUpToDate) {
|
||||
await util.rimraf(electronPath)();
|
||||
await util.streamToPromise(getElectron(arch)());
|
||||
}
|
||||
}
|
||||
if (require.main === module) {
|
||||
main(process.argv[2]).catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
@ -9,12 +9,9 @@ import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as vfs from 'vinyl-fs';
|
||||
import * as filter from 'gulp-filter';
|
||||
import * as json from 'gulp-json-editor';
|
||||
import * as _ from 'underscore';
|
||||
import * as util from './util';
|
||||
|
||||
const electron = require('gulp-atom-electron');
|
||||
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
|
||||
const commit = util.getVersion(root);
|
||||
@ -86,6 +83,9 @@ export const config = {
|
||||
|
||||
function getElectron(arch: string): () => NodeJS.ReadWriteStream {
|
||||
return () => {
|
||||
const electron = require('gulp-atom-electron');
|
||||
const json = require('gulp-json-editor') as typeof import('gulp-json-editor');
|
||||
|
||||
const electronOpts = _.extend({}, config, {
|
||||
platform: process.platform,
|
||||
arch: arch === 'armhf' ? 'arm' : arch,
|
||||
|
59
lib/vscode/build/lib/eslint/code-import-patterns.js
Normal file
@ -0,0 +1,59 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
const path_1 = require("path");
|
||||
const minimatch = require("minimatch");
|
||||
const utils_1 = require("./utils");
|
||||
module.exports = new class {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
badImport: 'Imports violates \'{{restrictions}}\' restrictions. See https://github.com/microsoft/vscode/wiki/Source-Code-Organization'
|
||||
},
|
||||
docs: {
|
||||
url: 'https://github.com/microsoft/vscode/wiki/Source-Code-Organization'
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
const configs = context.options;
|
||||
for (const config of configs) {
|
||||
if (minimatch(context.getFilename(), config.target)) {
|
||||
return utils_1.createImportRuleListener((node, value) => this._checkImport(context, config, node, value));
|
||||
}
|
||||
}
|
||||
return {};
|
||||
}
|
||||
_checkImport(context, config, node, path) {
|
||||
// resolve relative paths
|
||||
if (path[0] === '.') {
|
||||
path = path_1.join(context.getFilename(), path);
|
||||
}
|
||||
let restrictions;
|
||||
if (typeof config.restrictions === 'string') {
|
||||
restrictions = [config.restrictions];
|
||||
}
|
||||
else {
|
||||
restrictions = config.restrictions;
|
||||
}
|
||||
let matched = false;
|
||||
for (const pattern of restrictions) {
|
||||
if (minimatch(path, pattern)) {
|
||||
matched = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!matched) {
|
||||
// None of the restrictions matched
|
||||
context.report({
|
||||
loc: node.loc,
|
||||
messageId: 'badImport',
|
||||
data: {
|
||||
restrictions: restrictions.join(' or ')
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
68
lib/vscode/build/lib/eslint/code-layering.js
Normal file
@ -0,0 +1,68 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
const path_1 = require("path");
|
||||
const utils_1 = require("./utils");
|
||||
module.exports = new class {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
layerbreaker: 'Bad layering. You are not allowed to access {{from}} from here, allowed layers are: [{{allowed}}]'
|
||||
},
|
||||
docs: {
|
||||
url: 'https://github.com/microsoft/vscode/wiki/Source-Code-Organization'
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
const fileDirname = path_1.dirname(context.getFilename());
|
||||
const parts = fileDirname.split(/\\|\//);
|
||||
const ruleArgs = context.options[0];
|
||||
let config;
|
||||
for (let i = parts.length - 1; i >= 0; i--) {
|
||||
if (ruleArgs[parts[i]]) {
|
||||
config = {
|
||||
allowed: new Set(ruleArgs[parts[i]]).add(parts[i]),
|
||||
disallowed: new Set()
|
||||
};
|
||||
Object.keys(ruleArgs).forEach(key => {
|
||||
if (!config.allowed.has(key)) {
|
||||
config.disallowed.add(key);
|
||||
}
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!config) {
|
||||
// nothing
|
||||
return {};
|
||||
}
|
||||
return utils_1.createImportRuleListener((node, path) => {
|
||||
if (path[0] === '.') {
|
||||
path = path_1.join(path_1.dirname(context.getFilename()), path);
|
||||
}
|
||||
const parts = path_1.dirname(path).split(/\\|\//);
|
||||
for (let i = parts.length - 1; i >= 0; i--) {
|
||||
const part = parts[i];
|
||||
if (config.allowed.has(part)) {
|
||||
// GOOD - same layer
|
||||
break;
|
||||
}
|
||||
if (config.disallowed.has(part)) {
|
||||
// BAD - wrong layer
|
||||
context.report({
|
||||
loc: node.loc,
|
||||
messageId: 'layerbreaker',
|
||||
data: {
|
||||
from: part,
|
||||
allowed: [...config.allowed.keys()].join(', ')
|
||||
}
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
@ -0,0 +1,38 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
const path_1 = require("path");
|
||||
const utils_1 = require("./utils");
|
||||
module.exports = new class NoNlsInStandaloneEditorRule {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
noNls: 'Not allowed to import vs/nls in standalone editor modules. Use standaloneStrings.ts'
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
const fileName = context.getFilename();
|
||||
if (/vs(\/|\\)editor(\/|\\)standalone(\/|\\)/.test(fileName)
|
||||
|| /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone(\/|\\)/.test(fileName)
|
||||
|| /vs(\/|\\)editor(\/|\\)editor.api/.test(fileName)
|
||||
|| /vs(\/|\\)editor(\/|\\)editor.main/.test(fileName)
|
||||
|| /vs(\/|\\)editor(\/|\\)editor.worker/.test(fileName)) {
|
||||
return utils_1.createImportRuleListener((node, path) => {
|
||||
// resolve relative paths
|
||||
if (path[0] === '.') {
|
||||
path = path_1.join(context.getFilename(), path);
|
||||
}
|
||||
if (/vs(\/|\\)nls/.test(path)) {
|
||||
context.report({
|
||||
loc: node.loc,
|
||||
messageId: 'noNls'
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
return {};
|
||||
}
|
||||
};
|
41
lib/vscode/build/lib/eslint/code-no-standalone-editor.js
Normal file
@ -0,0 +1,41 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
const path_1 = require("path");
|
||||
const utils_1 = require("./utils");
|
||||
module.exports = new class NoNlsInStandaloneEditorRule {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
badImport: 'Not allowed to import standalone editor modules.'
|
||||
},
|
||||
docs: {
|
||||
url: 'https://github.com/microsoft/vscode/wiki/Source-Code-Organization'
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
if (/vs(\/|\\)editor/.test(context.getFilename())) {
|
||||
// the vs/editor folder is allowed to use the standalone editor
|
||||
return {};
|
||||
}
|
||||
return utils_1.createImportRuleListener((node, path) => {
|
||||
// resolve relative paths
|
||||
if (path[0] === '.') {
|
||||
path = path_1.join(context.getFilename(), path);
|
||||
}
|
||||
if (/vs(\/|\\)editor(\/|\\)standalone(\/|\\)/.test(path)
|
||||
|| /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone(\/|\\)/.test(path)
|
||||
|| /vs(\/|\\)editor(\/|\\)editor.api/.test(path)
|
||||
|| /vs(\/|\\)editor(\/|\\)editor.main/.test(path)
|
||||
|| /vs(\/|\\)editor(\/|\\)editor.worker/.test(path)) {
|
||||
context.report({
|
||||
loc: node.loc,
|
||||
messageId: 'badImport'
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
111
lib/vscode/build/lib/eslint/code-no-unexternalized-strings.js
Normal file
@ -0,0 +1,111 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
var _a;
|
||||
const experimental_utils_1 = require("@typescript-eslint/experimental-utils");
|
||||
function isStringLiteral(node) {
|
||||
return !!node && node.type === experimental_utils_1.AST_NODE_TYPES.Literal && typeof node.value === 'string';
|
||||
}
|
||||
function isDoubleQuoted(node) {
|
||||
return node.raw[0] === '"' && node.raw[node.raw.length - 1] === '"';
|
||||
}
|
||||
module.exports = new (_a = class NoUnexternalizedStrings {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
doubleQuoted: 'Only use double-quoted strings for externalized strings.',
|
||||
badKey: 'The key \'{{key}}\' doesn\'t conform to a valid localize identifier.',
|
||||
duplicateKey: 'Duplicate key \'{{key}}\' with different message value.',
|
||||
badMessage: 'Message argument to \'{{message}}\' must be a string literal.'
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
const externalizedStringLiterals = new Map();
|
||||
const doubleQuotedStringLiterals = new Set();
|
||||
function collectDoubleQuotedStrings(node) {
|
||||
if (isStringLiteral(node) && isDoubleQuoted(node)) {
|
||||
doubleQuotedStringLiterals.add(node);
|
||||
}
|
||||
}
|
||||
function visitLocalizeCall(node) {
|
||||
// localize(key, message)
|
||||
const [keyNode, messageNode] = node.arguments;
|
||||
// (1)
|
||||
// extract key so that it can be checked later
|
||||
let key;
|
||||
if (isStringLiteral(keyNode)) {
|
||||
doubleQuotedStringLiterals.delete(keyNode);
|
||||
key = keyNode.value;
|
||||
}
|
||||
else if (keyNode.type === experimental_utils_1.AST_NODE_TYPES.ObjectExpression) {
|
||||
for (let property of keyNode.properties) {
|
||||
if (property.type === experimental_utils_1.AST_NODE_TYPES.Property && !property.computed) {
|
||||
if (property.key.type === experimental_utils_1.AST_NODE_TYPES.Identifier && property.key.name === 'key') {
|
||||
if (isStringLiteral(property.value)) {
|
||||
doubleQuotedStringLiterals.delete(property.value);
|
||||
key = property.value.value;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (typeof key === 'string') {
|
||||
let array = externalizedStringLiterals.get(key);
|
||||
if (!array) {
|
||||
array = [];
|
||||
externalizedStringLiterals.set(key, array);
|
||||
}
|
||||
array.push({ call: node, message: messageNode });
|
||||
}
|
||||
// (2)
|
||||
// remove message-argument from doubleQuoted list and make
|
||||
// sure it is a string-literal
|
||||
doubleQuotedStringLiterals.delete(messageNode);
|
||||
if (!isStringLiteral(messageNode)) {
|
||||
context.report({
|
||||
loc: messageNode.loc,
|
||||
messageId: 'badMessage',
|
||||
data: { message: context.getSourceCode().getText(node) }
|
||||
});
|
||||
}
|
||||
}
|
||||
function reportBadStringsAndBadKeys() {
|
||||
// (1)
|
||||
// report all strings that are in double quotes
|
||||
for (const node of doubleQuotedStringLiterals) {
|
||||
context.report({ loc: node.loc, messageId: 'doubleQuoted' });
|
||||
}
|
||||
for (const [key, values] of externalizedStringLiterals) {
|
||||
// (2)
|
||||
// report all invalid NLS keys
|
||||
if (!key.match(NoUnexternalizedStrings._rNlsKeys)) {
|
||||
for (let value of values) {
|
||||
context.report({ loc: value.call.loc, messageId: 'badKey', data: { key } });
|
||||
}
|
||||
}
|
||||
// (2)
|
||||
// report all invalid duplicates (same key, different message)
|
||||
if (values.length > 1) {
|
||||
for (let i = 1; i < values.length; i++) {
|
||||
if (context.getSourceCode().getText(values[i - 1].message) !== context.getSourceCode().getText(values[i].message)) {
|
||||
context.report({ loc: values[i].call.loc, messageId: 'duplicateKey', data: { key } });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return {
|
||||
['Literal']: (node) => collectDoubleQuotedStrings(node),
|
||||
['ExpressionStatement[directive] Literal:exit']: (node) => doubleQuotedStringLiterals.delete(node),
|
||||
['CallExpression[callee.type="MemberExpression"][callee.object.name="nls"][callee.property.name="localize"]:exit']: (node) => visitLocalizeCall(node),
|
||||
['CallExpression[callee.name="localize"][arguments.length>=2]:exit']: (node) => visitLocalizeCall(node),
|
||||
['Program:exit']: reportBadStringsAndBadKeys,
|
||||
};
|
||||
}
|
||||
},
|
||||
_a._rNlsKeys = /^[_a-zA-Z0-9][ .\-_a-zA-Z0-9]*$/,
|
||||
_a);
|
122
lib/vscode/build/lib/eslint/code-no-unused-expressions.js
Normal file
@ -0,0 +1,122 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
// FORKED FROM https://github.com/eslint/eslint/blob/b23ad0d789a909baf8d7c41a35bc53df932eaf30/lib/rules/no-unused-expressions.js
|
||||
// and added support for `OptionalCallExpression`, see https://github.com/facebook/create-react-app/issues/8107 and https://github.com/eslint/eslint/issues/12642
|
||||
/**
|
||||
* @fileoverview Flag expressions in statement position that do not side effect
|
||||
* @author Michael Ficarra
|
||||
*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
//------------------------------------------------------------------------------
|
||||
// Rule Definition
|
||||
//------------------------------------------------------------------------------
|
||||
module.exports = {
|
||||
meta: {
|
||||
type: 'suggestion',
|
||||
docs: {
|
||||
description: 'disallow unused expressions',
|
||||
category: 'Best Practices',
|
||||
recommended: false,
|
||||
url: 'https://eslint.org/docs/rules/no-unused-expressions'
|
||||
},
|
||||
schema: [
|
||||
{
|
||||
type: 'object',
|
||||
properties: {
|
||||
allowShortCircuit: {
|
||||
type: 'boolean',
|
||||
default: false
|
||||
},
|
||||
allowTernary: {
|
||||
type: 'boolean',
|
||||
default: false
|
||||
},
|
||||
allowTaggedTemplates: {
|
||||
type: 'boolean',
|
||||
default: false
|
||||
}
|
||||
},
|
||||
additionalProperties: false
|
||||
}
|
||||
]
|
||||
},
|
||||
create(context) {
|
||||
const config = context.options[0] || {}, allowShortCircuit = config.allowShortCircuit || false, allowTernary = config.allowTernary || false, allowTaggedTemplates = config.allowTaggedTemplates || false;
|
||||
// eslint-disable-next-line jsdoc/require-description
|
||||
/**
|
||||
* @param node any node
|
||||
* @returns whether the given node structurally represents a directive
|
||||
*/
|
||||
function looksLikeDirective(node) {
|
||||
return node.type === 'ExpressionStatement' &&
|
||||
node.expression.type === 'Literal' && typeof node.expression.value === 'string';
|
||||
}
|
||||
// eslint-disable-next-line jsdoc/require-description
|
||||
/**
|
||||
* @param predicate ([a] -> Boolean) the function used to make the determination
|
||||
* @param list the input list
|
||||
* @returns the leading sequence of members in the given list that pass the given predicate
|
||||
*/
|
||||
function takeWhile(predicate, list) {
|
||||
for (let i = 0; i < list.length; ++i) {
|
||||
if (!predicate(list[i])) {
|
||||
return list.slice(0, i);
|
||||
}
|
||||
}
|
||||
return list.slice();
|
||||
}
|
||||
// eslint-disable-next-line jsdoc/require-description
|
||||
/**
|
||||
* @param node a Program or BlockStatement node
|
||||
* @returns the leading sequence of directive nodes in the given node's body
|
||||
*/
|
||||
function directives(node) {
|
||||
return takeWhile(looksLikeDirective, node.body);
|
||||
}
|
||||
// eslint-disable-next-line jsdoc/require-description
|
||||
/**
|
||||
* @param node any node
|
||||
* @param ancestors the given node's ancestors
|
||||
* @returns whether the given node is considered a directive in its current position
|
||||
*/
|
||||
function isDirective(node, ancestors) {
|
||||
const parent = ancestors[ancestors.length - 1], grandparent = ancestors[ancestors.length - 2];
|
||||
return (parent.type === 'Program' || parent.type === 'BlockStatement' &&
|
||||
(/Function/u.test(grandparent.type))) &&
|
||||
directives(parent).indexOf(node) >= 0;
|
||||
}
|
||||
/**
|
||||
* Determines whether or not a given node is a valid expression. Recurses on short circuit eval and ternary nodes if enabled by flags.
|
||||
* @param node any node
|
||||
* @returns whether the given node is a valid expression
|
||||
*/
|
||||
function isValidExpression(node) {
|
||||
if (allowTernary) {
|
||||
// Recursive check for ternary and logical expressions
|
||||
if (node.type === 'ConditionalExpression') {
|
||||
return isValidExpression(node.consequent) && isValidExpression(node.alternate);
|
||||
}
|
||||
}
|
||||
if (allowShortCircuit) {
|
||||
if (node.type === 'LogicalExpression') {
|
||||
return isValidExpression(node.right);
|
||||
}
|
||||
}
|
||||
if (allowTaggedTemplates && node.type === 'TaggedTemplateExpression') {
|
||||
return true;
|
||||
}
|
||||
return /^(?:Assignment|OptionalCall|Call|New|Update|Yield|Await)Expression$/u.test(node.type) ||
|
||||
(node.type === 'UnaryExpression' && ['delete', 'void'].indexOf(node.operator) >= 0);
|
||||
}
|
||||
return {
|
||||
ExpressionStatement(node) {
|
||||
if (!isValidExpression(node.expression) && !isDirective(node, context.getAncestors())) {
|
||||
context.report({ node: node, message: 'Expected an assignment or function call and instead saw an expression.' });
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
57
lib/vscode/build/lib/eslint/code-translation-remind.js
Normal file
@ -0,0 +1,57 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
var _a;
|
||||
const fs_1 = require("fs");
|
||||
const utils_1 = require("./utils");
|
||||
module.exports = new (_a = class TranslationRemind {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
missing: 'Please add \'{{resource}}\' to ./build/lib/i18n.resources.json file to use translations here.'
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
return utils_1.createImportRuleListener((node, path) => this._checkImport(context, node, path));
|
||||
}
|
||||
_checkImport(context, node, path) {
|
||||
if (path !== TranslationRemind.NLS_MODULE) {
|
||||
return;
|
||||
}
|
||||
const currentFile = context.getFilename();
|
||||
const matchService = currentFile.match(/vs\/workbench\/services\/\w+/);
|
||||
const matchPart = currentFile.match(/vs\/workbench\/contrib\/\w+/);
|
||||
if (!matchService && !matchPart) {
|
||||
return;
|
||||
}
|
||||
const resource = matchService ? matchService[0] : matchPart[0];
|
||||
let resourceDefined = false;
|
||||
let json;
|
||||
try {
|
||||
json = fs_1.readFileSync('./build/lib/i18n.resources.json', 'utf8');
|
||||
}
|
||||
catch (e) {
|
||||
console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.');
|
||||
return;
|
||||
}
|
||||
const workbenchResources = JSON.parse(json).workbench;
|
||||
workbenchResources.forEach((existingResource) => {
|
||||
if (existingResource.name === resource) {
|
||||
resourceDefined = true;
|
||||
return;
|
||||
}
|
||||
});
|
||||
if (!resourceDefined) {
|
||||
context.report({
|
||||
loc: node.loc,
|
||||
messageId: 'missing',
|
||||
data: { resource }
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
_a.NLS_MODULE = 'vs/nls',
|
||||
_a);
|
37
lib/vscode/build/lib/eslint/utils.js
Normal file
@ -0,0 +1,37 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createImportRuleListener = void 0;
|
||||
function createImportRuleListener(validateImport) {
|
||||
function _checkImport(node) {
|
||||
if (node && node.type === 'Literal' && typeof node.value === 'string') {
|
||||
validateImport(node, node.value);
|
||||
}
|
||||
}
|
||||
return {
|
||||
// import ??? from 'module'
|
||||
ImportDeclaration: (node) => {
|
||||
_checkImport(node.source);
|
||||
},
|
||||
// import('module').then(...) OR await import('module')
|
||||
['CallExpression[callee.type="Import"][arguments.length=1] > Literal']: (node) => {
|
||||
_checkImport(node);
|
||||
},
|
||||
// import foo = ...
|
||||
['TSImportEqualsDeclaration > TSExternalModuleReference > Literal']: (node) => {
|
||||
_checkImport(node);
|
||||
},
|
||||
// export ?? from 'module'
|
||||
ExportAllDeclaration: (node) => {
|
||||
_checkImport(node.source);
|
||||
},
|
||||
// export {foo} from 'module'
|
||||
ExportNamedDeclaration: (node) => {
|
||||
_checkImport(node.source);
|
||||
},
|
||||
};
|
||||
}
|
||||
exports.createImportRuleListener = createImportRuleListener;
|
33
lib/vscode/build/lib/eslint/vscode-dts-cancellation.js
Normal file
@ -0,0 +1,33 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
const experimental_utils_1 = require("@typescript-eslint/experimental-utils");
|
||||
module.exports = new class ApiProviderNaming {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
noToken: 'Function lacks a cancellation token, preferable as last argument',
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
return {
|
||||
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature[key.name=/^(provide|resolve).+/]']: (node) => {
|
||||
let found = false;
|
||||
for (let param of node.params) {
|
||||
if (param.type === experimental_utils_1.AST_NODE_TYPES.Identifier) {
|
||||
found = found || param.name === 'token';
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'noToken'
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
38
lib/vscode/build/lib/eslint/vscode-dts-cancellation.ts
Normal file
@ -0,0 +1,38 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as eslint from 'eslint';
|
||||
import { AST_NODE_TYPES, TSESTree } from '@typescript-eslint/experimental-utils';
|
||||
|
||||
export = new class ApiProviderNaming implements eslint.Rule.RuleModule {
|
||||
|
||||
readonly meta: eslint.Rule.RuleMetaData = {
|
||||
messages: {
|
||||
noToken: 'Function lacks a cancellation token, preferable as last argument',
|
||||
}
|
||||
};
|
||||
|
||||
create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {
|
||||
|
||||
return {
|
||||
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature[key.name=/^(provide|resolve).+/]']: (node: any) => {
|
||||
|
||||
let found = false;
|
||||
for (let param of (<TSESTree.TSMethodSignature>node).params) {
|
||||
if (param.type === AST_NODE_TYPES.Identifier) {
|
||||
found = found || param.name === 'token';
|
||||
}
|
||||
}
|
||||
|
||||
if (!found) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'noToken'
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
35
lib/vscode/build/lib/eslint/vscode-dts-create-func.js
Normal file
@ -0,0 +1,35 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
const experimental_utils_1 = require("@typescript-eslint/experimental-utils");
|
||||
module.exports = new class ApiLiteralOrTypes {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
docs: { url: 'https://github.com/microsoft/vscode/wiki/Extension-API-guidelines#creating-objects' },
|
||||
messages: { sync: '`createXYZ`-functions are constructor-replacements and therefore must return sync', }
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
return {
|
||||
['TSDeclareFunction Identifier[name=/create.*/]']: (node) => {
|
||||
var _a;
|
||||
const decl = node.parent;
|
||||
if (((_a = decl.returnType) === null || _a === void 0 ? void 0 : _a.typeAnnotation.type) !== experimental_utils_1.AST_NODE_TYPES.TSTypeReference) {
|
||||
return;
|
||||
}
|
||||
if (decl.returnType.typeAnnotation.typeName.type !== experimental_utils_1.AST_NODE_TYPES.Identifier) {
|
||||
return;
|
||||
}
|
||||
const ident = decl.returnType.typeAnnotation.typeName.name;
|
||||
if (ident === 'Promise' || ident === 'Thenable') {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'sync'
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
87
lib/vscode/build/lib/eslint/vscode-dts-event-naming.js
Normal file
@ -0,0 +1,87 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
var _a;
|
||||
const experimental_utils_1 = require("@typescript-eslint/experimental-utils");
|
||||
module.exports = new (_a = class ApiEventNaming {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
docs: {
|
||||
url: 'https://github.com/microsoft/vscode/wiki/Extension-API-guidelines#event-naming'
|
||||
},
|
||||
messages: {
|
||||
naming: 'Event names must follow this pattern: `on[Did|Will]<Verb><Subject>`',
|
||||
verb: 'Unknown verb \'{{verb}}\' - is this really a verb? Iff so, then add this verb to the configuration',
|
||||
subject: 'Unknown subject \'{{subject}}\' - This subject has not been used before but it should refer to something in the API',
|
||||
unknown: 'UNKNOWN event declaration, lint-rule needs tweaking'
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
const config = context.options[0];
|
||||
const allowed = new Set(config.allowed);
|
||||
const verbs = new Set(config.verbs);
|
||||
return {
|
||||
['TSTypeAnnotation TSTypeReference Identifier[name="Event"]']: (node) => {
|
||||
var _a, _b;
|
||||
const def = (_b = (_a = node.parent) === null || _a === void 0 ? void 0 : _a.parent) === null || _b === void 0 ? void 0 : _b.parent;
|
||||
const ident = this.getIdent(def);
|
||||
if (!ident) {
|
||||
// event on unknown structure...
|
||||
return context.report({
|
||||
node,
|
||||
message: 'unknown'
|
||||
});
|
||||
}
|
||||
if (allowed.has(ident.name)) {
|
||||
// configured exception
|
||||
return;
|
||||
}
|
||||
const match = ApiEventNaming._nameRegExp.exec(ident.name);
|
||||
if (!match) {
|
||||
context.report({
|
||||
node: ident,
|
||||
messageId: 'naming'
|
||||
});
|
||||
return;
|
||||
}
|
||||
// check that <verb> is spelled out (configured) as verb
|
||||
if (!verbs.has(match[2].toLowerCase())) {
|
||||
context.report({
|
||||
node: ident,
|
||||
messageId: 'verb',
|
||||
data: { verb: match[2] }
|
||||
});
|
||||
}
|
||||
// check that a subject (if present) has occurred
|
||||
if (match[3]) {
|
||||
const regex = new RegExp(match[3], 'ig');
|
||||
const parts = context.getSourceCode().getText().split(regex);
|
||||
if (parts.length < 3) {
|
||||
context.report({
|
||||
node: ident,
|
||||
messageId: 'subject',
|
||||
data: { subject: match[3] }
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
getIdent(def) {
|
||||
if (!def) {
|
||||
return;
|
||||
}
|
||||
if (def.type === experimental_utils_1.AST_NODE_TYPES.Identifier) {
|
||||
return def;
|
||||
}
|
||||
else if ((def.type === experimental_utils_1.AST_NODE_TYPES.TSPropertySignature || def.type === experimental_utils_1.AST_NODE_TYPES.ClassProperty) && def.key.type === experimental_utils_1.AST_NODE_TYPES.Identifier) {
|
||||
return def.key;
|
||||
}
|
||||
return this.getIdent(def.parent);
|
||||
}
|
||||
},
|
||||
_a._nameRegExp = /on(Did|Will)([A-Z][a-z]+)([A-Z][a-z]+)?/,
|
||||
_a);
|
30
lib/vscode/build/lib/eslint/vscode-dts-interface-naming.js
Normal file
@ -0,0 +1,30 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
var _a;
|
||||
module.exports = new (_a = class ApiInterfaceNaming {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
naming: 'Interfaces must not be prefixed with uppercase `I`',
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
return {
|
||||
['TSInterfaceDeclaration Identifier']: (node) => {
|
||||
const name = node.name;
|
||||
if (ApiInterfaceNaming._nameRegExp.test(name)) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'naming'
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
},
|
||||
_a._nameRegExp = /I[A-Z]/,
|
||||
_a);
|
27
lib/vscode/build/lib/eslint/vscode-dts-literal-or-types.js
Normal file
@ -0,0 +1,27 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
module.exports = new class ApiLiteralOrTypes {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
docs: { url: 'https://github.com/microsoft/vscode/wiki/Extension-API-guidelines#enums' },
|
||||
messages: { useEnum: 'Use enums, not literal-or-types', }
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
return {
|
||||
['TSTypeAnnotation TSUnionType TSLiteralType']: (node) => {
|
||||
var _a;
|
||||
if (((_a = node.literal) === null || _a === void 0 ? void 0 : _a.type) === 'TSNullKeyword') {
|
||||
return;
|
||||
}
|
||||
context.report({
|
||||
node: node,
|
||||
messageId: 'useEnum'
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
38
lib/vscode/build/lib/eslint/vscode-dts-provider-naming.js
Normal file
@ -0,0 +1,38 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
var _a;
|
||||
module.exports = new (_a = class ApiProviderNaming {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
naming: 'A provider should only have functions like provideXYZ or resolveXYZ',
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
const config = context.options[0];
|
||||
const allowed = new Set(config.allowed);
|
||||
return {
|
||||
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature']: (node) => {
|
||||
var _a;
|
||||
const interfaceName = ((_a = node.parent) === null || _a === void 0 ? void 0 : _a.parent).id.name;
|
||||
if (allowed.has(interfaceName)) {
|
||||
// allowed
|
||||
return;
|
||||
}
|
||||
const methodName = node.key.name;
|
||||
if (!ApiProviderNaming._providerFunctionNames.test(methodName)) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'naming'
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
},
|
||||
_a._providerFunctionNames = /^(provide|resolve|prepare).+/,
|
||||
_a);
|
45
lib/vscode/build/lib/eslint/vscode-dts-provider-naming.ts
Normal file
@ -0,0 +1,45 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as eslint from 'eslint';
|
||||
import { TSESTree } from '@typescript-eslint/experimental-utils';
|
||||
|
||||
export = new class ApiProviderNaming implements eslint.Rule.RuleModule {
|
||||
|
||||
readonly meta: eslint.Rule.RuleMetaData = {
|
||||
messages: {
|
||||
naming: 'A provider should only have functions like provideXYZ or resolveXYZ',
|
||||
}
|
||||
};
|
||||
|
||||
private static _providerFunctionNames = /^(provide|resolve|prepare).+/;
|
||||
|
||||
create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {
|
||||
|
||||
const config = <{ allowed: string[] }>context.options[0];
|
||||
const allowed = new Set(config.allowed);
|
||||
|
||||
return {
|
||||
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature']: (node: any) => {
|
||||
|
||||
|
||||
const interfaceName = (<TSESTree.TSInterfaceDeclaration>(<TSESTree.Identifier>node).parent?.parent).id.name;
|
||||
if (allowed.has(interfaceName)) {
|
||||
// allowed
|
||||
return;
|
||||
}
|
||||
|
||||
const methodName = (<any>(<TSESTree.TSMethodSignatureNonComputedName>node).key).name;
|
||||
|
||||
if (!ApiProviderNaming._providerFunctionNames.test(methodName)) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'naming'
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
35
lib/vscode/build/lib/eslint/vscode-dts-region-comments.js
Normal file
@ -0,0 +1,35 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
module.exports = new class ApiEventNaming {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
comment: 'region comments should start with the GH issue link, e.g #region https://github.com/microsoft/vscode/issues/<number>',
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
const sourceCode = context.getSourceCode();
|
||||
return {
|
||||
['Program']: (_node) => {
|
||||
for (let comment of sourceCode.getAllComments()) {
|
||||
if (comment.type !== 'Line') {
|
||||
continue;
|
||||
}
|
||||
if (!comment.value.match(/^\s*#region /)) {
|
||||
continue;
|
||||
}
|
||||
if (!comment.value.match(/https:\/\/github.com\/microsoft\/vscode\/issues\/\d+/i)) {
|
||||
context.report({
|
||||
node: comment,
|
||||
messageId: 'comment',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
41
lib/vscode/build/lib/eslint/vscode-dts-region-comments.ts
Normal file
@ -0,0 +1,41 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as eslint from 'eslint';
|
||||
|
||||
export = new class ApiEventNaming implements eslint.Rule.RuleModule {
|
||||
|
||||
readonly meta: eslint.Rule.RuleMetaData = {
|
||||
messages: {
|
||||
comment: 'region comments should start with the GH issue link, e.g #region https://github.com/microsoft/vscode/issues/<number>',
|
||||
}
|
||||
};
|
||||
|
||||
create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {
|
||||
|
||||
const sourceCode = context.getSourceCode();
|
||||
|
||||
|
||||
return {
|
||||
['Program']: (_node: any) => {
|
||||
|
||||
for (let comment of sourceCode.getAllComments()) {
|
||||
if (comment.type !== 'Line') {
|
||||
continue;
|
||||
}
|
||||
if (!comment.value.match(/^\s*#region /)) {
|
||||
continue;
|
||||
}
|
||||
if (!comment.value.match(/https:\/\/github.com\/microsoft\/vscode\/issues\/\d+/i)) {
|
||||
context.report({
|
||||
node: <any>comment,
|
||||
messageId: 'comment',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
24
lib/vscode/build/lib/eslint/vscode-dts-use-thenable.js
Normal file
@ -0,0 +1,24 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
module.exports = new class ApiEventNaming {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
usage: 'Use the Thenable-type instead of the Promise type',
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
return {
|
||||
['TSTypeAnnotation TSTypeReference Identifier[name="Promise"]']: (node) => {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'usage',
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
30
lib/vscode/build/lib/eslint/vscode-dts-use-thenable.ts
Normal file
@ -0,0 +1,30 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as eslint from 'eslint';
|
||||
|
||||
export = new class ApiEventNaming implements eslint.Rule.RuleModule {
|
||||
|
||||
readonly meta: eslint.Rule.RuleMetaData = {
|
||||
messages: {
|
||||
usage: 'Use the Thenable-type instead of the Promise type',
|
||||
}
|
||||
};
|
||||
|
||||
create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {
|
||||
|
||||
|
||||
|
||||
return {
|
||||
['TSTypeAnnotation TSTypeReference Identifier[name="Promise"]']: (node: any) => {
|
||||
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'usage',
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
330
lib/vscode/build/lib/extensions.js
Normal file
@ -0,0 +1,330 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.translatePackageJSON = exports.scanBuiltinExtensions = exports.packageMarketplaceExtensionsStream = exports.packageLocalExtensionsStream = exports.fromMarketplace = void 0;
|
||||
const es = require("event-stream");
|
||||
const fs = require("fs");
|
||||
const glob = require("glob");
|
||||
const gulp = require("gulp");
|
||||
const path = require("path");
|
||||
const File = require("vinyl");
|
||||
const stats_1 = require("./stats");
|
||||
const util2 = require("./util");
|
||||
const vzip = require('gulp-vinyl-zip');
|
||||
const filter = require("gulp-filter");
|
||||
const rename = require("gulp-rename");
|
||||
const fancyLog = require("fancy-log");
|
||||
const ansiColors = require("ansi-colors");
|
||||
const buffer = require('gulp-buffer');
|
||||
const jsoncParser = require("jsonc-parser");
|
||||
const util = require('./util');
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
const commit = util.getVersion(root);
|
||||
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
|
||||
function minifyExtensionResources(input) {
|
||||
const jsonFilter = filter(['**/*.json', '**/*.code-snippets'], { restore: true });
|
||||
return input
|
||||
.pipe(jsonFilter)
|
||||
.pipe(buffer())
|
||||
.pipe(es.mapSync((f) => {
|
||||
const errors = [];
|
||||
const value = jsoncParser.parse(f.contents.toString('utf8'), errors);
|
||||
if (errors.length === 0) {
|
||||
// file parsed OK => just stringify to drop whitespace and comments
|
||||
f.contents = Buffer.from(JSON.stringify(value));
|
||||
}
|
||||
return f;
|
||||
}))
|
||||
.pipe(jsonFilter.restore);
|
||||
}
|
||||
function updateExtensionPackageJSON(input, update) {
|
||||
const packageJsonFilter = filter('extensions/*/package.json', { restore: true });
|
||||
return input
|
||||
.pipe(packageJsonFilter)
|
||||
.pipe(buffer())
|
||||
.pipe(es.mapSync((f) => {
|
||||
const data = JSON.parse(f.contents.toString('utf8'));
|
||||
f.contents = Buffer.from(JSON.stringify(update(data)));
|
||||
return f;
|
||||
}))
|
||||
.pipe(packageJsonFilter.restore);
|
||||
}
|
||||
function fromLocal(extensionPath, forWeb) {
|
||||
const webpackConfigFileName = forWeb ? 'extension-browser.webpack.config.js' : 'extension.webpack.config.js';
|
||||
const isWebPacked = fs.existsSync(path.join(extensionPath, webpackConfigFileName));
|
||||
let input = isWebPacked
|
||||
? fromLocalWebpack(extensionPath, webpackConfigFileName)
|
||||
: fromLocalNormal(extensionPath);
|
||||
if (isWebPacked) {
|
||||
input = updateExtensionPackageJSON(input, (data) => {
|
||||
delete data.scripts;
|
||||
delete data.dependencies;
|
||||
delete data.devDependencies;
|
||||
if (data.main) {
|
||||
data.main = data.main.replace('/out/', '/dist/');
|
||||
}
|
||||
return data;
|
||||
});
|
||||
}
|
||||
return input;
|
||||
}
|
||||
function fromLocalWebpack(extensionPath, webpackConfigFileName) {
|
||||
const result = es.through();
|
||||
const packagedDependencies = [];
|
||||
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
|
||||
if (packageJsonConfig.dependencies) {
|
||||
const webpackRootConfig = require(path.join(extensionPath, webpackConfigFileName));
|
||||
for (const key in webpackRootConfig.externals) {
|
||||
if (key in packageJsonConfig.dependencies) {
|
||||
packagedDependencies.push(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
const vsce = require('vsce');
|
||||
const webpack = require('webpack');
|
||||
const webpackGulp = require('webpack-stream');
|
||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
|
||||
const files = fileNames
|
||||
.map(fileName => path.join(extensionPath, fileName))
|
||||
.map(filePath => new File({
|
||||
path: filePath,
|
||||
stat: fs.statSync(filePath),
|
||||
base: extensionPath,
|
||||
contents: fs.createReadStream(filePath)
|
||||
}));
|
||||
// check for a webpack configuration files, then invoke webpack
|
||||
// and merge its output with the files stream.
|
||||
const webpackConfigLocations = glob.sync(path.join(extensionPath, '**', webpackConfigFileName), { ignore: ['**/node_modules'] });
|
||||
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => {
|
||||
const webpackDone = (err, stats) => {
|
||||
fancyLog(`Bundled extension: ${ansiColors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
|
||||
if (err) {
|
||||
result.emit('error', err);
|
||||
}
|
||||
const { compilation } = stats;
|
||||
if (compilation.errors.length > 0) {
|
||||
result.emit('error', compilation.errors.join('\n'));
|
||||
}
|
||||
if (compilation.warnings.length > 0) {
|
||||
result.emit('error', compilation.warnings.join('\n'));
|
||||
}
|
||||
};
|
||||
const webpackConfig = Object.assign(Object.assign({}, require(webpackConfigPath)), { mode: 'production' });
|
||||
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
|
||||
return webpackGulp(webpackConfig, webpack, webpackDone)
|
||||
.pipe(es.through(function (data) {
|
||||
data.stat = data.stat || {};
|
||||
data.base = extensionPath;
|
||||
this.emit('data', data);
|
||||
}))
|
||||
.pipe(es.through(function (data) {
|
||||
// source map handling:
|
||||
// * rewrite sourceMappingURL
|
||||
// * save to disk so that upload-task picks this up
|
||||
const contents = data.contents.toString('utf8');
|
||||
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
|
||||
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
|
||||
}), 'utf8');
|
||||
this.emit('data', data);
|
||||
}));
|
||||
});
|
||||
es.merge(...webpackStreams, es.readArray(files))
|
||||
// .pipe(es.through(function (data) {
|
||||
// // debug
|
||||
// console.log('out', data.path, data.contents.length);
|
||||
// this.emit('data', data);
|
||||
// }))
|
||||
.pipe(result);
|
||||
}).catch(err => {
|
||||
console.error(extensionPath);
|
||||
console.error(packagedDependencies);
|
||||
result.emit('error', err);
|
||||
});
|
||||
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
|
||||
}
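// Illustrative sketch (hypothetical base URL and extension name): the sourceMappingURL
// rewrite above turns a relative map reference into an absolute one under sourceMappingURLBase.
function sourceMappingUrlRewriteExample() {
    const bundled = 'console.log(1);\n//# sourceMappingURL=extension.js.map';
    return bundled.replace(/\n\/\/# sourceMappingURL=(.*)$/gm,
        (_m, g1) => `\n//# sourceMappingURL=https://example.com/sourcemaps/extensions/my-extension/dist/${g1}`);
}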
|
||||
function fromLocalNormal(extensionPath) {
|
||||
const result = es.through();
|
||||
const vsce = require('vsce');
|
||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
|
||||
.then(fileNames => {
|
||||
const files = fileNames
|
||||
.map(fileName => path.join(extensionPath, fileName))
|
||||
.map(filePath => new File({
|
||||
path: filePath,
|
||||
stat: fs.statSync(filePath),
|
||||
base: extensionPath,
|
||||
contents: fs.createReadStream(filePath)
|
||||
}));
|
||||
es.readArray(files).pipe(result);
|
||||
})
|
||||
.catch(err => result.emit('error', err));
|
||||
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
|
||||
}
|
||||
const baseHeaders = {
|
||||
'X-Market-Client-Id': 'VSCode Build',
|
||||
'User-Agent': 'VSCode Build',
|
||||
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
|
||||
};
|
||||
function fromMarketplace(extensionName, version, metadata) {
|
||||
const remote = require('gulp-remote-retry-src');
|
||||
const json = require('gulp-json-editor');
|
||||
const [publisher, name] = extensionName.split('.');
|
||||
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`;
|
||||
fancyLog('Downloading extension:', ansiColors.yellow(`${extensionName}@${version}`), '...');
|
||||
const options = {
|
||||
base: url,
|
||||
requestOptions: {
|
||||
gzip: true,
|
||||
headers: baseHeaders
|
||||
}
|
||||
};
|
||||
const packageJsonFilter = filter('package.json', { restore: true });
|
||||
return remote('', options)
|
||||
.pipe(vzip.src())
|
||||
.pipe(filter('extension/**'))
|
||||
.pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, '')))
|
||||
.pipe(packageJsonFilter)
|
||||
.pipe(buffer())
|
||||
.pipe(json({ __metadata: metadata }))
|
||||
.pipe(packageJsonFilter.restore);
|
||||
}
|
||||
exports.fromMarketplace = fromMarketplace;
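// Illustrative sketch (hypothetical extension id and version): the gallery URL shape
// that fromMarketplace downloads from.
function marketplaceUrlExample() {
    const [publisher, name] = 'ms-vscode.references-view'.split('.');
    const version = '0.0.62'; // hypothetical version
    return `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`;
}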
|
||||
const excludedExtensions = [
|
||||
'vscode-api-tests',
|
||||
'vscode-colorize-tests',
|
||||
'vscode-test-resolver',
|
||||
'ms-vscode.node-debug',
|
||||
'ms-vscode.node-debug2',
|
||||
'vscode-notebook-tests',
|
||||
'vscode-custom-editor-tests',
|
||||
];
|
||||
const marketplaceWebExtensionsExclude = new Set([
|
||||
'ms-vscode.node-debug',
|
||||
'ms-vscode.node-debug2',
|
||||
'ms-vscode.js-debug-companion',
|
||||
'ms-vscode.js-debug',
|
||||
'ms-vscode.vscode-js-profile-table'
|
||||
]);
|
||||
const productJson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
|
||||
const builtInExtensions = productJson.builtInExtensions || [];
|
||||
const webBuiltInExtensions = productJson.webBuiltInExtensions || [];
|
||||
/**
|
||||
* Loosely based on `getExtensionKind` from `src/vs/workbench/services/extensions/common/extensionsUtil.ts`
|
||||
*/
|
||||
function isWebExtension(manifest) {
|
||||
if (typeof manifest.extensionKind !== 'undefined') {
|
||||
const extensionKind = Array.isArray(manifest.extensionKind) ? manifest.extensionKind : [manifest.extensionKind];
|
||||
return (extensionKind.indexOf('web') >= 0);
|
||||
}
|
||||
return (!Boolean(manifest.main) || Boolean(manifest.browser));
|
||||
}
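// Illustrative sketch (hypothetical manifests): how isWebExtension classifies extensions.
function isWebExtensionExamples() {
    return [
        isWebExtension({ extensionKind: ['web', 'workspace'] }), // true: 'web' is listed explicitly
        isWebExtension({ main: './out/extension.js' }),          // false: node entry point, no browser entry
        isWebExtension({ browser: './dist/browser.js' })         // true: browser entry point present
    ];
}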
|
||||
function packageLocalExtensionsStream(forWeb) {
|
||||
const localExtensionsDescriptions = (glob.sync('extensions/*/package.json')
|
||||
.map(manifestPath => {
|
||||
const absoluteManifestPath = path.join(root, manifestPath);
|
||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||
const extensionName = path.basename(extensionPath);
|
||||
return { name: extensionName, path: extensionPath, manifestPath: absoluteManifestPath };
|
||||
})
|
||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||
.filter(({ manifestPath }) => (forWeb ? isWebExtension(require(manifestPath)) : true)));
|
||||
const localExtensionsStream = minifyExtensionResources(es.merge(...localExtensionsDescriptions.map(extension => {
|
||||
return fromLocal(extension.path, forWeb)
|
||||
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||
})));
|
||||
let result;
|
||||
if (forWeb) {
|
||||
result = localExtensionsStream;
|
||||
}
|
||||
else {
|
||||
// also include shared node modules
|
||||
result = es.merge(localExtensionsStream, gulp.src('extensions/node_modules/**', { base: '.' }));
|
||||
}
|
||||
return (result
|
||||
.pipe(util2.setExecutableBit(['**/*.sh'])));
|
||||
}
|
||||
exports.packageLocalExtensionsStream = packageLocalExtensionsStream;
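// Illustrative usage sketch (the destination path is a hypothetical example, not the
// one the real gulp tasks use):
function packageLocalExtensionsExample() {
    return packageLocalExtensionsStream(false)
        .pipe(gulp.dest('.build/extensions-example'));
}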
|
||||
function packageMarketplaceExtensionsStream(forWeb) {
|
||||
const marketplaceExtensionsDescriptions = [
|
||||
...builtInExtensions.filter(({ name }) => (forWeb ? !marketplaceWebExtensionsExclude.has(name) : true)),
|
||||
...(forWeb ? webBuiltInExtensions : [])
|
||||
];
|
||||
const marketplaceExtensionsStream = minifyExtensionResources(es.merge(...marketplaceExtensionsDescriptions
|
||||
.map(extension => {
|
||||
const input = fromMarketplace(extension.name, extension.version, extension.metadata)
|
||||
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||
return updateExtensionPackageJSON(input, (data) => {
|
||||
delete data.scripts;
|
||||
delete data.dependencies;
|
||||
delete data.devDependencies;
|
||||
return data;
|
||||
});
|
||||
})));
|
||||
return (marketplaceExtensionsStream
|
||||
.pipe(util2.setExecutableBit(['**/*.sh'])));
|
||||
}
|
||||
exports.packageMarketplaceExtensionsStream = packageMarketplaceExtensionsStream;
|
||||
function scanBuiltinExtensions(extensionsRoot, exclude = []) {
|
||||
const scannedExtensions = [];
|
||||
try {
|
||||
const extensionsFolders = fs.readdirSync(extensionsRoot);
|
||||
for (const extensionFolder of extensionsFolders) {
|
||||
if (exclude.indexOf(extensionFolder) >= 0) {
|
||||
continue;
|
||||
}
|
||||
const packageJSONPath = path.join(extensionsRoot, extensionFolder, 'package.json');
|
||||
if (!fs.existsSync(packageJSONPath)) {
|
||||
continue;
|
||||
}
|
||||
let packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8'));
|
||||
if (!isWebExtension(packageJSON)) {
|
||||
continue;
|
||||
}
|
||||
const children = fs.readdirSync(path.join(extensionsRoot, extensionFolder));
|
||||
const packageNLSPath = children.filter(child => child === 'package.nls.json')[0];
|
||||
const packageNLS = packageNLSPath ? JSON.parse(fs.readFileSync(path.join(extensionsRoot, extensionFolder, packageNLSPath)).toString()) : undefined;
|
||||
const readme = children.filter(child => /^readme(\.txt|\.md|)$/i.test(child))[0];
|
||||
const changelog = children.filter(child => /^changelog(\.txt|\.md|)$/i.test(child))[0];
|
||||
scannedExtensions.push({
|
||||
extensionPath: extensionFolder,
|
||||
packageJSON,
|
||||
packageNLS,
|
||||
readmePath: readme ? path.join(extensionFolder, readme) : undefined,
|
||||
changelogPath: changelog ? path.join(extensionFolder, changelog) : undefined,
|
||||
});
|
||||
}
|
||||
return scannedExtensions;
|
||||
}
|
||||
catch (ex) {
|
||||
return scannedExtensions;
|
||||
}
|
||||
}
|
||||
exports.scanBuiltinExtensions = scanBuiltinExtensions;
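// Illustrative sketch (hypothetical root folder): each scanned entry carries the parsed
// manifest plus optional NLS, readme and changelog paths relative to extensionsRoot.
function scanBuiltinExtensionsExample(extensionsRoot) {
    return scanBuiltinExtensions(extensionsRoot, ['vscode-test-resolver'])
        .map(e => ({ folder: e.extensionPath, name: e.packageJSON.name, hasReadme: !!e.readmePath }));
}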
|
||||
function translatePackageJSON(packageJSON, packageNLSPath) {
|
||||
const CharCode_PC = '%'.charCodeAt(0);
|
||||
const packageNls = JSON.parse(fs.readFileSync(packageNLSPath).toString());
|
||||
const translate = (obj) => {
|
||||
for (let key in obj) {
|
||||
const val = obj[key];
|
||||
if (Array.isArray(val)) {
|
||||
val.forEach(translate);
|
||||
}
|
||||
else if (val && typeof val === 'object') {
|
||||
translate(val);
|
||||
}
|
||||
else if (typeof val === 'string' && val.charCodeAt(0) === CharCode_PC && val.charCodeAt(val.length - 1) === CharCode_PC) {
|
||||
const translated = packageNls[val.substr(1, val.length - 2)];
|
||||
if (translated) {
|
||||
obj[key] = translated;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
translate(packageJSON);
|
||||
return packageJSON;
|
||||
}
|
||||
exports.translatePackageJSON = translatePackageJSON;
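// Illustrative sketch (hypothetical folder): "%key%" strings in a manifest are replaced
// with the values defined in the package.nls.json that sits next to it.
function translatePackageJSONExample(extensionFolder) {
    const manifest = JSON.parse(fs.readFileSync(path.join(extensionFolder, 'package.json'), 'utf8'));
    return translatePackageJSON(manifest, path.join(extensionFolder, 'package.nls.json'));
}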
|
@ -10,20 +10,15 @@ import * as gulp from 'gulp';
import * as path from 'path';
import { Stream } from 'stream';
import * as File from 'vinyl';
import * as vsce from 'vsce';
import { createStatsStream } from './stats';
import * as util2 from './util';
import remote = require('gulp-remote-retry-src');
const vzip = require('gulp-vinyl-zip');
import filter = require('gulp-filter');
import rename = require('gulp-rename');
import * as fancyLog from 'fancy-log';
import * as ansiColors from 'ansi-colors';
const buffer = require('gulp-buffer');
import json = require('gulp-json-editor');
import * as jsoncParser from 'jsonc-parser';
const webpack = require('webpack');
const webpackGulp = require('webpack-stream');
const util = require('./util');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
@ -97,6 +92,10 @@ function fromLocalWebpack(extensionPath: string, webpackConfigFileName: string):
|
||||
}
|
||||
}
|
||||
|
||||
const vsce = require('vsce') as typeof import('vsce');
|
||||
const webpack = require('webpack');
|
||||
const webpackGulp = require('webpack-stream');
|
||||
|
||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
|
||||
const files = fileNames
|
||||
.map(fileName => path.join(extensionPath, fileName))
|
||||
@ -175,6 +174,8 @@ function fromLocalWebpack(extensionPath: string, webpackConfigFileName: string):
|
||||
function fromLocalNormal(extensionPath: string): Stream {
|
||||
const result = es.through();
|
||||
|
||||
const vsce = require('vsce') as typeof import('vsce');
|
||||
|
||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
|
||||
.then(fileNames => {
|
||||
const files = fileNames
|
||||
@ -200,6 +201,9 @@ const baseHeaders = {
|
||||
};
|
||||
|
||||
export function fromMarketplace(extensionName: string, version: string, metadata: any): Stream {
|
||||
const remote = require('gulp-remote-retry-src');
|
||||
const json = require('gulp-json-editor') as typeof import('gulp-json-editor');
|
||||
|
||||
const [publisher, name] = extensionName.split('.');
|
||||
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`;
|
||||
|
||||
@ -234,9 +238,13 @@ const excludedExtensions = [
|
||||
'vscode-custom-editor-tests',
|
||||
];
|
||||
|
||||
const marketplaceWebExtensions = [
|
||||
'ms-vscode.references-view'
|
||||
];
|
||||
const marketplaceWebExtensionsExclude = new Set([
|
||||
'ms-vscode.node-debug',
|
||||
'ms-vscode.node-debug2',
|
||||
'ms-vscode.js-debug-companion',
|
||||
'ms-vscode.js-debug',
|
||||
'ms-vscode.vscode-js-profile-table'
|
||||
]);
|
||||
|
||||
interface IBuiltInExtension {
|
||||
name: string;
|
||||
@ -304,7 +312,7 @@ export function packageLocalExtensionsStream(forWeb: boolean): Stream {
|
||||
|
||||
export function packageMarketplaceExtensionsStream(forWeb: boolean): Stream {
|
||||
const marketplaceExtensionsDescriptions = [
|
||||
...builtInExtensions.filter(({ name }) => (forWeb ? marketplaceWebExtensions.indexOf(name) >= 0 : true)),
|
||||
...builtInExtensions.filter(({ name }) => (forWeb ? !marketplaceWebExtensionsExclude.has(name) : true)),
|
||||
...(forWeb ? webBuiltInExtensions : [])
|
||||
];
|
||||
const marketplaceExtensionsStream = minifyExtensionResources(
|
||||
|
54
lib/vscode/build/lib/git.js
Normal file
@ -0,0 +1,54 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.getVersion = void 0;
const path = require("path");
const fs = require("fs");
/**
 * Returns the sha1 commit version of a repository or undefined in case of failure.
 */
function getVersion(repo) {
    const git = path.join(repo, '.git');
    const headPath = path.join(git, 'HEAD');
    let head;
    try {
        head = fs.readFileSync(headPath, 'utf8').trim();
    }
    catch (e) {
        return undefined;
    }
    if (/^[0-9a-f]{40}$/i.test(head)) {
        return head;
    }
    const refMatch = /^ref: (.*)$/.exec(head);
    if (!refMatch) {
        return undefined;
    }
    const ref = refMatch[1];
    const refPath = path.join(git, ref);
    try {
        return fs.readFileSync(refPath, 'utf8').trim();
    }
    catch (e) {
        // noop
    }
    const packedRefsPath = path.join(git, 'packed-refs');
    let refsRaw;
    try {
        refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
    }
    catch (e) {
        return undefined;
    }
    const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
    let refsMatch;
    let refs = {};
    while (refsMatch = refsRegex.exec(refsRaw)) {
        refs[refsMatch[2]] = refsMatch[1];
    }
    return refs[ref];
}
exports.getVersion = getVersion;
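// Illustrative usage sketch (the repository root path is an assumption for the example):
function getVersionExample() {
    const repoRoot = path.join(__dirname, '..', '..');
    const commit = getVersion(repoRoot);
    return commit; // 40-character sha1, or undefined outside a git checkout
}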
|
1204
lib/vscode/build/lib/i18n.js
Normal file
File diff suppressed because it is too large
@ -182,6 +182,10 @@
|
||||
"name": "vs/workbench/contrib/tasks",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/contrib/testing",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/contrib/terminal",
|
||||
"project": "vscode-workbench"
|
||||
@ -222,6 +226,10 @@
|
||||
"name": "vs/workbench/contrib/customEditor",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/contrib/externalUriOpener",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/contrib/welcome",
|
||||
"project": "vscode-workbench"
|
||||
@ -250,6 +258,10 @@
|
||||
"name": "vs/workbench/services/bulkEdit",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/services/clipboard",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/services/commands",
|
||||
"project": "vscode-workbench"
|
||||
|
283
lib/vscode/build/lib/layersChecker.js
Normal file
@ -0,0 +1,283 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ts = require("typescript");
|
||||
const fs_1 = require("fs");
|
||||
const path_1 = require("path");
|
||||
const minimatch_1 = require("minimatch");
|
||||
//
|
||||
// #############################################################################################
|
||||
//
|
||||
// A custom typescript checker for the specific task of detecting the use of certain types in a
|
||||
// layer that does not allow such use. For example:
|
||||
// - using DOM globals in common/node/electron-main layer (e.g. HTMLElement)
|
||||
// - using node.js globals in common/browser layer (e.g. process)
|
||||
//
|
||||
// Make changes to below RULES to lift certain files from these checks only if absolutely needed
|
||||
//
|
||||
// #############################################################################################
|
||||
//
|
||||
// Types we assume are present in all implementations of JS VMs (node.js, browsers)
|
||||
// Feel free to add more core types as you see needed if present in node.js and browsers
|
||||
const CORE_TYPES = [
|
||||
'require',
|
||||
'setTimeout',
|
||||
'clearTimeout',
|
||||
'setInterval',
|
||||
'clearInterval',
|
||||
'console',
|
||||
'log',
|
||||
'info',
|
||||
'warn',
|
||||
'error',
|
||||
'group',
|
||||
'groupEnd',
|
||||
'table',
|
||||
'assert',
|
||||
'Error',
|
||||
'String',
|
||||
'throws',
|
||||
'stack',
|
||||
'captureStackTrace',
|
||||
'stackTraceLimit',
|
||||
'TextDecoder',
|
||||
'TextEncoder',
|
||||
'encode',
|
||||
'decode',
|
||||
'self',
|
||||
'trimLeft',
|
||||
'trimRight'
|
||||
];
|
||||
// Types that are defined in a common layer but are known to be only
|
||||
// available in native environments should not be allowed in browser
|
||||
const NATIVE_TYPES = [
|
||||
'NativeParsedArgs',
|
||||
'INativeEnvironmentService',
|
||||
'INativeWindowConfiguration',
|
||||
'ICommonNativeHostService'
|
||||
];
|
||||
const RULES = [
|
||||
// Tests: skip
|
||||
{
|
||||
target: '**/vs/**/test/**',
|
||||
skip: true // -> skip all test files
|
||||
},
|
||||
// Common: vs/base/common/platform.ts
|
||||
{
|
||||
target: '**/vs/base/common/platform.ts',
|
||||
allowedTypes: [
|
||||
...CORE_TYPES,
|
||||
// Safe access to postMessage() and friends
|
||||
'MessageEvent',
|
||||
'data'
|
||||
],
|
||||
disallowedTypes: NATIVE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts',
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
},
|
||||
// Common: vs/platform/environment/common/argv.ts
|
||||
{
|
||||
target: '**/vs/platform/environment/common/argv.ts',
|
||||
disallowedTypes: [ /* Ignore native types that are defined from here */],
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts',
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
},
|
||||
// Common: vs/platform/environment/common/environment.ts
|
||||
{
|
||||
target: '**/vs/platform/environment/common/environment.ts',
|
||||
disallowedTypes: [ /* Ignore native types that are defined from here */],
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts',
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
},
|
||||
// Common: vs/platform/windows/common/windows.ts
|
||||
{
|
||||
target: '**/vs/platform/windows/common/windows.ts',
|
||||
disallowedTypes: [ /* Ignore native types that are defined from here */],
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts',
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
},
|
||||
// Common: vs/platform/native/common/native.ts
|
||||
{
|
||||
target: '**/vs/platform/native/common/native.ts',
|
||||
disallowedTypes: [ /* Ignore native types that are defined from here */],
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts',
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
},
|
||||
// Common: vs/workbench/api/common/extHostExtensionService.ts
|
||||
{
|
||||
target: '**/vs/workbench/api/common/extHostExtensionService.ts',
|
||||
allowedTypes: [
|
||||
...CORE_TYPES,
|
||||
// Safe access to global
|
||||
'global'
|
||||
],
|
||||
disallowedTypes: NATIVE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts',
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
},
|
||||
// Common
|
||||
{
|
||||
target: '**/vs/**/common/**',
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedTypes: NATIVE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts',
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
},
|
||||
// Browser
|
||||
{
|
||||
target: '**/vs/**/browser/**',
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedTypes: NATIVE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
},
|
||||
// Browser (editor contrib)
|
||||
{
|
||||
target: '**/src/vs/editor/contrib/**',
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedTypes: NATIVE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
},
|
||||
// node.js
|
||||
{
|
||||
target: '**/vs/**/node/**',
|
||||
allowedTypes: [
|
||||
...CORE_TYPES,
|
||||
// --> types from node.d.ts that duplicate from lib.dom.d.ts
|
||||
'URL',
|
||||
'protocol',
|
||||
'hostname',
|
||||
'port',
|
||||
'pathname',
|
||||
'search',
|
||||
'username',
|
||||
'password'
|
||||
],
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts' // no DOM
|
||||
]
|
||||
},
|
||||
// Electron (sandbox)
|
||||
{
|
||||
target: '**/vs/**/electron-sandbox/**',
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
},
|
||||
// Electron (renderer): skip
|
||||
{
|
||||
target: '**/vs/**/electron-browser/**',
|
||||
skip: true // -> supports all types
|
||||
},
|
||||
// Electron (main)
|
||||
{
|
||||
target: '**/vs/**/electron-main/**',
|
||||
allowedTypes: [
|
||||
...CORE_TYPES,
|
||||
// --> types from electron.d.ts that duplicate from lib.dom.d.ts
|
||||
'Event',
|
||||
'Request'
|
||||
],
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts' // no DOM
|
||||
]
|
||||
}
|
||||
];
|
||||
const TS_CONFIG_PATH = path_1.join(__dirname, '../../', 'src', 'tsconfig.json');
|
||||
let hasErrors = false;
|
||||
function checkFile(program, sourceFile, rule) {
|
||||
checkNode(sourceFile);
|
||||
function checkNode(node) {
|
||||
var _a, _b;
|
||||
if (node.kind !== ts.SyntaxKind.Identifier) {
|
||||
return ts.forEachChild(node, checkNode); // recurse down
|
||||
}
|
||||
const text = node.getText(sourceFile);
|
||||
if ((_a = rule.allowedTypes) === null || _a === void 0 ? void 0 : _a.some(allowed => allowed === text)) {
|
||||
return; // override
|
||||
}
|
||||
if ((_b = rule.disallowedTypes) === null || _b === void 0 ? void 0 : _b.some(disallowed => disallowed === text)) {
|
||||
const { line, character } = sourceFile.getLineAndCharacterOfPosition(node.getStart());
|
||||
console.log(`[build/lib/layersChecker.ts]: Reference to '${text}' violates layer '${rule.target}' (${sourceFile.fileName} (${line + 1},${character + 1})`);
|
||||
hasErrors = true;
|
||||
return;
|
||||
}
|
||||
const checker = program.getTypeChecker();
|
||||
const symbol = checker.getSymbolAtLocation(node);
|
||||
if (symbol) {
|
||||
const declarations = symbol.declarations;
|
||||
if (Array.isArray(declarations)) {
|
||||
for (const declaration of declarations) {
|
||||
if (declaration) {
|
||||
const parent = declaration.parent;
|
||||
if (parent) {
|
||||
const parentSourceFile = parent.getSourceFile();
|
||||
if (parentSourceFile) {
|
||||
const definitionFileName = parentSourceFile.fileName;
|
||||
if (rule.disallowedDefinitions) {
|
||||
for (const disallowedDefinition of rule.disallowedDefinitions) {
|
||||
if (definitionFileName.indexOf(disallowedDefinition) >= 0) {
|
||||
const { line, character } = sourceFile.getLineAndCharacterOfPosition(node.getStart());
|
||||
console.log(`[build/lib/layersChecker.ts]: Reference to '${text}' from '${disallowedDefinition}' violates layer '${rule.target}' (${sourceFile.fileName} (${line + 1},${character + 1})`);
|
||||
hasErrors = true;
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
function createProgram(tsconfigPath) {
|
||||
const tsConfig = ts.readConfigFile(tsconfigPath, ts.sys.readFile);
|
||||
const configHostParser = { fileExists: fs_1.existsSync, readDirectory: ts.sys.readDirectory, readFile: file => fs_1.readFileSync(file, 'utf8'), useCaseSensitiveFileNames: process.platform === 'linux' };
|
||||
const tsConfigParsed = ts.parseJsonConfigFileContent(tsConfig.config, configHostParser, path_1.resolve(path_1.dirname(tsconfigPath)), { noEmit: true });
|
||||
const compilerHost = ts.createCompilerHost(tsConfigParsed.options, true);
|
||||
return ts.createProgram(tsConfigParsed.fileNames, tsConfigParsed.options, compilerHost);
|
||||
}
|
||||
//
|
||||
// Create program and start checking
|
||||
//
|
||||
const program = createProgram(TS_CONFIG_PATH);
|
||||
for (const sourceFile of program.getSourceFiles()) {
|
||||
for (const rule of RULES) {
|
||||
if (minimatch_1.match([sourceFile.fileName], rule.target).length > 0) {
|
||||
if (!rule.skip) {
|
||||
checkFile(program, sourceFile, rule);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (hasErrors) {
|
||||
process.exit(1);
|
||||
}
|
@ -25,8 +25,6 @@ import { match } from 'minimatch';
// Feel free to add more core types as you see needed if present in node.js and browsers
const CORE_TYPES = [
    'require', // from our AMD loader
    // 'atob',
    // 'btoa',
    'setTimeout',
    'clearTimeout',
    'setInterval',
|
628
lib/vscode/build/lib/monaco-api.js
Normal file
@ -0,0 +1,628 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.execute = exports.run3 = exports.DeclarationResolver = exports.FSProvider = exports.RECIPE_PATH = void 0;
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const fancyLog = require("fancy-log");
|
||||
const ansiColors = require("ansi-colors");
|
||||
const dtsv = '3';
|
||||
const tsfmt = require('../../tsfmt.json');
|
||||
const SRC = path.join(__dirname, '../../src');
|
||||
exports.RECIPE_PATH = path.join(__dirname, '../monaco/monaco.d.ts.recipe');
|
||||
const DECLARATION_PATH = path.join(__dirname, '../../src/vs/monaco.d.ts');
|
||||
function logErr(message, ...rest) {
|
||||
fancyLog(ansiColors.yellow(`[monaco.d.ts]`), message, ...rest);
|
||||
}
|
||||
function isDeclaration(ts, a) {
|
||||
return (a.kind === ts.SyntaxKind.InterfaceDeclaration
|
||||
|| a.kind === ts.SyntaxKind.EnumDeclaration
|
||||
|| a.kind === ts.SyntaxKind.ClassDeclaration
|
||||
|| a.kind === ts.SyntaxKind.TypeAliasDeclaration
|
||||
|| a.kind === ts.SyntaxKind.FunctionDeclaration
|
||||
|| a.kind === ts.SyntaxKind.ModuleDeclaration);
|
||||
}
|
||||
function visitTopLevelDeclarations(ts, sourceFile, visitor) {
|
||||
let stop = false;
|
||||
let visit = (node) => {
|
||||
if (stop) {
|
||||
return;
|
||||
}
|
||||
switch (node.kind) {
|
||||
case ts.SyntaxKind.InterfaceDeclaration:
|
||||
case ts.SyntaxKind.EnumDeclaration:
|
||||
case ts.SyntaxKind.ClassDeclaration:
|
||||
case ts.SyntaxKind.VariableStatement:
|
||||
case ts.SyntaxKind.TypeAliasDeclaration:
|
||||
case ts.SyntaxKind.FunctionDeclaration:
|
||||
case ts.SyntaxKind.ModuleDeclaration:
|
||||
stop = visitor(node);
|
||||
}
|
||||
if (stop) {
|
||||
return;
|
||||
}
|
||||
ts.forEachChild(node, visit);
|
||||
};
|
||||
visit(sourceFile);
|
||||
}
|
||||
function getAllTopLevelDeclarations(ts, sourceFile) {
|
||||
let all = [];
|
||||
visitTopLevelDeclarations(ts, sourceFile, (node) => {
|
||||
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
|
||||
let interfaceDeclaration = node;
|
||||
let triviaStart = interfaceDeclaration.pos;
|
||||
let triviaEnd = interfaceDeclaration.name.pos;
|
||||
let triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
|
||||
if (triviaText.indexOf('@internal') === -1) {
|
||||
all.push(node);
|
||||
}
|
||||
}
|
||||
else {
|
||||
let nodeText = getNodeText(sourceFile, node);
|
||||
if (nodeText.indexOf('@internal') === -1) {
|
||||
all.push(node);
|
||||
}
|
||||
}
|
||||
return false /*continue*/;
|
||||
});
|
||||
return all;
|
||||
}
|
||||
function getTopLevelDeclaration(ts, sourceFile, typeName) {
|
||||
let result = null;
|
||||
visitTopLevelDeclarations(ts, sourceFile, (node) => {
|
||||
if (isDeclaration(ts, node) && node.name) {
|
||||
if (node.name.text === typeName) {
|
||||
result = node;
|
||||
return true /*stop*/;
|
||||
}
|
||||
return false /*continue*/;
|
||||
}
|
||||
// node is ts.VariableStatement
|
||||
if (getNodeText(sourceFile, node).indexOf(typeName) >= 0) {
|
||||
result = node;
|
||||
return true /*stop*/;
|
||||
}
|
||||
return false /*continue*/;
|
||||
});
|
||||
return result;
|
||||
}
|
||||
function getNodeText(sourceFile, node) {
|
||||
return sourceFile.getFullText().substring(node.pos, node.end);
|
||||
}
|
||||
function hasModifier(modifiers, kind) {
|
||||
if (modifiers) {
|
||||
for (let i = 0; i < modifiers.length; i++) {
|
||||
let mod = modifiers[i];
|
||||
if (mod.kind === kind) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function isStatic(ts, member) {
|
||||
return hasModifier(member.modifiers, ts.SyntaxKind.StaticKeyword);
|
||||
}
|
||||
function isDefaultExport(ts, declaration) {
|
||||
return (hasModifier(declaration.modifiers, ts.SyntaxKind.DefaultKeyword)
|
||||
&& hasModifier(declaration.modifiers, ts.SyntaxKind.ExportKeyword));
|
||||
}
|
||||
function getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importName, usage, enums) {
|
||||
let result = getNodeText(sourceFile, declaration);
|
||||
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
|
||||
let interfaceDeclaration = declaration;
|
||||
const staticTypeName = (isDefaultExport(ts, interfaceDeclaration)
|
||||
? `${importName}.default`
|
||||
: `${importName}.${declaration.name.text}`);
|
||||
let instanceTypeName = staticTypeName;
|
||||
const typeParametersCnt = (interfaceDeclaration.typeParameters ? interfaceDeclaration.typeParameters.length : 0);
|
||||
if (typeParametersCnt > 0) {
|
||||
let arr = [];
|
||||
for (let i = 0; i < typeParametersCnt; i++) {
|
||||
arr.push('any');
|
||||
}
|
||||
instanceTypeName = `${instanceTypeName}<${arr.join(',')}>`;
|
||||
}
|
||||
const members = interfaceDeclaration.members;
|
||||
members.forEach((member) => {
|
||||
try {
|
||||
let memberText = getNodeText(sourceFile, member);
|
||||
if (memberText.indexOf('@internal') >= 0 || memberText.indexOf('private') >= 0) {
|
||||
result = result.replace(memberText, '');
|
||||
}
|
||||
else {
|
||||
const memberName = member.name.text;
|
||||
const memberAccess = (memberName.indexOf('.') >= 0 ? `['${memberName}']` : `.${memberName}`);
|
||||
if (isStatic(ts, member)) {
|
||||
usage.push(`a = ${staticTypeName}${memberAccess};`);
|
||||
}
|
||||
else {
|
||||
usage.push(`a = (<${instanceTypeName}>b)${memberAccess};`);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
// life..
|
||||
}
|
||||
});
|
||||
}
|
||||
else if (declaration.kind === ts.SyntaxKind.VariableStatement) {
|
||||
const jsDoc = result.substr(0, declaration.getLeadingTriviaWidth(sourceFile));
|
||||
if (jsDoc.indexOf('@monacodtsreplace') >= 0) {
|
||||
const jsDocLines = jsDoc.split(/\r\n|\r|\n/);
|
||||
let directives = [];
|
||||
for (const jsDocLine of jsDocLines) {
|
||||
const m = jsDocLine.match(/^\s*\* \/([^/]+)\/([^/]+)\/$/);
|
||||
if (m) {
|
||||
directives.push([new RegExp(m[1], 'g'), m[2]]);
|
||||
}
|
||||
}
|
||||
// remove the jsdoc
|
||||
result = result.substr(jsDoc.length);
|
||||
if (directives.length > 0) {
|
||||
// apply replace directives
|
||||
const replacer = createReplacerFromDirectives(directives);
|
||||
result = replacer(result);
|
||||
}
|
||||
}
|
||||
}
|
||||
result = result.replace(/export default /g, 'export ');
|
||||
result = result.replace(/export declare /g, 'export ');
|
||||
result = result.replace(/declare /g, '');
|
||||
let lines = result.split(/\r\n|\r|\n/);
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
if (/\s*\*/.test(lines[i])) {
|
||||
// very likely a comment
|
||||
continue;
|
||||
}
|
||||
lines[i] = lines[i].replace(/"/g, '\'');
|
||||
}
|
||||
result = lines.join('\n');
|
||||
if (declaration.kind === ts.SyntaxKind.EnumDeclaration) {
|
||||
result = result.replace(/const enum/, 'enum');
|
||||
enums.push({
|
||||
enumName: declaration.name.getText(sourceFile),
|
||||
text: result
|
||||
});
|
||||
}
|
||||
return result;
|
||||
}
|
||||
function format(ts, text, endl) {
|
||||
const REALLY_FORMAT = false;
|
||||
text = preformat(text, endl);
|
||||
if (!REALLY_FORMAT) {
|
||||
return text;
|
||||
}
|
||||
// Parse the source text
|
||||
let sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
|
||||
// Get the formatting edits on the input sources
|
||||
let edits = ts.formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
|
||||
// Apply the edits on the input code
|
||||
return applyEdits(text, edits);
|
||||
function countParensCurly(text) {
|
||||
let cnt = 0;
|
||||
for (let i = 0; i < text.length; i++) {
|
||||
if (text.charAt(i) === '(' || text.charAt(i) === '{') {
|
||||
cnt++;
|
||||
}
|
||||
if (text.charAt(i) === ')' || text.charAt(i) === '}') {
|
||||
cnt--;
|
||||
}
|
||||
}
|
||||
return cnt;
|
||||
}
|
||||
function repeatStr(s, cnt) {
|
||||
let r = '';
|
||||
for (let i = 0; i < cnt; i++) {
|
||||
r += s;
|
||||
}
|
||||
return r;
|
||||
}
|
||||
function preformat(text, endl) {
|
||||
let lines = text.split(endl);
|
||||
let inComment = false;
|
||||
let inCommentDeltaIndent = 0;
|
||||
let indent = 0;
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
let line = lines[i].replace(/\s$/, '');
|
||||
let repeat = false;
|
||||
let lineIndent = 0;
|
||||
do {
|
||||
repeat = false;
|
||||
if (line.substring(0, 4) === '    ') {
|
||||
line = line.substring(4);
|
||||
lineIndent++;
|
||||
repeat = true;
|
||||
}
|
||||
if (line.charAt(0) === '\t') {
|
||||
line = line.substring(1);
|
||||
lineIndent++;
|
||||
repeat = true;
|
||||
}
|
||||
} while (repeat);
|
||||
if (line.length === 0) {
|
||||
continue;
|
||||
}
|
||||
if (inComment) {
|
||||
if (/\*\//.test(line)) {
|
||||
inComment = false;
|
||||
}
|
||||
lines[i] = repeatStr('\t', lineIndent + inCommentDeltaIndent) + line;
|
||||
continue;
|
||||
}
|
||||
if (/\/\*/.test(line)) {
|
||||
inComment = true;
|
||||
inCommentDeltaIndent = indent - lineIndent;
|
||||
lines[i] = repeatStr('\t', indent) + line;
|
||||
continue;
|
||||
}
|
||||
const cnt = countParensCurly(line);
|
||||
let shouldUnindentAfter = false;
|
||||
let shouldUnindentBefore = false;
|
||||
if (cnt < 0) {
|
||||
if (/[({]/.test(line)) {
|
||||
shouldUnindentAfter = true;
|
||||
}
|
||||
else {
|
||||
shouldUnindentBefore = true;
|
||||
}
|
||||
}
|
||||
else if (cnt === 0) {
|
||||
shouldUnindentBefore = /^\}/.test(line);
|
||||
}
|
||||
let shouldIndentAfter = false;
|
||||
if (cnt > 0) {
|
||||
shouldIndentAfter = true;
|
||||
}
|
||||
else if (cnt === 0) {
|
||||
shouldIndentAfter = /{$/.test(line);
|
||||
}
|
||||
if (shouldUnindentBefore) {
|
||||
indent--;
|
||||
}
|
||||
lines[i] = repeatStr('\t', indent) + line;
|
||||
if (shouldUnindentAfter) {
|
||||
indent--;
|
||||
}
|
||||
if (shouldIndentAfter) {
|
||||
indent++;
|
||||
}
|
||||
}
|
||||
return lines.join(endl);
|
||||
}
|
||||
function getRuleProvider(options) {
|
||||
// Share this between multiple formatters using the same options.
|
||||
// This represents the bulk of the space the formatter uses.
|
||||
return ts.formatting.getFormatContext(options);
|
||||
}
|
||||
function applyEdits(text, edits) {
|
||||
// Apply edits in reverse on the existing text
|
||||
let result = text;
|
||||
for (let i = edits.length - 1; i >= 0; i--) {
|
||||
let change = edits[i];
|
||||
let head = result.slice(0, change.span.start);
|
||||
let tail = result.slice(change.span.start + change.span.length);
|
||||
result = head + change.newText + tail;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
function createReplacerFromDirectives(directives) {
|
||||
return (str) => {
|
||||
for (let i = 0; i < directives.length; i++) {
|
||||
str = str.replace(directives[i][0], directives[i][1]);
|
||||
}
|
||||
return str;
|
||||
};
|
||||
}
|
||||
function createReplacer(data) {
|
||||
data = data || '';
|
||||
let rawDirectives = data.split(';');
|
||||
let directives = [];
|
||||
rawDirectives.forEach((rawDirective) => {
|
||||
if (rawDirective.length === 0) {
|
||||
return;
|
||||
}
|
||||
let pieces = rawDirective.split('=>');
|
||||
let findStr = pieces[0];
|
||||
let replaceStr = pieces[1];
|
||||
findStr = findStr.replace(/[\-\\\{\}\*\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&');
|
||||
findStr = '\\b' + findStr + '\\b';
|
||||
directives.push([new RegExp(findStr, 'g'), replaceStr]);
|
||||
});
|
||||
return createReplacerFromDirectives(directives);
|
||||
}
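// Illustrative sketch (hypothetical directive string): createReplacer turns ';'-separated
// "find=>replace" pairs into whole-word regex replacements, as used by the recipe includes.
function createReplacerExample() {
    const replacer = createReplacer('IRange=>editor.IRange;ISelection=>editor.ISelection');
    return replacer('range: IRange, selection: ISelection');
    // -> 'range: editor.IRange, selection: editor.ISelection'
}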
|
||||
function generateDeclarationFile(ts, recipe, sourceFileGetter) {
|
||||
const endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
|
||||
let lines = recipe.split(endl);
|
||||
let result = [];
|
||||
let usageCounter = 0;
|
||||
let usageImports = [];
|
||||
let usage = [];
|
||||
let failed = false;
|
||||
usage.push(`var a: any;`);
|
||||
usage.push(`var b: any;`);
|
||||
const generateUsageImport = (moduleId) => {
|
||||
let importName = 'm' + (++usageCounter);
|
||||
usageImports.push(`import * as ${importName} from './${moduleId.replace(/\.d\.ts$/, '')}';`);
|
||||
return importName;
|
||||
};
|
||||
let enums = [];
|
||||
let version = null;
|
||||
lines.forEach(line => {
|
||||
if (failed) {
|
||||
return;
|
||||
}
|
||||
let m0 = line.match(/^\/\/dtsv=(\d+)$/);
|
||||
if (m0) {
|
||||
version = m0[1];
|
||||
}
|
||||
let m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
|
||||
if (m1) {
|
||||
let moduleId = m1[1];
|
||||
const sourceFile = sourceFileGetter(moduleId);
|
||||
if (!sourceFile) {
|
||||
logErr(`While handling ${line}`);
|
||||
logErr(`Cannot find ${moduleId}`);
|
||||
failed = true;
|
||||
return;
|
||||
}
|
||||
const importName = generateUsageImport(moduleId);
|
||||
let replacer = createReplacer(m1[2]);
|
||||
let typeNames = m1[3].split(/,/);
|
||||
typeNames.forEach((typeName) => {
|
||||
typeName = typeName.trim();
|
||||
if (typeName.length === 0) {
|
||||
return;
|
||||
}
|
||||
let declaration = getTopLevelDeclaration(ts, sourceFile, typeName);
|
||||
if (!declaration) {
|
||||
logErr(`While handling ${line}`);
|
||||
logErr(`Cannot find ${typeName}`);
|
||||
failed = true;
|
||||
return;
|
||||
}
|
||||
result.push(replacer(getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importName, usage, enums)));
|
||||
});
|
||||
return;
|
||||
}
|
||||
let m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
|
||||
if (m2) {
|
||||
let moduleId = m2[1];
|
||||
const sourceFile = sourceFileGetter(moduleId);
|
||||
if (!sourceFile) {
|
||||
logErr(`While handling ${line}`);
|
||||
logErr(`Cannot find ${moduleId}`);
|
||||
failed = true;
|
||||
return;
|
||||
}
|
||||
const importName = generateUsageImport(moduleId);
|
||||
let replacer = createReplacer(m2[2]);
|
||||
let typeNames = m2[3].split(/,/);
|
||||
let typesToExcludeMap = {};
|
||||
let typesToExcludeArr = [];
|
||||
typeNames.forEach((typeName) => {
|
||||
typeName = typeName.trim();
|
||||
if (typeName.length === 0) {
|
||||
return;
|
||||
}
|
||||
typesToExcludeMap[typeName] = true;
|
||||
typesToExcludeArr.push(typeName);
|
||||
});
|
||||
getAllTopLevelDeclarations(ts, sourceFile).forEach((declaration) => {
|
||||
if (isDeclaration(ts, declaration) && declaration.name) {
|
||||
if (typesToExcludeMap[declaration.name.text]) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
else {
|
||||
// node is ts.VariableStatement
|
||||
let nodeText = getNodeText(sourceFile, declaration);
|
||||
for (let i = 0; i < typesToExcludeArr.length; i++) {
|
||||
if (nodeText.indexOf(typesToExcludeArr[i]) >= 0) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
result.push(replacer(getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importName, usage, enums)));
|
||||
});
|
||||
return;
|
||||
}
|
||||
result.push(line);
|
||||
});
|
||||
if (failed) {
|
||||
return null;
|
||||
}
|
||||
if (version !== dtsv) {
|
||||
if (!version) {
|
||||
logErr(`gulp watch restart required. 'monaco.d.ts.recipe' is written before versioning was introduced.`);
|
||||
}
|
||||
else {
|
||||
logErr(`gulp watch restart required. 'monaco.d.ts.recipe' v${version} does not match runtime v${dtsv}.`);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
let resultTxt = result.join(endl);
|
||||
resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri');
|
||||
resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<');
|
||||
resultTxt = resultTxt.split(/\r\n|\n|\r/).join(endl);
|
||||
resultTxt = format(ts, resultTxt, endl);
|
||||
resultTxt = resultTxt.split(/\r\n|\n|\r/).join(endl);
|
||||
enums.sort((e1, e2) => {
|
||||
if (e1.enumName < e2.enumName) {
|
||||
return -1;
|
||||
}
|
||||
if (e1.enumName > e2.enumName) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
});
|
||||
let resultEnums = [
|
||||
'/*---------------------------------------------------------------------------------------------',
|
||||
' * Copyright (c) Microsoft Corporation. All rights reserved.',
|
||||
' * Licensed under the MIT License. See License.txt in the project root for license information.',
|
||||
' *--------------------------------------------------------------------------------------------*/',
|
||||
'',
|
||||
'// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY.',
|
||||
''
|
||||
].concat(enums.map(e => e.text)).join(endl);
|
||||
resultEnums = resultEnums.split(/\r\n|\n|\r/).join(endl);
|
||||
resultEnums = format(ts, resultEnums, endl);
|
||||
resultEnums = resultEnums.split(/\r\n|\n|\r/).join(endl);
|
||||
return {
|
||||
result: resultTxt,
|
||||
usageContent: `${usageImports.join('\n')}\n\n${usage.join('\n')}`,
|
||||
enums: resultEnums
|
||||
};
|
||||
}
|
||||
function _run(ts, sourceFileGetter) {
|
||||
const recipe = fs.readFileSync(exports.RECIPE_PATH).toString();
|
||||
const t = generateDeclarationFile(ts, recipe, sourceFileGetter);
|
||||
if (!t) {
|
||||
return null;
|
||||
}
|
||||
const result = t.result;
|
||||
const usageContent = t.usageContent;
|
||||
const enums = t.enums;
|
||||
const currentContent = fs.readFileSync(DECLARATION_PATH).toString();
|
||||
const one = currentContent.replace(/\r\n/gm, '\n');
|
||||
const other = result.replace(/\r\n/gm, '\n');
|
||||
const isTheSame = (one === other);
|
||||
return {
|
||||
content: result,
|
||||
usageContent: usageContent,
|
||||
enums: enums,
|
||||
filePath: DECLARATION_PATH,
|
||||
isTheSame
|
||||
};
|
||||
}
|
||||
class FSProvider {
|
||||
existsSync(filePath) {
|
||||
return fs.existsSync(filePath);
|
||||
}
|
||||
statSync(filePath) {
|
||||
return fs.statSync(filePath);
|
||||
}
|
||||
readFileSync(_moduleId, filePath) {
|
||||
return fs.readFileSync(filePath);
|
||||
}
|
||||
}
|
||||
exports.FSProvider = FSProvider;
|
||||
class CacheEntry {
|
||||
constructor(sourceFile, mtime) {
|
||||
this.sourceFile = sourceFile;
|
||||
this.mtime = mtime;
|
||||
}
|
||||
}
|
||||
class DeclarationResolver {
|
||||
constructor(_fsProvider) {
|
||||
this._fsProvider = _fsProvider;
|
||||
this.ts = require('typescript');
|
||||
this._sourceFileCache = Object.create(null);
|
||||
}
|
||||
invalidateCache(moduleId) {
|
||||
this._sourceFileCache[moduleId] = null;
|
||||
}
|
||||
getDeclarationSourceFile(moduleId) {
|
||||
if (this._sourceFileCache[moduleId]) {
|
||||
// Since we cannot trust file watching to invalidate the cache, check also the mtime
|
||||
const fileName = this._getFileName(moduleId);
|
||||
const mtime = this._fsProvider.statSync(fileName).mtime.getTime();
|
||||
if (this._sourceFileCache[moduleId].mtime !== mtime) {
|
||||
this._sourceFileCache[moduleId] = null;
|
||||
}
|
||||
}
|
||||
if (!this._sourceFileCache[moduleId]) {
|
||||
this._sourceFileCache[moduleId] = this._getDeclarationSourceFile(moduleId);
|
||||
}
|
||||
return this._sourceFileCache[moduleId] ? this._sourceFileCache[moduleId].sourceFile : null;
|
||||
}
|
||||
_getFileName(moduleId) {
|
||||
if (/\.d\.ts$/.test(moduleId)) {
|
||||
return path.join(SRC, moduleId);
|
||||
}
|
||||
return path.join(SRC, `${moduleId}.ts`);
|
||||
}
|
||||
_getDeclarationSourceFile(moduleId) {
|
||||
const fileName = this._getFileName(moduleId);
|
||||
if (!this._fsProvider.existsSync(fileName)) {
|
||||
return null;
|
||||
}
|
||||
const mtime = this._fsProvider.statSync(fileName).mtime.getTime();
|
||||
if (/\.d\.ts$/.test(moduleId)) {
|
||||
// const mtime = this._fsProvider.statFileSync()
|
||||
const fileContents = this._fsProvider.readFileSync(moduleId, fileName).toString();
|
||||
return new CacheEntry(this.ts.createSourceFile(fileName, fileContents, this.ts.ScriptTarget.ES5), mtime);
|
||||
}
|
||||
const fileContents = this._fsProvider.readFileSync(moduleId, fileName).toString();
|
||||
const fileMap = {
|
||||
'file.ts': fileContents
|
||||
};
|
||||
const service = this.ts.createLanguageService(new TypeScriptLanguageServiceHost(this.ts, {}, fileMap, {}));
|
||||
const text = service.getEmitOutput('file.ts', true, true).outputFiles[0].text;
|
||||
return new CacheEntry(this.ts.createSourceFile(fileName, text, this.ts.ScriptTarget.ES5), mtime);
|
||||
}
|
||||
}
|
||||
exports.DeclarationResolver = DeclarationResolver;
|
||||
function run3(resolver) {
|
||||
const sourceFileGetter = (moduleId) => resolver.getDeclarationSourceFile(moduleId);
|
||||
return _run(resolver.ts, sourceFileGetter);
|
||||
}
|
||||
exports.run3 = run3;
|
||||
class TypeScriptLanguageServiceHost {
|
||||
constructor(ts, libs, files, compilerOptions) {
|
||||
this._ts = ts;
|
||||
this._libs = libs;
|
||||
this._files = files;
|
||||
this._compilerOptions = compilerOptions;
|
||||
}
|
||||
// --- language service host ---------------
|
||||
getCompilationSettings() {
|
||||
return this._compilerOptions;
|
||||
}
|
||||
getScriptFileNames() {
|
||||
return ([]
|
||||
.concat(Object.keys(this._libs))
|
||||
.concat(Object.keys(this._files)));
|
||||
}
|
||||
getScriptVersion(_fileName) {
|
||||
return '1';
|
||||
}
|
||||
getProjectVersion() {
|
||||
return '1';
|
||||
}
|
||||
getScriptSnapshot(fileName) {
|
||||
if (this._files.hasOwnProperty(fileName)) {
|
||||
return this._ts.ScriptSnapshot.fromString(this._files[fileName]);
|
||||
}
|
||||
else if (this._libs.hasOwnProperty(fileName)) {
|
||||
return this._ts.ScriptSnapshot.fromString(this._libs[fileName]);
|
||||
}
|
||||
else {
|
||||
return this._ts.ScriptSnapshot.fromString('');
|
||||
}
|
||||
}
|
||||
getScriptKind(_fileName) {
|
||||
return this._ts.ScriptKind.TS;
|
||||
}
|
||||
getCurrentDirectory() {
|
||||
return '';
|
||||
}
|
||||
getDefaultLibFileName(_options) {
|
||||
return 'defaultLib:es5';
|
||||
}
|
||||
isDefaultLibFileName(fileName) {
|
||||
return fileName === this.getDefaultLibFileName(this._compilerOptions);
|
||||
}
|
||||
}
|
||||
function execute() {
|
||||
let r = run3(new DeclarationResolver(new FSProvider()));
|
||||
if (!r) {
|
||||
throw new Error(`monaco.d.ts generation error - Cannot continue`);
|
||||
}
|
||||
return r;
|
||||
}
|
||||
exports.execute = execute;
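// Illustrative sketch: a minimal consumer of execute() that writes the generated
// declaration file only when it changed (file writing here is an assumption, not the
// exact gulp task the build uses).
function writeMonacoDtsExample() {
    const r = execute();
    if (!r.isTheSame) {
        fs.writeFileSync(r.filePath, r.content);
    }
}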
|
Some files were not shown because too many files have changed in this diff