
Merge pull request #2773 from cdr/upgrade-vscode-1.53

feat(vscode): update to version 1.53.2
Joe Previte 2021-03-05 14:03:10 -07:00 committed by GitHub
commit 925073db31
1916 changed files with 84281 additions and 66671 deletions


@ -44,6 +44,7 @@ jobs:
yarn --frozen-lockfile
yarn test
- name: Upload test artifacts
if: always()
uses: actions/upload-artifact@v2
with:
name: test-videos


@ -1,12 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
main() {
cd "$(dirname "$0")/../.."
cd ./lib/vscode
git add -A
git reset --hard
}
main "$@"

ci/dev/update-vscode.sh Executable file

@ -0,0 +1,58 @@
#!/usr/bin/env bash
set -euo pipefail

main() {
  cd "$(dirname "$0")/../.."

  # Check if the remote exists.
  # If it doesn't, we add it.
  if ! git config remote.vscode.url > /dev/null; then
    echo "Could not find 'vscode' as a remote"
    echo "Adding with: git remote add -f vscode https://github.com/microsoft/vscode.git &> /dev/null"
    echo "Suppressing output with '&> /dev/null'"
    git remote add -f vscode https://github.com/microsoft/vscode.git &> /dev/null
  fi

  # Ask which version we should update to.
  # In the future, we'll automate this and grab the latest version automatically.
  read -r -p "What version of VSCode would you like to update to? (e.g. 1.52) " VSCODE_VERSION_TO_UPDATE

  # Check that this version exists.
  if [[ -z $(git ls-remote --heads vscode release/"$VSCODE_VERSION_TO_UPDATE") ]]; then
    echo "Oops, that doesn't look like a valid version."
    echo "You entered: $VSCODE_VERSION_TO_UPDATE"
    echo "Verify that this branch exists here: https://github.com/microsoft/vscode/branches/all?query=release%2F$VSCODE_VERSION_TO_UPDATE"
    exit 1
  fi

  echo -e "Great! We'll prep a PR for updating to $VSCODE_VERSION_TO_UPDATE\n"

  # Check if GitHub CLI is installed.
  if ! command -v gh &> /dev/null; then
    echo "GitHub CLI could not be found."
    echo "If you install it before you run this script next time, we'll open a draft PR for you!"
    echo -e "See docs here: https://github.com/cli/cli#installation\n"
    exit
  fi

  # Push the branch to the remote if it hasn't been pushed yet.
  # If we don't do this, the "open a draft PR" step won't work,
  # because gh will stop and ask where you want to push the branch.
  CURRENT_BRANCH=$(git branch --show-current)
  if [[ -z $(git ls-remote --heads origin "$CURRENT_BRANCH") ]]; then
    echo "Doesn't look like you've pushed this branch to remote"
    echo -e "Pushing now using: git push origin $CURRENT_BRANCH\n"
    git push origin "$CURRENT_BRANCH"
  fi

  echo "Opening a draft PR on GitHub"
  # To read about these flags, visit the docs: https://cli.github.com/manual/gh_pr_create
  gh pr create --base master --title "feat(vscode): update to version $VSCODE_VERSION_TO_UPDATE" --body "This PR updates vscode to version: $VSCODE_VERSION_TO_UPDATE" --reviewer @cdr/code-server-reviewers --repo cdr/code-server --draft

  echo "Going to try to update vscode for you..."
  echo -e "Running: git subtree pull --prefix lib/vscode vscode release/${VSCODE_VERSION_TO_UPDATE} --squash\n"
  # Try to run the subtree update command.
  git subtree pull --prefix lib/vscode vscode release/"${VSCODE_VERSION_TO_UPDATE}" --squash --message "chore(vscode): update to $VSCODE_VERSION_TO_UPDATE"
}

main "$@"
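For reference, a minimal sketch of how this script is meant to be invoked, assuming the `update:vscode` yarn script referenced in the docs below points at `ci/dev/update-vscode.sh`:

```shell
# From the code-server repository root, on the branch that should carry the update:
yarn update:vscode
# The script adds the vscode remote if it is missing, prompts for a version
# (e.g. 1.53), opens a draft PR via gh, and finishes with the subtree pull shown above.
```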


@ -6,6 +6,7 @@
- [Requirements](#requirements)
- [Development Workflow](#development-workflow)
- [Updating VS Code](#updating-vs-code)
- [Notes about Changes](#notes-about-changes)
- [Build](#build)
- [Structure](#structure)
- [Modifications to VS Code](#modifications-to-vs-code)
@ -59,14 +60,18 @@ To develop inside an isolated Docker container:
### Updating VS Code

If you need to update VS Code, you can update the subtree with one line. Here's an example using the version 1.52:

```shell
# Add vscode as a new remote if you haven't already and fetch
git remote add -f vscode https://github.com/microsoft/vscode.git

git subtree pull --prefix lib/vscode vscode release/1.52 --squash --message "Update VS Code to 1.52"
```

To update VS Code, follow these steps:

1. Run `yarn update:vscode`.
2. Enter a version, e.g. 1.53.
3. This will open a draft PR for you.
4. There will be merge conflicts. Commit them first.
   1. We do this because otherwise it will be impossible to review your PR.
5. Once they're all fixed, test code-server locally and make sure it all works.

#### Notes about Changes

- Watch out for updates to `lib/vscode/src/vs/code/browser/workbench/workbench.html`. You may need to make changes to `src/browser/pages/vscode.html`.

## Build


@ -14,5 +14,6 @@
**/extensions/**/build/**
**/extensions/markdown-language-features/media/**
**/extensions/typescript-basics/test/colorize-fixtures/**
**/extensions/**/dist/**
# This is a code-server code symlink.
src/vs/base/node/proxy_agent.ts

File diff suppressed because it is too large.


@ -0,0 +1,2 @@
dist
node_modules


@ -0,0 +1,10 @@
name: 'Build Chat'
description: 'Notify in chat about build results.'
author: 'Christof Marti'
inputs:
  workflow_run_url:
    description: 'Workflow run URL of the completed build.'
    required: true
runs:
  using: 'node12'
  main: 'dist/main.js'


@ -0,0 +1,23 @@
{
"name": "build-chat",
"version": "0.0.0",
"author": "Microsoft Corporation",
"license": "MIT",
"description": "A GitHub action to create a Windows Package Manager manifest file.",
"main": "dist/main.js",
"scripts": {
"build": "tsc"
},
"dependencies": {
"@actions/core": "^1.2.6",
"@octokit/rest": "^18.0.12",
"@slack/web-api": "^6.0.0",
"azure-storage": "^2.10.3",
"stream-buffers": "^3.0.2"
},
"devDependencies": {
"@types/node": "^14.14.22",
"@types/stream-buffers": "^3.0.3",
"typescript": "^4.1.3"
}
}
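A minimal sketch of building this action locally, assuming you start from the repository root; the `build` script above simply runs `tsc`, which emits the `dist/main.js` entry point declared in `action.yml`:

```shell
cd .github/actions/build-chat
yarn install    # install the dependencies listed above
yarn run build  # runs tsc; compiled output lands in dist/ per the tsconfig below
```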


@ -0,0 +1,217 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as core from '@actions/core';
import { Octokit, RestEndpointMethodTypes } from '@octokit/rest';
import { WebClient } from '@slack/web-api';
import * as storage from 'azure-storage';
import { WritableStreamBuffer } from 'stream-buffers';
(async () => {
const actionUrl = core.getInput('workflow_run_url');
const url = actionUrl || 'https://api.github.com/repos/microsoft/vscode/actions/runs/503514090';
console.log(url);
const parts = url.split('/');
const owner = parts[parts.length - 5];
const repo = parts[parts.length - 4];
const runId = parseInt(parts[parts.length - 1], 10);
if (actionUrl) {
await handleNotification(owner, repo, runId);
} else {
const results = await buildComplete(owner, repo, runId);
for (const message of [...results.logMessages, ...results.messages]) {
console.log(message);
}
}
})()
.then(null, console.error);
const testChannels = ['bot-log', 'bot-test-log'];
async function handleNotification(owner: string, repo: string, runId: number) {
const results = await buildComplete(owner, repo, runId);
if (results.logMessages.length || results.messages.length) {
const web = new WebClient(process.env.SLACK_TOKEN);
const memberships = await listAllMemberships(web);
const memberTestChannels = memberships.filter(m => testChannels.indexOf(m.name) !== -1);
for (const message of results.logMessages) {
for (const testChannel of memberTestChannels) {
await web.chat.postMessage({
text: message,
link_names: true,
channel: testChannel.id,
});
}
}
for (const message of results.messages) {
for (const channel of memberships) {
await web.chat.postMessage({
text: message,
link_names: true,
channel: channel.id,
});
}
}
}
}
async function buildComplete(owner: string, repo: string, runId: number) {
console.log(`buildComplete: https://github.com/${owner}/${repo}/actions/runs/${runId}`);
const auth = `token ${process.env.GITHUB_TOKEN}`;
const octokit = new Octokit({ auth });
const buildResult = (await octokit.actions.getWorkflowRun({
owner,
repo,
run_id: runId,
})).data;
if (buildResult.head_branch !== 'master' && !buildResult.head_branch?.startsWith('release/')) {
console.error('Private branch. Terminating.')
return { logMessages: [], messages: [] };
}
// const buildQuery = `${buildsApiUrl}?$top=10&maxTime=${buildResult.finishTime}&definitions=${buildResult.definition.id}&branchName=${buildResult.sourceBranch}&resultFilter=${results.join(',')}&api-version=5.0-preview.4`;
const buildResults = (await octokit.actions.listWorkflowRuns({
owner,
repo,
workflow_id: buildResult.workflow_id,
branch: buildResult.head_branch || undefined,
per_page: 5, // More returns 502s.
})).data.workflow_runs
.filter(run => run.status === 'completed');
const currentBuildIndex = buildResults.findIndex(build => build.id === buildResult.id);
if (currentBuildIndex === -1) {
console.error('Build not on first page. Terminating.')
console.error(buildResults.map(({ id, status, conclusion }) => ({ id, status, conclusion })));
return { logMessages: [], messages: [] };
}
const slicedResults = buildResults.slice(currentBuildIndex, currentBuildIndex + 2);
const builds = slicedResults
.map<Build>((build, i, array) => ({
data: build,
previousSourceVersion: i < array.length - 1 ? array[i + 1].head_sha : undefined,
authors: [],
buildHtmlUrl: build.html_url,
changesHtmlUrl: '',
}));
const logMessages = builds.slice(0, 1)
.map(build => `Id: ${build.data.id} | Branch: ${build.data.head_branch} | Conclusion: ${build.data.conclusion} | Created: ${build.data.created_at} | Updated: ${build.data.updated_at}`);
const transitionedBuilds = builds.filter((build, i, array) => i < array.length - 1 && transitioned(build, array[i + 1]));
await Promise.all(transitionedBuilds
.map(async build => {
if (build.previousSourceVersion) {
const cmp = await compareCommits(octokit, owner, repo, build.previousSourceVersion, build.data.head_sha);
const commits = cmp.data.commits;
const authors = new Set<string>([
...commits.map((c: any) => c.author.login),
...commits.map((c: any) => c.committer.login),
]);
authors.delete('web-flow'); // GitHub Web UI committer
build.authors = [...authors];
build.changesHtmlUrl = `https://github.com/${owner}/${repo}/compare/${build.previousSourceVersion.substr(0, 7)}...${build.data.head_sha.substr(0, 7)}`; // Shorter than: cmp.data.html_url
}
}));
const vscode = repo === 'vscode';
const name = vscode ? `VS Code ${buildResult.name} Build` : buildResult.name;
// TBD: `Requester: ${vstsToSlackUser(build.requester, build.degraded)}${pingBenForSmokeTests && releaseBuild && build.result === 'partiallySucceeded' ? ' | Ping: @bpasero' : ''}`
const accounts = await readAccounts();
const githubAccountMap = githubToAccounts(accounts);
const messages = transitionedBuilds.map(build => `${name}
Result: ${build.data.conclusion} | Branch: ${build.data.head_branch} | Authors: ${githubToSlackUsers(githubAccountMap, build.authors, build.degraded).sort().join(', ') || `None (rebuild)`}
Build: ${build.buildHtmlUrl}
Changes: ${build.changesHtmlUrl}`);
return { logMessages, messages };
}
const conclusions = ['success', 'failure']
function transitioned(newer: Build, older: Build) {
const newerResult = newer.data.conclusion || 'success';
const olderResult = older.data.conclusion || 'success';
if (newerResult === olderResult) {
return false;
}
if (conclusions.indexOf(newerResult) > conclusions.indexOf(olderResult)) {
newer.degraded = true;
}
return true;
}
async function compareCommits(octokit: Octokit, owner: string, repo: string, base: string, head: string) {
return octokit.repos.compareCommits({ owner, repo, base, head });
}
function githubToSlackUsers(githubToAccounts: Record<string, Accounts>, githubUsers: string[], at?: boolean) {
return githubUsers.map(g => githubToAccounts[g] ? `${at ? '@' : ''}${githubToAccounts[g].slack}` : g);
}
interface Accounts {
github: string;
slack: string;
vsts: string;
}
function githubToAccounts(accounts: Accounts[]) {
return accounts.reduce((m, e) => {
m[e.github] = e;
return m;
}, <Record<string, Accounts>>{});
}
async function readAccounts() {
const connectionString = process.env.BUILD_CHAT_STORAGE_CONNECTION_STRING;
if (!connectionString) {
console.error('Connection string missing.');
return [];
}
const buf = await readFile(connectionString, 'config', '/', 'accounts.json');
return JSON.parse(buf.toString()) as Accounts[];
}
async function readFile(connectionString: string, share: string, directory: string, filename: string) {
return new Promise<Buffer>((resolve, reject) => {
const stream = new WritableStreamBuffer()
const fileService = storage.createFileService(connectionString);
fileService.getFileToStream(share, directory, filename, stream, err => {
if (err) {
reject(err);
} else {
const contents = stream.getContents();
if (contents) {
resolve(contents);
} else {
reject(new Error('No content'));
}
}
});
});
}
interface AllChannels {
channels: {
id: string;
name: string;
is_member: boolean;
}[];
}
async function listAllMemberships(web: WebClient) {
const groups = await web.conversations.list({ types: 'public_channel,private_channel' }) as unknown as AllChannels;
return groups.channels
.filter(c => c.is_member);
}
interface Build {
data: RestEndpointMethodTypes['actions']['getWorkflowRun']['response']['data'];
previousSourceVersion: string | undefined;
authors: string[];
buildHtmlUrl: string;
changesHtmlUrl: string;
degraded?: boolean;
}
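A minimal sketch of exercising the compiled script by hand; when no `workflow_run_url` input is set, the code above falls back to a hard-coded sample run URL and only prints the resulting messages to the console. The token value is a placeholder:

```shell
# GITHUB_TOKEN backs the Octokit calls; SLACK_TOKEN is only used when a
# workflow_run_url input is present, and a missing storage connection string
# just yields an empty accounts list.
GITHUB_TOKEN=<personal-access-token> node .github/actions/build-chat/dist/main.js
```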

View File

@ -0,0 +1,18 @@
{
"compilerOptions": {
"module": "commonjs",
"target": "es2017",
"strict": true,
"noUnusedLocals": true,
"resolveJsonModule": true,
"lib": [
"es2017"
],
"sourceMap": true,
"outDir": "./dist",
"rootDir": "./src",
},
"exclude": [
"node_modules"
]
}


@ -0,0 +1,728 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
"@actions/core@^1.2.6":
version "1.2.6"
resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.2.6.tgz#a78d49f41a4def18e88ce47c2cac615d5694bf09"
integrity sha512-ZQYitnqiyBc3D+k7LsgSBmMDVkOVidaagDG7j3fOym77jNunWRuYx7VSHa9GNfFZh+zh61xsCjRj4JxMZlDqTA==
"@octokit/auth-token@^2.4.4":
version "2.4.4"
resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-2.4.4.tgz#ee31c69b01d0378c12fd3ffe406030f3d94d3b56"
integrity sha512-LNfGu3Ro9uFAYh10MUZVaT7X2CnNm2C8IDQmabx+3DygYIQjs9FwzFAHN/0t6mu5HEPhxcb1XOuxdpY82vCg2Q==
dependencies:
"@octokit/types" "^6.0.0"
"@octokit/core@^3.2.3":
version "3.2.4"
resolved "https://registry.yarnpkg.com/@octokit/core/-/core-3.2.4.tgz#5791256057a962eca972e31818f02454897fd106"
integrity sha512-d9dTsqdePBqOn7aGkyRFe7pQpCXdibSJ5SFnrTr0axevObZrpz3qkWm7t/NjYv5a66z6vhfteriaq4FRz3e0Qg==
dependencies:
"@octokit/auth-token" "^2.4.4"
"@octokit/graphql" "^4.5.8"
"@octokit/request" "^5.4.12"
"@octokit/types" "^6.0.3"
before-after-hook "^2.1.0"
universal-user-agent "^6.0.0"
"@octokit/endpoint@^6.0.1":
version "6.0.10"
resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-6.0.10.tgz#741ce1fa2f4fb77ce8ebe0c6eaf5ce63f565f8e8"
integrity sha512-9+Xef8nT7OKZglfkOMm7IL6VwxXUQyR7DUSU0LH/F7VNqs8vyd7es5pTfz9E7DwUIx7R3pGscxu1EBhYljyu7Q==
dependencies:
"@octokit/types" "^6.0.0"
is-plain-object "^5.0.0"
universal-user-agent "^6.0.0"
"@octokit/graphql@^4.5.8":
version "4.5.8"
resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-4.5.8.tgz#d42373633c3015d0eafce64a8ce196be167fdd9b"
integrity sha512-WnCtNXWOrupfPJgXe+vSmprZJUr0VIu14G58PMlkWGj3cH+KLZEfKMmbUQ6C3Wwx6fdhzVW1CD5RTnBdUHxhhA==
dependencies:
"@octokit/request" "^5.3.0"
"@octokit/types" "^6.0.0"
universal-user-agent "^6.0.0"
"@octokit/openapi-types@^3.0.0":
version "3.0.0"
resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-3.0.0.tgz#f73d48af2d21bf4f97fbf38fae43b54699e0dbba"
integrity sha512-jOp1CVRw+OBJaZtG9QzZggvJXvyzgDXuW948SWsDiwmyDuCjeYCiF3TDD/qvhpF580RfP7iBIos4AVU6yhgMlA==
"@octokit/plugin-paginate-rest@^2.6.2":
version "2.8.0"
resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.8.0.tgz#2b41e12b494e895bf5fb5b12565d2c80a0ecc6ae"
integrity sha512-HtuEQ2AYE4YFEBQN0iHmMsIvVucd5RsnwJmRKIsfAg1/ZeoMaU+jXMnTAZqIUEmcVJA27LjHUm3f1hxf8Fpdxw==
dependencies:
"@octokit/types" "^6.4.0"
"@octokit/plugin-request-log@^1.0.2":
version "1.0.2"
resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-1.0.2.tgz#394d59ec734cd2f122431fbaf05099861ece3c44"
integrity sha512-oTJSNAmBqyDR41uSMunLQKMX0jmEXbwD1fpz8FG27lScV3RhtGfBa1/BBLym+PxcC16IBlF7KH9vP1BUYxA+Eg==
"@octokit/plugin-rest-endpoint-methods@4.4.1":
version "4.4.1"
resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-4.4.1.tgz#105cf93255432155de078c9efc33bd4e14d1cd63"
integrity sha512-+v5PcvrUcDeFXf8hv1gnNvNLdm4C0+2EiuWt9EatjjUmfriM1pTMM+r4j1lLHxeBQ9bVDmbywb11e3KjuavieA==
dependencies:
"@octokit/types" "^6.1.0"
deprecation "^2.3.1"
"@octokit/request-error@^2.0.0":
version "2.0.4"
resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-2.0.4.tgz#07dd5c0521d2ee975201274c472a127917741262"
integrity sha512-LjkSiTbsxIErBiRh5wSZvpZqT4t0/c9+4dOe0PII+6jXR+oj/h66s7E4a/MghV7iT8W9ffoQ5Skoxzs96+gBPA==
dependencies:
"@octokit/types" "^6.0.0"
deprecation "^2.0.0"
once "^1.4.0"
"@octokit/request@^5.3.0", "@octokit/request@^5.4.12":
version "5.4.12"
resolved "https://registry.yarnpkg.com/@octokit/request/-/request-5.4.12.tgz#b04826fa934670c56b135a81447be2c1723a2ffc"
integrity sha512-MvWYdxengUWTGFpfpefBBpVmmEYfkwMoxonIB3sUGp5rhdgwjXL1ejo6JbgzG/QD9B/NYt/9cJX1pxXeSIUCkg==
dependencies:
"@octokit/endpoint" "^6.0.1"
"@octokit/request-error" "^2.0.0"
"@octokit/types" "^6.0.3"
deprecation "^2.0.0"
is-plain-object "^5.0.0"
node-fetch "^2.6.1"
once "^1.4.0"
universal-user-agent "^6.0.0"
"@octokit/rest@^18.0.12":
version "18.0.12"
resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-18.0.12.tgz#278bd41358c56d87c201e787e8adc0cac132503a"
integrity sha512-hNRCZfKPpeaIjOVuNJzkEL6zacfZlBPV8vw8ReNeyUkVvbuCvvrrx8K8Gw2eyHHsmd4dPlAxIXIZ9oHhJfkJpw==
dependencies:
"@octokit/core" "^3.2.3"
"@octokit/plugin-paginate-rest" "^2.6.2"
"@octokit/plugin-request-log" "^1.0.2"
"@octokit/plugin-rest-endpoint-methods" "4.4.1"
"@octokit/types@^6.0.0", "@octokit/types@^6.0.3", "@octokit/types@^6.1.0", "@octokit/types@^6.4.0":
version "6.4.0"
resolved "https://registry.yarnpkg.com/@octokit/types/-/types-6.4.0.tgz#f3f47be70bcdb3c26f2c2619f3dd0ced466a265c"
integrity sha512-1FEmuVppZE2zG0rBdQlviRz5cp0udyI63zyhBVPrm0FRNAsQkAXU7IYWQg1XvhChFut8YbFYN1usQpk54D6/4w==
dependencies:
"@octokit/openapi-types" "^3.0.0"
"@types/node" ">= 8"
"@slack/logger@>=1.0.0 <3.0.0":
version "2.0.0"
resolved "https://registry.yarnpkg.com/@slack/logger/-/logger-2.0.0.tgz#6a4e1c755849bc0f66dac08a8be54ce790ec0e6b"
integrity sha512-OkIJpiU2fz6HOJujhlhfIGrc8hB4ibqtf7nnbJQDerG0BqwZCfmgtK5sWzZ0TkXVRBKD5MpLrTmCYyMxoMCgPw==
dependencies:
"@types/node" ">=8.9.0"
"@slack/types@^1.7.0":
version "1.10.0"
resolved "https://registry.yarnpkg.com/@slack/types/-/types-1.10.0.tgz#cbf7d83e1027f4cbfd13d6b429f120c7fb09127a"
integrity sha512-tA7GG7Tj479vojfV3AoxbckalA48aK6giGjNtgH6ihpLwTyHE3fIgRrvt8TWfLwW8X8dyu7vgmAsGLRG7hWWOg==
"@slack/web-api@^6.0.0":
version "6.0.0"
resolved "https://registry.yarnpkg.com/@slack/web-api/-/web-api-6.0.0.tgz#14c65ed73c66a187e5f20e12c3898dfd8d5cbf7c"
integrity sha512-YD1wqWuzrYPf4RQyD7OnYS5lImUmNWn+G5V6Qt0N97fPYxqhT72YJtRdSnsTc3VkH5R5imKOhYxb+wqI9hiHnA==
dependencies:
"@slack/logger" ">=1.0.0 <3.0.0"
"@slack/types" "^1.7.0"
"@types/is-stream" "^1.1.0"
"@types/node" ">=12.0.0"
axios "^0.21.1"
eventemitter3 "^3.1.0"
form-data "^2.5.0"
is-stream "^1.1.0"
p-queue "^6.6.1"
p-retry "^4.0.0"
"@types/is-stream@^1.1.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@types/is-stream/-/is-stream-1.1.0.tgz#b84d7bb207a210f2af9bed431dc0fbe9c4143be1"
integrity sha512-jkZatu4QVbR60mpIzjINmtS1ZF4a/FqdTUTBeQDVOQ2PYyidtwFKr0B5G6ERukKwliq+7mIXvxyppwzG5EgRYg==
dependencies:
"@types/node" "*"
"@types/node@*", "@types/node@>= 8", "@types/node@>=12.0.0", "@types/node@>=8.9.0", "@types/node@^14.14.22":
version "14.14.22"
resolved "https://registry.yarnpkg.com/@types/node/-/node-14.14.22.tgz#0d29f382472c4ccf3bd96ff0ce47daf5b7b84b18"
integrity sha512-g+f/qj/cNcqKkc3tFqlXOYjrmZA+jNBiDzbP3kH+B+otKFqAdPgVTGP1IeKRdMml/aE69as5S4FqtxAbl+LaMw==
"@types/retry@^0.12.0":
version "0.12.0"
resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d"
integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==
"@types/stream-buffers@^3.0.3":
version "3.0.3"
resolved "https://registry.yarnpkg.com/@types/stream-buffers/-/stream-buffers-3.0.3.tgz#34e565bf64e3e4bdeee23fd4aa58d4636014a02b"
integrity sha512-NeFeX7YfFZDYsCfbuaOmFQ0OjSmHreKBpp7MQ4alWQBHeh2USLsj7qyMyn9t82kjqIX516CR/5SRHnARduRtbQ==
dependencies:
"@types/node" "*"
ajv@^6.12.3:
version "6.12.6"
resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4"
integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==
dependencies:
fast-deep-equal "^3.1.1"
fast-json-stable-stringify "^2.0.0"
json-schema-traverse "^0.4.1"
uri-js "^4.2.2"
asn1@~0.2.3:
version "0.2.4"
resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136"
integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==
dependencies:
safer-buffer "~2.1.0"
assert-plus@1.0.0, assert-plus@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525"
integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=
asynckit@^0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
integrity sha1-x57Zf380y48robyXkLzDZkdLS3k=
aws-sign2@~0.7.0:
version "0.7.0"
resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8"
integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=
aws4@^1.8.0:
version "1.11.0"
resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59"
integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==
axios@^0.21.1:
version "0.21.1"
resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.1.tgz#22563481962f4d6bde9a76d516ef0e5d3c09b2b8"
integrity sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==
dependencies:
follow-redirects "^1.10.0"
azure-storage@^2.10.3:
version "2.10.3"
resolved "https://registry.yarnpkg.com/azure-storage/-/azure-storage-2.10.3.tgz#c5966bf929d87587d78f6847040ea9a4b1d4a50a"
integrity sha512-IGLs5Xj6kO8Ii90KerQrrwuJKexLgSwYC4oLWmc11mzKe7Jt2E5IVg+ZQ8K53YWZACtVTMBNO3iGuA+4ipjJxQ==
dependencies:
browserify-mime "~1.2.9"
extend "^3.0.2"
json-edm-parser "0.1.2"
md5.js "1.3.4"
readable-stream "~2.0.0"
request "^2.86.0"
underscore "~1.8.3"
uuid "^3.0.0"
validator "~9.4.1"
xml2js "0.2.8"
xmlbuilder "^9.0.7"
bcrypt-pbkdf@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e"
integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=
dependencies:
tweetnacl "^0.14.3"
before-after-hook@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.1.0.tgz#b6c03487f44e24200dd30ca5e6a1979c5d2fb635"
integrity sha512-IWIbu7pMqyw3EAJHzzHbWa85b6oud/yfKYg5rqB5hNE8CeMi3nX+2C2sj0HswfblST86hpVEOAb9x34NZd6P7A==
browserify-mime@~1.2.9:
version "1.2.9"
resolved "https://registry.yarnpkg.com/browserify-mime/-/browserify-mime-1.2.9.tgz#aeb1af28de6c0d7a6a2ce40adb68ff18422af31f"
integrity sha1-rrGvKN5sDXpqLOQK22j/GEIq8x8=
caseless@~0.12.0:
version "0.12.0"
resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc"
integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=
combined-stream@^1.0.6, combined-stream@~1.0.6:
version "1.0.8"
resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f"
integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==
dependencies:
delayed-stream "~1.0.0"
core-util-is@1.0.2, core-util-is@~1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=
dashdash@^1.12.0:
version "1.14.1"
resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0"
integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=
dependencies:
assert-plus "^1.0.0"
delayed-stream@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk=
deprecation@^2.0.0, deprecation@^2.3.1:
version "2.3.1"
resolved "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919"
integrity sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==
ecc-jsbn@~0.1.1:
version "0.1.2"
resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9"
integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=
dependencies:
jsbn "~0.1.0"
safer-buffer "^2.1.0"
eventemitter3@^3.1.0:
version "3.1.2"
resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-3.1.2.tgz#2d3d48f9c346698fce83a85d7d664e98535df6e7"
integrity sha512-tvtQIeLVHjDkJYnzf2dgVMxfuSGJeM/7UCG17TT4EumTfNtF+0nebF/4zWOIkCreAbtNqhGEboB6BWrwqNaw4Q==
eventemitter3@^4.0.4:
version "4.0.7"
resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f"
integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==
extend@^3.0.2, extend@~3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa"
integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==
extsprintf@1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05"
integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=
extsprintf@^1.2.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f"
integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8=
fast-deep-equal@^3.1.1:
version "3.1.3"
resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525"
integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==
fast-json-stable-stringify@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633"
integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==
follow-redirects@^1.10.0:
version "1.13.1"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.13.1.tgz#5f69b813376cee4fd0474a3aba835df04ab763b7"
integrity sha512-SSG5xmZh1mkPGyKzjZP8zLjltIfpW32Y5QpdNJyjcfGxK3qo3NDDkZOZSFiGn1A6SclQxY9GzEwAHQ3dmYRWpg==
forever-agent@~0.6.1:
version "0.6.1"
resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91"
integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=
form-data@^2.5.0:
version "2.5.1"
resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.5.1.tgz#f2cbec57b5e59e23716e128fe44d4e5dd23895f4"
integrity sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==
dependencies:
asynckit "^0.4.0"
combined-stream "^1.0.6"
mime-types "^2.1.12"
form-data@~2.3.2:
version "2.3.3"
resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6"
integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==
dependencies:
asynckit "^0.4.0"
combined-stream "^1.0.6"
mime-types "^2.1.12"
getpass@^0.1.1:
version "0.1.7"
resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa"
integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=
dependencies:
assert-plus "^1.0.0"
har-schema@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92"
integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=
har-validator@~5.1.3:
version "5.1.5"
resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd"
integrity sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==
dependencies:
ajv "^6.12.3"
har-schema "^2.0.0"
hash-base@^3.0.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.1.0.tgz#55c381d9e06e1d2997a883b4a3fddfe7f0d3af33"
integrity sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==
dependencies:
inherits "^2.0.4"
readable-stream "^3.6.0"
safe-buffer "^5.2.0"
http-signature@~1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1"
integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=
dependencies:
assert-plus "^1.0.0"
jsprim "^1.2.2"
sshpk "^1.7.0"
inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1:
version "2.0.4"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
is-plain-object@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344"
integrity sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==
is-stream@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ=
is-typedarray@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=
isarray@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=
isstream@~0.1.2:
version "0.1.2"
resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a"
integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=
jsbn@~0.1.0:
version "0.1.1"
resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513"
integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM=
json-edm-parser@0.1.2:
version "0.1.2"
resolved "https://registry.yarnpkg.com/json-edm-parser/-/json-edm-parser-0.1.2.tgz#1e60b0fef1bc0af67bc0d146dfdde5486cd615b4"
integrity sha1-HmCw/vG8CvZ7wNFG393lSGzWFbQ=
dependencies:
jsonparse "~1.2.0"
json-schema-traverse@^0.4.1:
version "0.4.1"
resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==
json-schema@0.2.3:
version "0.2.3"
resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13"
integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=
json-stringify-safe@~5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"
integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=
jsonparse@~1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.2.0.tgz#5c0c5685107160e72fe7489bddea0b44c2bc67bd"
integrity sha1-XAxWhRBxYOcv50ib3eoLRMK8Z70=
jsprim@^1.2.2:
version "1.4.1"
resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2"
integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=
dependencies:
assert-plus "1.0.0"
extsprintf "1.3.0"
json-schema "0.2.3"
verror "1.10.0"
md5.js@1.3.4:
version "1.3.4"
resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.4.tgz#e9bdbde94a20a5ac18b04340fc5764d5b09d901d"
integrity sha1-6b296UogpawYsENA/Fdk1bCdkB0=
dependencies:
hash-base "^3.0.0"
inherits "^2.0.1"
mime-db@1.45.0:
version "1.45.0"
resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.45.0.tgz#cceeda21ccd7c3a745eba2decd55d4b73e7879ea"
integrity sha512-CkqLUxUk15hofLoLyljJSrukZi8mAtgd+yE5uO4tqRZsdsAJKv0O+rFMhVDRJgozy+yG6md5KwuXhD4ocIoP+w==
mime-types@^2.1.12, mime-types@~2.1.19:
version "2.1.28"
resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.28.tgz#1160c4757eab2c5363888e005273ecf79d2a0ecd"
integrity sha512-0TO2yJ5YHYr7M2zzT7gDU1tbwHxEUWBCLt0lscSNpcdAfFyJOVEpRYNS7EXVcTLNj/25QO8gulHC5JtTzSE2UQ==
dependencies:
mime-db "1.45.0"
node-fetch@^2.6.1:
version "2.6.1"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052"
integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==
oauth-sign@~0.9.0:
version "0.9.0"
resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455"
integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==
once@^1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
dependencies:
wrappy "1"
p-finally@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=
p-queue@^6.6.1:
version "6.6.2"
resolved "https://registry.yarnpkg.com/p-queue/-/p-queue-6.6.2.tgz#2068a9dcf8e67dd0ec3e7a2bcb76810faa85e426"
integrity sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==
dependencies:
eventemitter3 "^4.0.4"
p-timeout "^3.2.0"
p-retry@^4.0.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-4.2.0.tgz#ea9066c6b44f23cab4cd42f6147cdbbc6604da5d"
integrity sha512-jPH38/MRh263KKcq0wBNOGFJbm+U6784RilTmHjB/HM9kH9V8WlCpVUcdOmip9cjXOh6MxZ5yk1z2SjDUJfWmA==
dependencies:
"@types/retry" "^0.12.0"
retry "^0.12.0"
p-timeout@^3.2.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-3.2.0.tgz#c7e17abc971d2a7962ef83626b35d635acf23dfe"
integrity sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==
dependencies:
p-finally "^1.0.0"
performance-now@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b"
integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=
process-nextick-args@~1.0.6:
version "1.0.7"
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-1.0.7.tgz#150e20b756590ad3f91093f25a4f2ad8bff30ba3"
integrity sha1-FQ4gt1ZZCtP5EJPyWk8q2L/zC6M=
psl@^1.1.28:
version "1.8.0"
resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24"
integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==
punycode@^2.1.0, punycode@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==
qs@~6.5.2:
version "6.5.2"
resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36"
integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==
readable-stream@^3.6.0:
version "3.6.0"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198"
integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==
dependencies:
inherits "^2.0.3"
string_decoder "^1.1.1"
util-deprecate "^1.0.1"
readable-stream@~2.0.0:
version "2.0.6"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.0.6.tgz#8f90341e68a53ccc928788dacfcd11b36eb9b78e"
integrity sha1-j5A0HmilPMySh4jaz80Rs265t44=
dependencies:
core-util-is "~1.0.0"
inherits "~2.0.1"
isarray "~1.0.0"
process-nextick-args "~1.0.6"
string_decoder "~0.10.x"
util-deprecate "~1.0.1"
request@^2.86.0:
version "2.88.2"
resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3"
integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==
dependencies:
aws-sign2 "~0.7.0"
aws4 "^1.8.0"
caseless "~0.12.0"
combined-stream "~1.0.6"
extend "~3.0.2"
forever-agent "~0.6.1"
form-data "~2.3.2"
har-validator "~5.1.3"
http-signature "~1.2.0"
is-typedarray "~1.0.0"
isstream "~0.1.2"
json-stringify-safe "~5.0.1"
mime-types "~2.1.19"
oauth-sign "~0.9.0"
performance-now "^2.1.0"
qs "~6.5.2"
safe-buffer "^5.1.2"
tough-cookie "~2.5.0"
tunnel-agent "^0.6.0"
uuid "^3.3.2"
retry@^0.12.0:
version "0.12.0"
resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b"
integrity sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs=
safe-buffer@^5.0.1, safe-buffer@^5.1.2, safe-buffer@^5.2.0, safe-buffer@~5.2.0:
version "5.2.1"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0:
version "2.1.2"
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
sax@0.5.x:
version "0.5.8"
resolved "https://registry.yarnpkg.com/sax/-/sax-0.5.8.tgz#d472db228eb331c2506b0e8c15524adb939d12c1"
integrity sha1-1HLbIo6zMcJQaw6MFVJK25OdEsE=
sshpk@^1.7.0:
version "1.16.1"
resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877"
integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==
dependencies:
asn1 "~0.2.3"
assert-plus "^1.0.0"
bcrypt-pbkdf "^1.0.0"
dashdash "^1.12.0"
ecc-jsbn "~0.1.1"
getpass "^0.1.1"
jsbn "~0.1.0"
safer-buffer "^2.0.2"
tweetnacl "~0.14.0"
stream-buffers@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/stream-buffers/-/stream-buffers-3.0.2.tgz#5249005a8d5c2d00b3a32e6e0a6ea209dc4f3521"
integrity sha512-DQi1h8VEBA/lURbSwFtEHnSTb9s2/pwLEaFuNhXwy1Dx3Sa0lOuYT2yNUr4/j2fs8oCAMANtrZ5OrPZtyVs3MQ==
string_decoder@^1.1.1:
version "1.3.0"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
dependencies:
safe-buffer "~5.2.0"
string_decoder@~0.10.x:
version "0.10.31"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94"
integrity sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=
tough-cookie@~2.5.0:
version "2.5.0"
resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2"
integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==
dependencies:
psl "^1.1.28"
punycode "^2.1.1"
tunnel-agent@^0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd"
integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=
dependencies:
safe-buffer "^5.0.1"
tweetnacl@^0.14.3, tweetnacl@~0.14.0:
version "0.14.5"
resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64"
integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=
typescript@^4.1.3:
version "4.1.3"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.1.3.tgz#519d582bd94cba0cf8934c7d8e8467e473f53bb7"
integrity sha512-B3ZIOf1IKeH2ixgHhj6la6xdwR9QrLC5d1VKeCSY4tvkqhF2eqd9O7txNlS0PO3GrBAFIdr3L1ndNwteUbZLYg==
underscore@~1.8.3:
version "1.8.3"
resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.8.3.tgz#4f3fb53b106e6097fcf9cb4109f2a5e9bdfa5022"
integrity sha1-Tz+1OxBuYJf8+ctBCfKl6b36UCI=
universal-user-agent@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-6.0.0.tgz#3381f8503b251c0d9cd21bc1de939ec9df5480ee"
integrity sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==
uri-js@^4.2.2:
version "4.4.1"
resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e"
integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==
dependencies:
punycode "^2.1.0"
util-deprecate@^1.0.1, util-deprecate@~1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
uuid@^3.0.0, uuid@^3.3.2:
version "3.4.0"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==
validator@~9.4.1:
version "9.4.1"
resolved "https://registry.yarnpkg.com/validator/-/validator-9.4.1.tgz#abf466d398b561cd243050112c6ff1de6cc12663"
integrity sha512-YV5KjzvRmSyJ1ee/Dm5UED0G+1L4GZnLN3w6/T+zZm8scVua4sOhYKWTUrKa0H/tMiJyO9QLHMPN+9mB/aMunA==
verror@1.10.0:
version "1.10.0"
resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400"
integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=
dependencies:
assert-plus "^1.0.0"
core-util-is "1.0.2"
extsprintf "^1.2.0"
wrappy@1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
xml2js@0.2.8:
version "0.2.8"
resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.2.8.tgz#9b81690931631ff09d1957549faf54f4f980b3c2"
integrity sha1-m4FpCTFjH/CdGVdUn69U9PmAs8I=
dependencies:
sax "0.5.x"
xmlbuilder@^9.0.7:
version "9.0.7"
resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d"
integrity sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=


@ -57,7 +57,7 @@
"editor-theming": {"assign": []},
"editor-wordnav": {"assign": ["alexdima"]},
"editor-wrapping": {"assign": ["alexdima"]},
"emmet": {"assign": []},
"emmet": {"assign": ["rzhao271"]},
"error-list": {"assign": ["sandy081"]},
"explorer-custom": {"assign": ["sandy081"]},
"extension-host": {"assign": []},
@ -172,7 +172,7 @@
"workbench-tabs": {"assign": ["bpasero"]},
"workbench-touchbar": {"assign": ["bpasero"]},
"workbench-views": {"assign": ["sbatten"]},
"workbench-welcome": {"assign": ["chrmarti"]},
"workbench-welcome": {"assign": ["JacksonKearl"]},
"workbench-window": {"assign": ["bpasero"]},
"workbench-zen": {"assign": ["isidorn"]},
"workspace-edit": {"assign": ["jrieken"]},


@ -81,7 +81,7 @@
"type": "label",
"name": "*duplicate",
"action": "close",
"comment": "Thanks for creating this issue! We figured it's covering the same as another one we already have. Thus, we closed this one as a duplicate. You can search for existing issues [here](https://aka.ms/vscodeissuesearch). See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines.\n\nHappy Coding!"
"comment": "Thanks for creating this issue! We figured it's covering the same as another one we already have. Thus, we closed this one as a duplicate. You can search for existing issues [here](${duplicateQuery}). See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines.\n\nHappy Coding!"
},
{
"type": "comment",


@ -1,6 +1,6 @@
<!-- Thank you for submitting a Pull Request. Please:
* Read our Pull Request guidelines:
https://github.com/microsoft/vscode/wiki/How-to-Contribute#pull-requests.
https://github.com/microsoft/vscode/wiki/How-to-Contribute#pull-requests
* Associate an issue with the Pull Request.
* Ensure that the code is up-to-date with the `master` branch.
* Include a description of the proposed changes and how to test them.


@ -17,7 +17,7 @@ jobs:
uses: actions/checkout@v2
with:
repository: "microsoft/vscode-github-triage-actions"
ref: v41
ref: v42
path: ./actions
- name: Install Actions
if: github.event_name != 'issues' || contains(github.event.issue.labels.*.name, 'author-verification-requested')


@ -0,0 +1,36 @@
name: "Build Chat"
on:
  workflow_run:
    workflows:
      - CI
    types:
      - completed
    branches:
      - master
      - release/*

jobs:
  main:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Setup Node.js 12.x
        uses: actions/setup-node@v1.4.4
        with:
          node-version: "12.x"
      - name: Build
        run: yarn install && yarn run build
        working-directory: .github/actions/build-chat
      - name: Build Chat
        uses: ./.github/actions/build-chat
        with:
          workflow_run_url: ${{ github.event.workflow_run.url }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SLACK_TOKEN: ${{ secrets.SLACK_TOKEN }}
          BUILD_CHAT_STORAGE_CONNECTION_STRING: ${{ secrets.BUILD_CHAT_STORAGE_CONNECTION_STRING }}


@ -11,134 +11,269 @@ on:
- release/*
jobs:
# linux:
# runs-on: ubuntu-latest
# env:
# CHILD_CONCURRENCY: "1"
windows:
name: Windows
runs-on: windows-latest
timeout-minutes: 30
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# steps:
# - uses: actions/checkout@v1
# # TODO: rename azure-pipelines/linux/xvfb.init to github-actions
# - run: |
# sudo apt-get update
# sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libgbm1
# sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
# sudo chmod +x /etc/init.d/xvfb
# sudo update-rc.d xvfb defaults
# sudo service xvfb start
# name: Setup Build Environment
# - uses: actions/setup-node@v1
# with:
# node-version: 10
# # TODO: cache node modules
# - run: yarn --frozen-lockfile
# name: Install Dependencies
# - run: yarn electron x64
# name: Download Electron
# - run: yarn gulp hygiene
# name: Run Hygiene Checks
# - run: yarn monaco-compile-check
# name: Run Monaco Editor Checks
# - run: yarn valid-layers-check
# name: Run Valid Layers Checks
# - run: yarn compile
# name: Compile Sources
# - run: yarn download-builtin-extensions
# name: Download Built-in Extensions
# - run: DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
# name: Run Unit Tests (Electron)
# - run: DISPLAY=:10 yarn test-browser --browser chromium
# name: Run Unit Tests (Browser)
# - run: DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
# name: Run Integration Tests (Electron)
# windows:
# runs-on: windows-2016
# env:
# CHILD_CONCURRENCY: "1"
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# steps:
# - uses: actions/checkout@v1
# - uses: actions/setup-node@v1
# with:
# node-version: 10
# - uses: actions/setup-python@v1
# with:
# python-version: '2.x'
# - run: yarn --frozen-lockfile
# name: Install Dependencies
# - run: yarn electron
# name: Download Electron
# - run: yarn gulp hygiene
# name: Run Hygiene Checks
# - run: yarn monaco-compile-check
# name: Run Monaco Editor Checks
# - run: yarn valid-layers-check
# name: Run Valid Layers Checks
# - run: yarn compile
# name: Compile Sources
# - run: yarn download-builtin-extensions
# name: Download Built-in Extensions
# - run: .\scripts\test.bat --tfs "Unit Tests"
# name: Run Unit Tests (Electron)
# - run: yarn test-browser --browser chromium
# name: Run Unit Tests (Browser)
# - run: .\scripts\test-integration.bat --tfs "Integration Tests"
# name: Run Integration Tests (Electron)
# darwin:
# runs-on: macos-latest
# env:
# CHILD_CONCURRENCY: "1"
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# steps:
# - uses: actions/checkout@v1
# - uses: actions/setup-node@v1
# with:
# node-version: 10
# - run: yarn --frozen-lockfile
# name: Install Dependencies
# - run: yarn electron x64
# name: Download Electron
# - run: yarn gulp hygiene
# name: Run Hygiene Checks
# - run: yarn monaco-compile-check
# name: Run Monaco Editor Checks
# - run: yarn valid-layers-check
# name: Run Valid Layers Checks
# - run: yarn compile
# name: Compile Sources
# - run: yarn download-builtin-extensions
# name: Download Built-in Extensions
# - run: ./scripts/test.sh --tfs "Unit Tests"
# name: Run Unit Tests (Electron)
# - run: yarn test-browser --browser chromium --browser webkit
# name: Run Unit Tests (Browser)
# - run: ./scripts/test-integration.sh --tfs "Integration Tests"
# name: Run Integration Tests (Electron)
monaco:
runs-on: ubuntu-latest
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 12
- uses: actions/setup-python@v2
with:
python-version: "2.x"
- name: Compute node modules cache key
id: nodeModulesCacheKey
run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
- name: Cache node_modules archive
id: cacheNodeModules
uses: actions/cache@v2
with:
path: ".build/node_modules_cache"
key: "${{ runner.os }}-cacheNodeModulesArchive-${{ steps.nodeModulesCacheKey.outputs.value }}"
- name: Extract node_modules archive
if: ${{ steps.cacheNodeModules.outputs.cache-hit == 'true' }}
run: 7z.exe x .build/node_modules_cache/cache.7z -aos
- name: Get yarn cache directory path
id: yarnCacheDirPath
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Cache yarn directory
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
uses: actions/cache@v2
with:
path: ${{ steps.yarnCacheDirPath.outputs.dir }}
key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
restore-keys: ${{ runner.os }}-yarnCacheDir-
- name: Execute yarn
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
run: yarn --frozen-lockfile --network-timeout 180000
- name: Create node_modules archive
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
run: |
mkdir -Force .build
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
mkdir -Force .build/node_modules_cache
7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt
- name: Compile and Download
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" playwright-install download-builtin-extensions
- name: Run Unit Tests (Electron)
run: .\scripts\test.bat
- name: Run Unit Tests (Browser)
run: yarn test-browser --browser chromium
- name: Run Integration Tests (Electron)
run: .\scripts\test-integration.bat
linux:
name: Linux
runs-on: ubuntu-latest
timeout-minutes: 30
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v2
# TODO: rename azure-pipelines/linux/xvfb.init to github-actions
- run: |
- name: Setup Build Environment
run: |
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libgbm1
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
name: Setup Build Environment
- uses: actions/setup-node@v1
- uses: actions/setup-node@v2
with:
node-version: 10
node-version: 12
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn monaco-compile-check
name: Run Monaco Editor Checks
- run: yarn gulp editor-esm-bundle
name: Editor Distro & ESM Bundle
- name: Compute node modules cache key
id: nodeModulesCacheKey
run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
- name: Cache node modules
id: cacheNodeModules
uses: actions/cache@v2
with:
path: "**/node_modules"
key: ${{ runner.os }}-cacheNodeModules11-${{ steps.nodeModulesCacheKey.outputs.value }}
restore-keys: ${{ runner.os }}-cacheNodeModules11-
- name: Get yarn cache directory path
id: yarnCacheDirPath
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Cache yarn directory
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
uses: actions/cache@v2
with:
path: ${{ steps.yarnCacheDirPath.outputs.dir }}
key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
restore-keys: ${{ runner.os }}-yarnCacheDir-
- name: Execute yarn
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
run: yarn --frozen-lockfile --network-timeout 180000
- name: Compile and Download
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" playwright-install download-builtin-extensions
- name: Run Unit Tests (Electron)
id: electron-unit-tests
run: DISPLAY=:10 ./scripts/test.sh
- name: Run Unit Tests (Browser)
id: browser-unit-tests
run: DISPLAY=:10 yarn test-browser --browser chromium
- name: Run Integration Tests (Electron)
id: electron-integration-tests
run: DISPLAY=:10 ./scripts/test-integration.sh
darwin:
name: macOS
runs-on: macos-latest
timeout-minutes: 30
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 12
- name: Compute node modules cache key
id: nodeModulesCacheKey
run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
- name: Cache node modules
id: cacheNodeModules
uses: actions/cache@v2
with:
path: "**/node_modules"
key: ${{ runner.os }}-cacheNodeModules11-${{ steps.nodeModulesCacheKey.outputs.value }}
restore-keys: ${{ runner.os }}-cacheNodeModules11-
- name: Get yarn cache directory path
id: yarnCacheDirPath
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Cache yarn directory
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
uses: actions/cache@v2
with:
path: ${{ steps.yarnCacheDirPath.outputs.dir }}
key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
restore-keys: ${{ runner.os }}-yarnCacheDir-
- name: Execute yarn
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
run: yarn --frozen-lockfile --network-timeout 180000
- name: Compile and Download
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" playwright-install download-builtin-extensions
- name: Run Unit Tests (Electron)
run: DISPLAY=:10 ./scripts/test.sh
- name: Run Unit Tests (Browser)
run: DISPLAY=:10 yarn test-browser --browser chromium
- name: Run Integration Tests (Electron)
run: DISPLAY=:10 ./scripts/test-integration.sh
hygiene:
name: Hygiene, Layering and Monaco Editor
runs-on: ubuntu-latest
timeout-minutes: 30
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 12
- name: Compute node modules cache key
id: nodeModulesCacheKey
run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
- name: Cache node modules
id: cacheNodeModules
uses: actions/cache@v2
with:
path: "**/node_modules"
key: ${{ runner.os }}-cacheNodeModules11-${{ steps.nodeModulesCacheKey.outputs.value }}
restore-keys: ${{ runner.os }}-cacheNodeModules11-
- name: Get yarn cache directory path
id: yarnCacheDirPath
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Cache yarn directory
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
uses: actions/cache@v2
with:
path: ${{ steps.yarnCacheDirPath.outputs.dir }}
key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
restore-keys: ${{ runner.os }}-yarnCacheDir-
- name: Execute yarn
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
run: yarn --frozen-lockfile --network-timeout 180000
- name: Run Hygiene Checks
run: yarn gulp hygiene
- name: Run Valid Layers Checks
run: yarn valid-layers-check
- name: Run Monaco Editor Checks
run: yarn monaco-compile-check
- name: Editor Distro & ESM Bundle
run: yarn gulp editor-esm-bundle
- name: Typings validation prep
run: |
mkdir typings-test
- name: Typings validation
working-directory: ./typings-test
run: |
yarn init -yp
../node_modules/.bin/tsc --init
echo "import '../out-monaco-editor-core';" > a.ts
../node_modules/.bin/tsc --noEmit
- name: Webpack Editor
working-directory: ./test/monaco
run: yarn run bundle
- name: Compile Editor Tests
working-directory: ./test/monaco
run: yarn run compile
- name: Download Playwright
run: yarn playwright-install
- name: Run Editor Tests
timeout-minutes: 5
working-directory: ./test/monaco
run: yarn test

@@ -13,7 +13,7 @@ jobs:
       with:
         repository: "microsoft/vscode-github-triage-actions"
         path: ./actions
-        ref: v41
+        ref: v42
     - name: Install Actions
       run: npm install --production --prefix ./actions
     - name: Run Commands

@@ -11,7 +11,7 @@ jobs:
       uses: actions/checkout@v2
       with:
         repository: "microsoft/vscode-github-triage-actions"
-        ref: v41
+        ref: v42
         path: ./actions
     - name: Install Actions
       run: npm install --production --prefix ./actions

@@ -13,7 +13,7 @@ jobs:
       uses: actions/checkout@v2
       with:
         repository: "microsoft/vscode-github-triage-actions"
-        ref: v41
+        ref: v42
         path: ./actions
     - name: Install Actions
       run: npm install --production --prefix ./actions

@@ -11,7 +11,7 @@ jobs:
       uses: actions/checkout@v2
       with:
         repository: "microsoft/vscode-github-triage-actions"
-        ref: v41
+        ref: v42
         path: ./actions
     - name: Install Actions
       run: npm install --production --prefix ./actions

@@ -13,7 +13,7 @@ jobs:
       uses: actions/checkout@v2
       with:
         repository: "microsoft/vscode-github-triage-actions"
-        ref: v41
+        ref: v42
         path: ./actions
     - name: Install Actions
       if: contains(github.event.issue.labels.*.name, '*english-please')

@@ -18,7 +18,7 @@ jobs:
       with:
         repository: "microsoft/vscode-github-triage-actions"
         path: ./actions
-        ref: v41
+        ref: v42
     - name: Install Actions
       if: github.event_name != 'issues' || contains(github.event.issue.labels.*.name, 'feature-request')
       run: npm install --production --prefix ./actions

@@ -14,7 +14,7 @@ jobs:
       with:
         repository: "microsoft/vscode-github-triage-actions"
         path: ./actions
-        ref: v41
+        ref: v42
     - name: Install Actions
       run: npm install --production --prefix ./actions
     - name: Install Storage Module

@@ -14,7 +14,7 @@ jobs:
       with:
         repository: "microsoft/vscode-github-triage-actions"
         path: ./actions
-        ref: v41
+        ref: v42
     - name: Install Actions
       run: npm install --production --prefix ./actions
     - name: Run Locker

@@ -14,7 +14,7 @@ jobs:
       with:
         repository: "microsoft/vscode-github-triage-actions"
         path: ./actions
-        ref: v41
+        ref: v42
     - name: Install Actions
       run: npm install --production --prefix ./actions
     - name: Run Needs More Info Closer

@@ -11,7 +11,7 @@ jobs:
       uses: actions/checkout@v2
       with:
         repository: "microsoft/vscode-github-triage-actions"
-        ref: v41
+        ref: v42
         path: ./actions
     - name: Install Actions
       run: npm install --production --prefix ./actions

@@ -11,7 +11,7 @@ jobs:
       uses: actions/checkout@v2
       with:
         repository: "microsoft/vscode-github-triage-actions"
-        ref: v41
+        ref: v42
         path: ./actions
     - name: Install Actions
       run: npm install --production --prefix ./actions

@@ -13,7 +13,7 @@ jobs:
       uses: actions/checkout@v2
       with:
         repository: "microsoft/vscode-github-triage-actions"
-        ref: v41
+        ref: v42
         path: ./actions
     - name: Checkout Repo
       if: github.event_name != 'issues'

@@ -14,7 +14,7 @@ jobs:
       with:
         repository: "microsoft/vscode-github-triage-actions"
         path: ./actions
-        ref: v41
+        ref: v42
     - name: Install Actions
       if: contains(github.event.issue.labels.*.name, 'testplan-item') || contains(github.event.issue.labels.*.name, 'invalid-testplan-item')
       run: npm install --production --prefix ./actions

lib/vscode/.gitignore vendored

@@ -5,26 +5,9 @@ Thumbs.db
 node_modules/
 .build/
 extensions/**/dist/
-out/
-out-build/
-out-editor/
-out-editor-src/
-out-editor-build/
-out-editor-esm/
-out-editor-esm-bundle/
-out-editor-min/
-out-monaco-editor-core/
-out-vscode/
-out-vscode-min/
-out-vscode-reh/
-out-vscode-reh-min/
-out-vscode-reh-pkg/
-out-vscode-reh-web/
-out-vscode-reh-web-min/
-out-vscode-reh-web-pkg/
-out-vscode-web/
-out-vscode-web-min/
-out-vscode-web-pkg/
+/out*/
+/extensions/**/out/
+# src/vs/server NOTE@coder: So our code isn't ignored.
 resources/server
 build/node_modules
 coverage/

@@ -3,7 +3,6 @@
   // for the documentation about the extensions.json format
   "recommendations": [
     "dbaeumer.vscode-eslint",
-    "EditorConfig.EditorConfig",
-    "msjsdiag.debugger-for-chrome"
+    "EditorConfig.EditorConfig"
   ]
 }

@@ -203,7 +203,7 @@
    {
      "type": "pwa-chrome",
      "request": "launch",
-     "name": "Launch VS Code",
+     "name": "Launch VS Code Internal",
      "windows": {
        "runtimeExecutable": "${workspaceFolder}/scripts/code.bat"
      },
@@ -235,7 +235,9 @@
        "${workspaceFolder}/out/**/*.js"
      ],
      "browserLaunchLocation": "workspace",
-     "preLaunchTask": "Ensure Prelaunch Dependencies",
+     "presentation": {
+       "hidden": true,
+     }
    },
    {
      "type": "node",
@@ -472,7 +474,7 @@
      "name": "VS Code",
      "stopAll": true,
      "configurations": [
-       "Launch VS Code",
+       "Launch VS Code Internal",
        "Attach to Main Process",
        "Attach to Extension Host",
        "Attach to Shared Process",
@@ -486,7 +488,7 @@
    {
      "name": "Search and Renderer processes",
      "configurations": [
-       "Launch VS Code",
+       "Launch VS Code Internal",
        "Attach to Search Process"
      ],
      "presentation": {
@@ -497,7 +499,7 @@
    {
      "name": "Renderer and Extension Host processes",
      "configurations": [
-       "Launch VS Code",
+       "Launch VS Code Internal",
        "Attach to Extension Host"
      ],
      "presentation": {
@@ -526,6 +528,14 @@
        "group": "1_vscode",
        "order": 2
      }
-   }
+   },
+   {
+     "name": "Launch VS Code",
+     "stopAll": true,
+     "configurations": [
+       "Launch VS Code Internal",
+     ],
+     "preLaunchTask": "Ensure Prelaunch Dependencies"
+   },
  ]
}

@@ -8,7 +8,7 @@
   {
     "kind": 2,
     "language": "github-issues",
-    "value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"November 2020\"",
+    "value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"January 2021\"",
     "editable": true
   },
   {

@@ -8,8 +8,8 @@
   {
     "kind": 2,
     "language": "github-issues",
-    "value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server\n\n$MILESTONE=milestone:\"November 2020\"",
-    "editable": false
+    "value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper\n\n$MILESTONE=milestone:\"January 2021\"",
+    "editable": true
   },
   {
     "kind": 1,
@@ -92,7 +92,7 @@
   {
     "kind": 2,
     "language": "github-issues",
-    "value": "$REPOS $MILESTONE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
+    "value": "$REPOS $MILESTONE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -label:z-author-verified -label:unreleased",
     "editable": true
   },
   {
@@ -104,7 +104,7 @@
   {
     "kind": 2,
     "language": "github-issues",
-    "value": "$REPOS $MILESTONE label:candidate",
+    "value": "$REPOS $MILESTONE is:open label:candidate",
     "editable": true
   }
 ]

@@ -8,8 +8,8 @@
   {
     "kind": 2,
     "language": "github-issues",
-    "value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server\n\n$MILESTONE=milestone:\"November 2020\"\n\n$MINE=assignee:@me",
-    "editable": false
+    "value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server\n\n$MILESTONE=milestone:\"January 2021\"\n\n$MINE=assignee:@me",
+    "editable": true
   },
   {
     "kind": 1,
@@ -176,7 +176,7 @@
   {
     "kind": 2,
     "language": "github-issues",
-    "value": "$REPOS $MILESTONE -$MINE is:issue is:closed author:@me sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
+    "value": "$REPOS $MILESTONE -$MINE is:issue is:closed author:@me sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
     "editable": true
   },
   {
@@ -188,7 +188,7 @@
   {
     "kind": 2,
     "language": "github-issues",
-    "value": "$REPOS $MILESTONE -$MINE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -author:aeschli -author:alexdima -author:alexr00 -author:AmandaSilver -author:bamurtaugh -author:bpasero -author:btholt -author:chrisdias -author:chrmarti -author:Chuxel -author:connor4312 -author:dbaeumer -author:deepak1556 -author:devinvalenciano -author:digitarald -author:eamodio -author:egamma -author:fiveisprime -author:gregvanl -author:isidorn -author:ItalyPaleAle -author:JacksonKearl -author:joaomoreno -author:jrieken -author:kieferrm -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:ornellaalt -author:orta -author:rebornix -author:RMacfarlane -author:roblourens -author:rzhao271 -author:sana-ajani -author:sandy081 -author:sbatten -author:stevencl -author:Tyriar -author:weinand",
+    "value": "$REPOS $MILESTONE -$MINE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -author:aeschli -author:alexdima -author:alexr00 -author:AmandaSilver -author:bamurtaugh -author:bpasero -author:btholt -author:chrisdias -author:chrmarti -author:Chuxel -author:connor4312 -author:dbaeumer -author:deepak1556 -author:devinvalenciano -author:digitarald -author:eamodio -author:egamma -author:fiveisprime -author:gregvanl -author:isidorn -author:ItalyPaleAle -author:JacksonKearl -author:joaomoreno -author:jrieken -author:kieferrm -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:ornellaalt -author:orta -author:rebornix -author:RMacfarlane -author:roblourens -author:rzhao271 -author:sana-ajani -author:sandy081 -author:sbatten -author:stevencl -author:Tyriar -author:weinand -author:TylerLeonhardt -author:lramos15",
     "editable": true
   },
   {
@@ -200,7 +200,7 @@
   {
     "kind": 2,
     "language": "github-issues",
-    "value": "$REPOS $MILESTONE -$MINE is:issue is:closed -author:@me sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
+    "value": "$REPOS $MILESTONE -$MINE is:issue is:closed -author:@me sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
     "editable": true
   },
   {

@@ -8,7 +8,7 @@
   {
     "kind": 2,
     "language": "github-issues",
-    "value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"November 2020\"",
+    "value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"January 2021\"",
     "editable": true
   },
   {
@@ -83,6 +83,24 @@
     "value": "$repos assignee:@me is:open milestone:\"Backlog Candidates\"",
     "editable": false
   },
+  {
+    "kind": 1,
+    "language": "markdown",
+    "value": "### Personal Inbox\n",
+    "editable": true
+  },
+  {
+    "kind": 1,
+    "language": "markdown",
+    "value": "\n#### Missing Type label",
+    "editable": true
+  },
+  {
+    "kind": 2,
+    "language": "github-issues",
+    "value": "$repos assignee:@me is:open type:issue -label:bug -label:\"needs more info\" -label:feature-request -label:under-discussion -label:debt -label:plan-item -label:upstream",
+    "editable": true
+  },
   {
     "kind": 1,
     "language": "markdown",


@ -0,0 +1,44 @@
[
{
"kind": 1,
"language": "markdown",
"value": "## Papercuts\n\nThis notebook serves as an ongoing collection of paper cut issues that we encounter while dogfooding. With that in mind only promote issues that really turn you off, e.g issues that make you wanna stop use VS Code or its extensions. To mark an issue (bug, feature-request, etc) as paper cut add the labels: `papercut :drop_of_blood:`",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "## All Papercuts\n\nThese are all papercut issues that we encounter while dogfoodingf vscode or extensions that we author. ",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "repo:microsoft/vscode is:open -label:notebook label:\"papercut :drop_of_blood:\"",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "## Native Notebook",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "repo:microsoft/vscode is:open label:notebook label:\"papercut :drop_of_blood:\"",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "### My Papercuts",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "repo:microsoft/vscode is:open assignee:@me label:\"papercut :drop_of_blood:\"",
"editable": true
}
]

@@ -14,7 +14,7 @@
   {
     "kind": 2,
     "language": "github-issues",
-    "value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"November 2020\"",
+    "value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"January 2021\"",
     "editable": true
   },
   {

@@ -71,10 +71,12 @@
     "files.insertFinalNewline": false,
   },
   "[typescript]": {
-    "editor.defaultFormatter": "vscode.typescript-language-features"
+    "editor.defaultFormatter": "vscode.typescript-language-features",
+    "editor.formatOnSave": true,
   },
   "[javascript]": {
-    "editor.defaultFormatter": "vscode.typescript-language-features"
+    "editor.defaultFormatter": "vscode.typescript-language-features",
+    "editor.formatOnSave": true,
   },
   "typescript.tsc.autoDetect": "off"
 }

@@ -62,7 +62,8 @@
       "group": {
         "kind": "build",
         "isDefault": true
-      }
+      },
+      "problemMatcher": []
     },
     {
       "type": "npm",
@@ -90,7 +91,8 @@
         "Kill Build VS Code Core",
         "Kill Build VS Code Extensions"
       ],
-      "group": "build"
+      "group": "build",
+      "problemMatcher": []
     },
     {
       "type": "npm",
@@ -203,7 +205,7 @@
         "base": "$tsc",
         "applyTo": "allDocuments",
         "owner": "tsec"
-      },
+      }
     ],
     "group": "build",
     "label": "npm: tsec-compile-check",

lib/vscode/.yarnrc Normal file

@ -0,0 +1,3 @@
disturl "https://electronjs.org/headers"
target "11.2.1"
runtime "electron"

@@ -1,5 +1,4 @@
 # Visual Studio Code - Open Source ("Code - OSS")
-[![Build Status](https://dev.azure.com/vscode/VSCode/_apis/build/status/VS%20Code?branchName=master)](https://aka.ms/vscode-builds)
 [![Feature Requests](https://img.shields.io/github/issues/microsoft/vscode/feature-request.svg)](https://github.com/microsoft/vscode/issues?q=is%3Aopen+is%3Aissue+label%3Afeature-request+sort%3Areactions-%2B1-desc)
 [![Bugs](https://img.shields.io/github/issues/microsoft/vscode/bug.svg)](https://github.com/microsoft/vscode/issues?utf8=✓&q=is%3Aissue+is%3Aopen+label%3Abug)
 [![Gitter](https://img.shields.io/badge/chat-on%20gitter-yellow.svg)](https://gitter.im/Microsoft/vscode)

@@ -28,23 +28,23 @@ This project incorporates components from the projects listed below. The origina
 21. Ionic documentation version 1.2.4 (https://github.com/ionic-team/ionic-site)
 22. ionide/ionide-fsgrammar (https://github.com/ionide/ionide-fsgrammar)
 23. jeff-hykin/cpp-textmate-grammar version 1.12.11 (https://github.com/jeff-hykin/cpp-textmate-grammar)
-24. jeff-hykin/cpp-textmate-grammar version 1.15.3 (https://github.com/jeff-hykin/cpp-textmate-grammar)
+24. jeff-hykin/cpp-textmate-grammar version 1.15.5 (https://github.com/jeff-hykin/cpp-textmate-grammar)
 25. js-beautify version 1.6.8 (https://github.com/beautify-web/js-beautify)
 26. Jxck/assert version 1.0.0 (https://github.com/Jxck/assert)
 27. language-docker (https://github.com/moby/moby)
 28. language-less version 0.34.2 (https://github.com/atom/language-less)
-29. language-php version 0.44.5 (https://github.com/atom/language-php)
+29. language-php version 0.46.0 (https://github.com/atom/language-php)
 30. MagicStack/MagicPython version 1.1.1 (https://github.com/MagicStack/MagicPython)
 31. marked version 1.1.0 (https://github.com/markedjs/marked)
 32. mdn-data version 1.1.12 (https://github.com/mdn/data)
 33. microsoft/TypeScript-TmLanguage version 0.0.1 (https://github.com/microsoft/TypeScript-TmLanguage)
 34. microsoft/vscode-JSON.tmLanguage (https://github.com/microsoft/vscode-JSON.tmLanguage)
-35. microsoft/vscode-markdown-tm-grammar (https://github.com/microsoft/vscode-markdown-tm-grammar)
+35. microsoft/vscode-markdown-tm-grammar version 1.0.0 (https://github.com/microsoft/vscode-markdown-tm-grammar)
 36. microsoft/vscode-mssql version 1.9.0 (https://github.com/microsoft/vscode-mssql)
 37. mmims/language-batchfile version 0.7.5 (https://github.com/mmims/language-batchfile)
 38. octref/language-css version 0.42.11 (https://github.com/octref/language-css)
 39. PowerShell/EditorSyntax version 1.0.0 (https://github.com/PowerShell/EditorSyntax)
-40. rust-syntax version 0.2.13 (https://github.com/dustypomerleau/rust-syntax)
+40. rust-syntax version 0.4.3 (https://github.com/dustypomerleau/rust-syntax)
 41. seti-ui version 0.1.0 (https://github.com/jesseweed/seti-ui)
 42. shaders-tmLanguage version 0.1.0 (https://github.com/tgjones/shaders-tmLanguage)
 43. textmate/asp.vb.net.tmbundle (https://github.com/textmate/asp.vb.net.tmbundle)
@@ -64,7 +64,7 @@ This project incorporates components from the projects listed below. The origina
 57. TypeScript-TmLanguage version 1.0.0 (https://github.com/microsoft/TypeScript-TmLanguage)
 58. Unicode version 12.0.0 (https://home.unicode.org/)
 59. vscode-codicons version 0.0.1 (https://github.com/microsoft/vscode-codicons)
-60. vscode-logfile-highlighter version 2.8.0 (https://github.com/emilast/vscode-logfile-highlighter)
+60. vscode-logfile-highlighter version 2.9.0 (https://github.com/emilast/vscode-logfile-highlighter)
 61. vscode-swift version 0.0.1 (https://github.com/owensd/vscode-swift)
 62. Web Background Synchronization (https://github.com/WICG/background-sync)


@ -1,23 +0,0 @@
jobs:
- job: Windows
pool:
vmImage: VS2017-Win2016
steps:
- template: build/azure-pipelines/win32/continuous-build-win32.yml
- job: Linux
pool:
vmImage: 'Ubuntu-16.04'
steps:
- template: build/azure-pipelines/linux/continuous-build-linux.yml
- job: macOS
pool:
vmImage: macOS-latest
steps:
- template: build/azure-pipelines/darwin/continuous-build-darwin.yml
trigger:
branches:
exclude:
- electron-11.x.y

@@ -1 +1 @@
-2020-11-30T16:21:34.566Z
+2021-01-28T11:52:11.376Z


@ -1,3 +0,0 @@
azure-pipelines/**/*.js
darwin/**/*.js
lib/**/*.js


@ -1,2 +0,0 @@
node_modules/
*.js


@ -0,0 +1,25 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
const { dirs } = require('../../npm/dirs');
const ROOT = path.join(__dirname, '../../../');
const shasum = crypto.createHash('sha1');
shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt')));
shasum.update(fs.readFileSync(path.join(ROOT, '.yarnrc')));
shasum.update(fs.readFileSync(path.join(ROOT, 'remote/.yarnrc')));
// Add `yarn.lock` files
for (let dir of dirs) {
const yarnLockPath = path.join(ROOT, dir, 'yarn.lock');
shasum.update(fs.readFileSync(yarnLockPath));
}
// Add any other command line arguments
for (let i = 2; i < process.argv.length; i++) {
shasum.update(process.argv[i]);
}
process.stdout.write(shasum.digest('hex'));


@ -0,0 +1,32 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as path from 'path';
import * as crypto from 'crypto';
const { dirs } = require('../../npm/dirs');
const ROOT = path.join(__dirname, '../../../');
const shasum = crypto.createHash('sha1');
shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt')));
shasum.update(fs.readFileSync(path.join(ROOT, '.yarnrc')));
shasum.update(fs.readFileSync(path.join(ROOT, 'remote/.yarnrc')));
// Add `yarn.lock` files
for (let dir of dirs) {
const yarnLockPath = path.join(ROOT, dir, 'yarn.lock');
shasum.update(fs.readFileSync(yarnLockPath));
}
// Add any other command line arguments
for (let i = 2; i < process.argv.length; i++) {
shasum.update(process.argv[i]);
}
process.stdout.write(shasum.digest('hex'));


@ -0,0 +1,94 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const crypto = require("crypto");
const azure = require("azure-storage");
const mime = require("mime");
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
if (process.argv.length !== 6) {
console.error('Usage: node createAsset.js PLATFORM TYPE NAME FILE');
process.exit(-1);
}
function hashStream(hashName, stream) {
return new Promise((c, e) => {
const shasum = crypto.createHash(hashName);
stream
.on('data', shasum.update.bind(shasum))
.on('error', e)
.on('close', () => c(shasum.digest('hex')));
});
}
async function doesAssetExist(blobService, quality, blobName) {
const existsResult = await new Promise((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService, quality, blobName, filePath, fileName) {
const blobOptions = {
contentSettings: {
contentType: mime.lookup(filePath),
contentDisposition: `attachment; filename="${fileName}"`,
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, filePath, blobOptions, err => err ? e(err) : c()));
}
function getEnv(name) {
const result = process.env[name];
if (typeof result === 'undefined') {
throw new Error('Missing env: ' + name);
}
return result;
}
async function main() {
const [, , platform, type, fileName, filePath] = process.argv;
const quality = getEnv('VSCODE_QUALITY');
const commit = getEnv('BUILD_SOURCEVERSION');
console.log('Creating asset...');
const stat = await new Promise((c, e) => fs.stat(filePath, (err, stat) => err ? e(err) : c(stat)));
const size = stat.size;
console.log('Size:', size);
const stream = fs.createReadStream(filePath);
const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);
console.log('SHA1:', sha1hash);
console.log('SHA256:', sha256hash);
const blobName = commit + '/' + fileName;
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
const blobExists = await doesAssetExist(blobService, quality, blobName);
if (blobExists) {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
}
console.log('Uploading blobs to Azure storage...');
await uploadBlob(blobService, quality, blobName, filePath, fileName);
console.log('Blobs successfully uploaded.');
const asset = {
platform,
type,
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
hash: sha1hash,
sha256hash,
size
};
// Remove this if we ever need to rollback fast updates for windows
if (/win32/.test(platform)) {
asset.supportsFastUpdate = true;
}
console.log('Asset:', JSON.stringify(asset, null, ' '));
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await retry_1.retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
}
main().then(() => {
console.log('Asset successfully created');
process.exit(0);
}, err => {
console.error(err);
process.exit(1);
});


@ -0,0 +1,51 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
if (process.argv.length !== 3) {
console.error('Usage: node createBuild.js VERSION');
process.exit(-1);
}
function getEnv(name) {
const result = process.env[name];
if (typeof result === 'undefined') {
throw new Error('Missing env: ' + name);
}
return result;
}
async function main() {
const [, , _version] = process.argv;
const quality = getEnv('VSCODE_QUALITY');
const commit = getEnv('BUILD_SOURCEVERSION');
const queuedBy = getEnv('BUILD_QUEUEDBY');
const sourceBranch = getEnv('BUILD_SOURCEBRANCH');
const version = _version + (quality === 'stable' ? '' : `-${quality}`);
console.log('Creating build...');
console.log('Quality:', quality);
console.log('Version:', version);
console.log('Commit:', commit);
const build = {
id: commit,
timestamp: (new Date()).getTime(),
version,
isReleased: false,
sourceBranch,
queuedBy,
assets: [],
updates: {}
};
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await retry_1.retry(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
}
main().then(() => {
console.log('Build successfully created');
process.exit(0);
}, err => {
console.error(err);
process.exit(1);
});

@@ -10,8 +10,8 @@ git clone --depth 1 https://github.com/microsoft/vscode-node-debug2.git
 git clone --depth 1 https://github.com/microsoft/vscode-node-debug.git
 git clone --depth 1 https://github.com/microsoft/vscode-html-languageservice.git
 git clone --depth 1 https://github.com/microsoft/vscode-json-languageservice.git
-node $BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints
-node $BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o .
+node $BUILD_SOURCESDIRECTORY/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints
+node $BUILD_SOURCESDIRECTORY/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o .
 mkdir -p $BUILD_SOURCESDIRECTORY/.build/telemetry
 mv declarations-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-core.json
 mv config-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-extensions.json


@ -0,0 +1,14 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const retry_1 = require("./retry");
const { installBrowsersWithProgressBar } = require('playwright/lib/install/installer');
const playwrightPath = path.dirname(require.resolve('playwright'));
async function install() {
await retry_1.retry(() => installBrowsersWithProgressBar(playwrightPath));
}
install();


@ -0,0 +1,15 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as path from 'path';
import { retry } from './retry';
const { installBrowsersWithProgressBar } = require('playwright/lib/install/installer');
const playwrightPath = path.dirname(require.resolve('playwright'));
async function install() {
await retry(() => installBrowsersWithProgressBar(playwrightPath));
}
install();


@ -0,0 +1,40 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
if (process.argv.length !== 3) {
console.error('Usage: node listNodeModules.js OUTPUT_FILE');
process.exit(-1);
}
const ROOT = path.join(__dirname, '../../../');
function findNodeModulesFiles(location, inNodeModules, result) {
const entries = fs.readdirSync(path.join(ROOT, location));
for (const entry of entries) {
const entryPath = `${location}/${entry}`;
if (/(^\/out)|(^\/src$)|(^\/.git$)|(^\/.build$)/.test(entryPath)) {
continue;
}
let stat;
try {
stat = fs.statSync(path.join(ROOT, entryPath));
}
catch (err) {
continue;
}
if (stat.isDirectory()) {
findNodeModulesFiles(entryPath, inNodeModules || (entry === 'node_modules'), result);
}
else {
if (inNodeModules) {
result.push(entryPath.substr(1));
}
}
}
}
const result = [];
findNodeModulesFiles('', false, result);
fs.writeFileSync(process.argv[2], result.join('\n') + '\n');


@ -0,0 +1,46 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as path from 'path';
if (process.argv.length !== 3) {
console.error('Usage: node listNodeModules.js OUTPUT_FILE');
process.exit(-1);
}
const ROOT = path.join(__dirname, '../../../');
function findNodeModulesFiles(location: string, inNodeModules: boolean, result: string[]) {
const entries = fs.readdirSync(path.join(ROOT, location));
for (const entry of entries) {
const entryPath = `${location}/${entry}`;
if (/(^\/out)|(^\/src$)|(^\/.git$)|(^\/.build$)/.test(entryPath)) {
continue;
}
let stat: fs.Stats;
try {
stat = fs.statSync(path.join(ROOT, entryPath));
} catch (err) {
continue;
}
if (stat.isDirectory()) {
findNodeModulesFiles(entryPath, inNodeModules || (entry === 'node_modules'), result);
} else {
if (inNodeModules) {
result.push(entryPath.substr(1));
}
}
}
}
const result: string[] = [];
findNodeModulesFiles('', false, result);
fs.writeFileSync(process.argv[2], result.join('\n') + '\n');


@ -0,0 +1,71 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const azure = require("azure-storage");
const mime = require("mime");
const minimist = require("minimist");
const path_1 = require("path");
const fileNames = [
'fake.html',
'host.js',
'index.html',
'main.js',
'service-worker.js'
];
async function assertContainer(blobService, container) {
await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesBlobExist(blobService, container, blobName) {
const existsResult = await new Promise((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService, container, blobName, file) {
const blobOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit, files) {
console.log('Publishing...');
console.log('Commit:', commit);
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT'];
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
const blobName = path_1.basename(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
continue;
}
console.log('Uploading blob to Azure storage...');
await uploadBlob(blobService, commit, blobName, file);
}
console.log('Blobs successfully uploaded.');
}
function main() {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
const files = fileNames.map(fileName => path_1.join(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
});
}
if (process.argv.length < 3) {
console.error('Usage: node publish.js <directory>');
process.exit(-1);
}
main();


@ -0,0 +1,50 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
function getEnv(name) {
const result = process.env[name];
if (typeof result === 'undefined') {
throw new Error('Missing env: ' + name);
}
return result;
}
function createDefaultConfig(quality) {
return {
id: quality,
frozen: false
};
}
async function getConfig(client, quality) {
const query = `SELECT TOP 1 * FROM c WHERE c.id = "${quality}"`;
const res = await client.database('builds').container('config').items.query(query).fetchAll();
if (res.resources.length === 0) {
return createDefaultConfig(quality);
}
return res.resources[0];
}
async function main() {
const commit = getEnv('BUILD_SOURCEVERSION');
const quality = getEnv('VSCODE_QUALITY');
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const config = await getConfig(client, quality);
console.log('Quality config:', config);
if (config.frozen) {
console.log(`Skipping release because quality ${quality} is frozen.`);
return;
}
console.log(`Releasing build ${commit}...`);
const scripts = client.database('builds').container(quality).scripts;
await retry_1.retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
}
main().then(() => {
console.log('Build successfully released');
process.exit(0);
}, err => {
console.error(err);
process.exit(1);
});


@ -0,0 +1,25 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.retry = void 0;
async function retry(fn) {
for (let run = 1; run <= 10; run++) {
try {
return await fn();
}
catch (err) {
if (!/ECONNRESET/.test(err.message)) {
throw err;
}
const millis = (Math.random() * 200) + (50 * Math.pow(1.5, run));
console.log(`Failed with ECONNRESET, retrying in ${millis}ms...`);
// maximum delay is 10th retry: ~3 seconds
await new Promise(c => setTimeout(c, millis));
}
}
throw new Error('Retried too many times');
}
exports.retry = retry;


@ -0,0 +1,87 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const url = require("url");
const azure = require("azure-storage");
const mime = require("mime");
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
function log(...args) {
console.log(...[`[${new Date().toISOString()}]`, ...args]);
}
function error(...args) {
console.error(...[`[${new Date().toISOString()}]`, ...args]);
}
if (process.argv.length < 3) {
error('Usage: node sync-mooncake.js <quality>');
process.exit(-1);
}
async function sync(commit, quality) {
log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const container = client.database('builds').container(quality);
const query = `SELECT TOP 1 * FROM c WHERE c.id = "${commit}"`;
const res = await container.items.query(query, {}).fetchAll();
if (res.resources.length !== 1) {
throw new Error(`No builds found for ${commit}`);
}
const build = res.resources[0];
log(`Found build for ${commit}, with ${build.assets.length} assets`);
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
// mooncake is fussy and far away, this is needed!
blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
for (const asset of build.assets) {
try {
const blobPath = url.parse(asset.url).path;
if (!blobPath) {
throw new Error(`Failed to parse URL: ${asset.url}`);
}
const blobName = blobPath.replace(/^\/\w+\//, '');
log(`Found ${blobName}`);
if (asset.mooncakeUrl) {
log(` Already in Mooncake ✔️`);
continue;
}
const readStream = blobService.createReadStream(quality, blobName, undefined);
const blobOptions = {
contentSettings: {
contentType: mime.lookup(blobPath),
cacheControl: 'max-age=31536000, public'
}
};
const writeStream = mooncakeBlobService.createWriteStreamToBlockBlob(quality, blobName, blobOptions, undefined);
log(` Uploading to Mooncake...`);
await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
log(` Updating build in DB...`);
const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
await retry_1.retry(() => container.scripts.storedProcedure('setAssetMooncakeUrl')
.execute('', [commit, asset.platform, asset.type, mooncakeUrl]));
log(` Done ✔️`);
}
catch (err) {
error(err);
}
}
log(`All done ✔️`);
}
function main() {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
error('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const quality = process.argv[2];
sync(commit, quality).catch(err => {
error(err);
process.exit(1);
});
}
main();


@ -1,79 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "vscode-build-cache"
- script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "vscode-build-cache"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
yarn electron x64
displayName: Download Electron
- script: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: |
yarn valid-layers-check
displayName: Run Valid Layers Checks
- script: |
yarn compile
displayName: Compile Sources
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- script: |
./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests (Electron)
- script: |
yarn test-browser --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
displayName: Run Unit Tests (Browser)
- script: |
./scripts/test-integration.sh --tfs "Integration Tests"
displayName: Run Integration Tests (Electron)
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-macos
targetPath: .build/crashes
displayName: "Publish Crash Reports"
continueOnError: true
condition: failed()
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: "*-results.xml"
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: succeededOrFailed()


@ -1,10 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
</dict>
</plist>

View File

@ -1,32 +1,7 @@
steps: steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
echo -n $ENABLE_TERRAPIN > .build/terrapin
displayName: Prepare compilation cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "12.14.1" versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1 - task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets" displayName: "Azure Key Vault: Get Secrets"
@ -34,9 +9,21 @@ steps:
azureSubscription: "vscode-builds-subscription" azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode KeyVaultName: vscode
- task: DownloadPipelineArtifact@2
inputs:
artifact: Compilation
path: $(Build.ArtifactStagingDirectory)
displayName: Download compilation output
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
- script: | - script: |
set -e set -e
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
displayName: Extract compilation output
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
- script: |
set -e
cat << EOF > ~/.netrc cat << EOF > ~/.netrc
machine github.com machine github.com
login vscode login vscode
@ -55,40 +42,45 @@ steps:
- script: | - script: |
set -e set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git" git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro displayName: Merge distro
- script: | - script: |
npx https://aka.ms/enablesecurefeed standAlone mkdir -p .build
displayName: Switch to Terrapin packages node build/azure-pipelines/common/computeNodeModulesCacheKey.js $VSCODE_ARCH $ENABLE_TERRAPIN > .build/yarnlockhash
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- script: |
echo -n $(VSCODE_ARCH) > .build/arch
displayName: Prepare yarn cache flags displayName: Prepare yarn cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1 - task: Cache@2
inputs: inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock" key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
targetfolder: "**/node_modules, !**/node_modules/**/node_modules" path: .build/node_modules_cache
vstsFeed: "npm-vscode" cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
- script: | - script: |
set -e set -e
npm install -g node-gyp@7.1.0 tar -xzf .build/node_modules_cache/cache.tgz
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules cache
- script: |
set -e
npm install -g node-gyp@latest
node-gyp --version node-gyp --version
displayName: Update node-gyp displayName: Update node-gyp
condition: and(succeeded(), ne(variables['CacheRestored'], 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: |
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- script: | - script: |
set -e set -e
export npm_config_arch=$(VSCODE_ARCH) export npm_config_arch=$(VSCODE_ARCH)
export npm_config_node_gyp=$(which node-gyp) export npm_config_node_gyp=$(which node-gyp)
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
export CHILD_CONCURRENCY="1"
for i in {1..3}; do # try 3 times, for Terrapin for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break yarn --frozen-lockfile && break
@@ -98,25 +90,19 @@ steps:
fi fi
echo "Yarn failed $i, trying again..." echo "Yarn failed $i, trying again..."
done done
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: | - script: |
set -e set -e
export npm_config_arch=$(VSCODE_ARCH) node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
export npm_config_node_gyp=$(which node-gyp) mkdir -p .build/node_modules_cache
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
ls /Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/ condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
yarn postinstall displayName: Create node_modules archive
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: | - script: |
set -e set -e
@@ -126,10 +112,21 @@ steps:
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
ls /Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/ ls /Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/
yarn electron-rebuild yarn electron-rebuild
# remove once https://github.com/prebuild/prebuild-install/pull/140 is merged and found in keytar
cd ./node_modules/keytar cd ./node_modules/keytar
node-gyp rebuild node-gyp rebuild
displayName: Rebuild native modules for ARM64 displayName: Rebuild native modules for ARM64
condition: eq(variables['VSCODE_ARCH'], 'arm64') condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'arm64'))
- download: current
artifact: vscode-darwin-x64
displayName: Download x64 artifact
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
- download: current
artifact: vscode-darwin-arm64
displayName: Download arm64 artifact
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
- script: | - script: |
set -e set -e
@@ -141,6 +138,7 @@ steps:
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \ VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-darwin-$(VSCODE_ARCH)-min-ci yarn gulp vscode-darwin-$(VSCODE_ARCH)-min-ci
displayName: Build displayName: Build
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
- script: | - script: |
set -e set -e
@@ -148,14 +146,23 @@ steps:
yarn gulp vscode-reh-darwin-min-ci yarn gulp vscode-reh-darwin-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \ VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-darwin-min-ci yarn gulp vscode-reh-web-darwin-min-ci
displayName: Build reh displayName: Build Server
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
- script: | - script: |
set -e set -e
yarn electron $(VSCODE_ARCH) unzip $(Pipeline.Workspace)/vscode-darwin-x64/VSCode-darwin-x64.zip -d $(agent.builddirectory)/vscode-x64
displayName: Download Electron unzip $(Pipeline.Workspace)/vscode-darwin-arm64/VSCode-darwin-arm64.zip -d $(agent.builddirectory)/vscode-arm64
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false')) DEBUG=* node build/darwin/create-universal-app.js
displayName: Create Universal App
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install"
displayName: Download Electron and Playwright
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
set -e set -e
@@ -167,17 +174,26 @@ steps:
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain
VSCODE_ARCH="$(VSCODE_ARCH)" DEBUG=electron-osx-sign* node build/darwin/sign.js VSCODE_ARCH="$(VSCODE_ARCH)" DEBUG=electron-osx-sign* node build/darwin/sign.js
displayName: Set Hardened Entitlements displayName: Set Hardened Entitlements
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: | - script: |
set -e set -e
./scripts/test.sh --build --tfs "Unit Tests" ./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron) displayName: Run unit tests (Electron)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
set -e set -e
yarn test-browser --build --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests" yarn test-browser --build --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser) displayName: Run unit tests (Browser)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn --cwd test/integration/browser compile
displayName: Compile integration tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
@@ -191,13 +207,15 @@ steps:
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \ VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./scripts/test-integration.sh --build --tfs "Integration Tests" ./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron) displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
set -e set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \ VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
./resources/server/test/test-web-integration.sh --browser webkit ./resources/server/test/test-web-integration.sh --browser webkit
displayName: Run integration tests (Browser) displayName: Run integration tests (Browser)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
@@ -208,22 +226,29 @@ steps:
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \ VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./resources/server/test/test-remote-integration.sh ./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron) displayName: Run remote integration tests (Electron)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn --cwd test/smoke compile
displayName: Compile smoke tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
set -e set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`" APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest --build "$APP_ROOT/$APP_NAME" yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME"
continueOnError: true timeoutInMinutes: 5
displayName: Run smoke tests (Electron) displayName: Run smoke tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
set -e set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \ VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
yarn smoketest --web --headless yarn smoketest-no-compile --web --headless
continueOnError: true timeoutInMinutes: 5
displayName: Run smoke tests (Browser) displayName: Run smoke tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
@@ -240,12 +265,13 @@ steps:
inputs: inputs:
testResultsFiles: "*-results.xml" testResultsFiles: "*-results.xml"
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results" searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: succeededOrFailed() condition: and(succeededOrFailed(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
set -e set -e
pushd $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) && zip -r -X -y $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip * && popd pushd $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) && zip -r -X -y $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip * && popd
displayName: Archive build displayName: Archive build
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1 - task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs: inputs:
@@ -270,10 +296,12 @@ steps:
] ]
SessionTimeout: 60 SessionTimeout: 60
displayName: Codesign displayName: Codesign
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: | - script: |
zip -d $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip "*.pkg" zip -d $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip "*.pkg"
displayName: Clean Archive displayName: Clean
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: | - script: |
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
@@ -281,6 +309,7 @@ steps:
BUNDLE_IDENTIFIER=$(node -p "require(\"$APP_ROOT/$APP_NAME/Contents/Resources/app/product.json\").darwinBundleIdentifier") BUNDLE_IDENTIFIER=$(node -p "require(\"$APP_ROOT/$APP_NAME/Contents/Resources/app/product.json\").darwinBundleIdentifier")
echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER" echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER"
displayName: Export bundle identifier displayName: Export bundle identifier
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1 - task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs: inputs:
@@ -305,6 +334,7 @@ steps:
] ]
SessionTimeout: 60 SessionTimeout: 60
displayName: Notarization displayName: Notarization
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: | - script: |
set -e set -e
@@ -312,7 +342,7 @@ steps:
APP_NAME="`ls $APP_ROOT | head -n 1`" APP_NAME="`ls $APP_ROOT | head -n 1`"
"$APP_ROOT/$APP_NAME/Contents/Resources/app/bin/code" --export-default-configuration=.build "$APP_ROOT/$APP_NAME/Contents/Resources/app/bin/code" --export-default-configuration=.build
displayName: Verify start after signing (export configuration) displayName: Verify start after signing (export configuration)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64')) condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'arm64'), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: | - script: |
set -e set -e
@@ -323,13 +353,29 @@ steps:
VSCODE_ARCH="$(VSCODE_ARCH)" \ VSCODE_ARCH="$(VSCODE_ARCH)" \
./build/azure-pipelines/darwin/publish.sh ./build/azure-pipelines/darwin/publish.sh
displayName: Publish displayName: Publish
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
artifact: vscode-darwin-$(VSCODE_ARCH)
displayName: Publish archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-darwin.zip
artifact: vscode-server-darwin-$(VSCODE_ARCH)
displayName: Publish server archive
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-darwin-web.zip
artifact: vscode-server-darwin-$(VSCODE_ARCH)-web
displayName: Publish web server archive
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: | - script: |
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \ AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
VSCODE_ARCH="$(VSCODE_ARCH)" \ VSCODE_ARCH="$(VSCODE_ARCH)" \
yarn gulp upload-vscode-configuration yarn gulp upload-vscode-configuration
displayName: Upload configuration (for Bing settings search) displayName: Upload configuration (for Bing settings search)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
continueOnError: true continueOnError: true
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 - task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
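
The macOS job above swaps the old RestoreCache/SaveCache tasks for the built-in Cache@2 task: a key is computed from the lockfiles (build/azure-pipelines/common/computeNodeModulesCacheKey.js), a hit restores node_modules by untarring cache.tgz, and a miss installs and re-archives via listNodeModules.js. A minimal local sketch of the same restore-or-rebuild pattern, using shasum in place of the cache-key script and a hypothetical ~/.cache/vscode-node-modules directory:

  #!/usr/bin/env bash
  set -euo pipefail

  # Hypothetical local cache directory; the pipeline uses the Cache@2 task instead.
  CACHE_DIR="${HOME}/.cache/vscode-node-modules"
  mkdir -p "$CACHE_DIR"

  # Key the cache on the lockfiles plus the target arch, standing in for
  # computeNodeModulesCacheKey.js (assumes both lockfiles exist, as in this repo).
  KEY="$(cat yarn.lock remote/yarn.lock | shasum -a 256 | awk '{print $1}')-${VSCODE_ARCH:-x64}"
  ARCHIVE="$CACHE_DIR/$KEY.tgz"

  if [ -f "$ARCHIVE" ]; then
    echo "node_modules cache hit: $KEY"
    tar -xzf "$ARCHIVE"
  else
    echo "node_modules cache miss: $KEY"
    yarn --frozen-lockfile
    # The real job archives every nested node_modules folder via listNodeModules.js;
    # a top-level tar keeps this sketch short.
    tar -czf "$ARCHIVE" node_modules
  fi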

View File

@@ -5,6 +5,7 @@ set -e
case $VSCODE_ARCH in case $VSCODE_ARCH in
x64) ASSET_ID="darwin" ;; x64) ASSET_ID="darwin" ;;
arm64) ASSET_ID="darwin-arm64" ;; arm64) ASSET_ID="darwin-arm64" ;;
universal) ASSET_ID="darwin-universal" ;;
esac esac
# publish the build # publish the build

View File

@@ -8,7 +8,7 @@ pr:
steps: steps:
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "12.14.1" versionSpec: "12.18.3"
- task: AzureKeyVault@1 - task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets" displayName: "Azure Key Vault: Get Secrets"

View File

@@ -2,11 +2,12 @@ pool:
vmImage: "Ubuntu-16.04" vmImage: "Ubuntu-16.04"
trigger: none trigger: none
pr: none
steps: steps:
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "12.14.1" versionSpec: "12.18.3"
- task: AzureKeyVault@1 - task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets" displayName: "Azure Key Vault: Get Secrets"

View File

@@ -22,7 +22,7 @@ SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX-web"
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX-web.tar.gz" SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX-web.tar.gz"
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME" SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/vscode-server-*.tar.* rm -rf $ROOT/vscode-server-*-web.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME) (cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX-web" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH" node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX-web" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
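
Narrowing the rm glob from vscode-server-*.tar.* to vscode-server-*-web.tar.* keeps the web-tarball cleanup from deleting any non-web server tarball sitting in the same directory. A self-contained sketch with placeholder names (a temp directory stands in for the agent's build root, and GNU tar is assumed, as in the script) shows the intended behaviour:

  #!/usr/bin/env bash
  set -euo pipefail

  # Temp directory standing in for the agent's build root; names are placeholders.
  ROOT=$(mktemp -d)
  PLATFORM_LINUX=linux-x64
  mkdir -p "$ROOT/vscode-server-$PLATFORM_LINUX-web"
  touch "$ROOT/vscode-server-$PLATFORM_LINUX.tar.gz"   # non-web tarball built earlier

  # The old glob vscode-server-*.tar.* would also have matched the tarball above;
  # the narrowed glob only clears stale web archives.
  rm -rf "$ROOT"/vscode-server-*-web.tar.*
  tar --owner=0 --group=0 -czf "$ROOT/vscode-server-$PLATFORM_LINUX-web.tar.gz" \
    -C "$ROOT" "vscode-server-$PLATFORM_LINUX-web"

  ls "$ROOT"/vscode-server-*.tar.gz   # both archives remain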

View File

@@ -1,92 +0,0 @@
steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "vscode-build-cache"
- script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "vscode-build-cache"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
yarn electron x64
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: |
yarn valid-layers-check
displayName: Run Valid Layers Checks
- script: |
yarn compile
displayName: Compile Sources
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- script: |
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests (Electron)
- script: |
DISPLAY=:10 yarn test-browser --browser chromium --tfs "Browser Unit Tests"
displayName: Run Unit Tests (Browser)
- script: |
DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
displayName: Run Integration Tests (Electron)
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-linux
targetPath: .build/crashes
displayName: "Publish Crash Reports"
continueOnError: true
condition: failed()
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: "*-results.xml"
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: succeededOrFailed()
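
The continuous-build steps removed here ran the Electron suites headlessly by installing Xvfb (via xvfb.init) and exporting DISPLAY=:10; the product-build templates below keep the same DISPLAY=:10 convention. Without the init script, a hand-rolled equivalent (assuming Xvfb is installed) would be roughly:

  #!/usr/bin/env bash
  set -euo pipefail

  # Start a throwaway X server on display :10 and stop it on exit.
  Xvfb :10 -screen 0 1024x768x24 &
  XVFB_PID=$!
  trap 'kill "$XVFB_PID"' EXIT

  export DISPLAY=:10

  # Same invocations as the deleted steps above.
  ./scripts/test.sh --tfs "Unit Tests"
  yarn test-browser --browser chromium --tfs "Browser Unit Tests"
  ./scripts/test-integration.sh --tfs "Integration Tests"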

View File

@@ -1,28 +1,7 @@
steps: steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
echo -n $ENABLE_TERRAPIN > .build/terrapin
displayName: Prepare compilation cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "12.14.1" versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs: inputs:
@@ -34,6 +13,17 @@ steps:
azureSubscription: "vscode-builds-subscription" azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode KeyVaultName: vscode
- task: DownloadPipelineArtifact@2
inputs:
artifact: Compilation
path: $(Build.ArtifactStagingDirectory)
displayName: Download compilation output
- script: |
set -e
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
displayName: Extract compilation output
- task: Docker@1 - task: Docker@1
displayName: "Pull image" displayName: "Pull image"
inputs: inputs:
@@ -45,7 +35,6 @@ steps:
- script: | - script: |
set -e set -e
cat << EOF > ~/.netrc cat << EOF > ~/.netrc
machine github.com machine github.com
login vscode login vscode
@@ -58,30 +47,35 @@ steps:
- script: | - script: |
set -e set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git" git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro displayName: Merge distro
- script: | - script: |
npx https://aka.ms/enablesecurefeed standAlone mkdir -p .build
displayName: Switch to Terrapin packages node build/azure-pipelines/common/computeNodeModulesCacheKey.js "alpine" $ENABLE_TERRAPIN > .build/yarnlockhash
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- script: |
echo -n "alpine" > .build/arch
displayName: Prepare yarn cache flags displayName: Prepare yarn cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1 - task: Cache@2
inputs: inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock" key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
targetfolder: "**/node_modules, !**/node_modules/**/node_modules" path: .build/node_modules_cache
vstsFeed: "npm-vscode" cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
- script: |
set -e
tar -xzf .build/node_modules_cache/cache.tgz
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules cache
- script: |
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- script: | - script: |
set -e set -e
export CHILD_CONCURRENCY="1"
for i in {1..3}; do # try 3 times, for Terrapin for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then if [ $i -eq 3 ]; then
@@ -90,21 +84,19 @@ steps:
fi fi
echo "Yarn failed $i, trying again..." echo "Yarn failed $i, trying again..."
done done
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: | - script: |
set -e set -e
yarn postinstall node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
displayName: Run postinstall scripts mkdir -p .build/node_modules_cache
condition: and(succeeded(), eq(variables['CacheRestored'], 'true')) tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
- script: | - script: |
set -e set -e
@@ -113,7 +105,7 @@ steps:
- script: | - script: |
set -e set -e
docker run -e VSCODE_QUALITY -e CHILD_CONCURRENCY=1 -v $(pwd):/root/vscode -v ~/.netrc:/root/.netrc vscodehub.azurecr.io/vscode-linux-build-agent:alpine /root/vscode/build/azure-pipelines/linux/alpine/install-dependencies.sh docker run -e VSCODE_QUALITY -v $(pwd):/root/vscode -v ~/.netrc:/root/.netrc vscodehub.azurecr.io/vscode-linux-build-agent:alpine /root/vscode/build/azure-pipelines/linux/alpine/install-dependencies.sh
displayName: Prebuild displayName: Prebuild
- script: | - script: |
@@ -129,6 +121,17 @@ steps:
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \ VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
./build/azure-pipelines/linux/alpine/publish.sh ./build/azure-pipelines/linux/alpine/publish.sh
displayName: Publish displayName: Publish
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-linux-alpine.tar.gz
artifact: vscode-server-linux-alpine
displayName: Publish server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-linux-alpine-web.tar.gz
artifact: vscode-server-linux-alpine-web
displayName: Publish web server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 - task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: "Component Detection" displayName: "Component Detection"
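
Several install steps in these templates wrap yarn --frozen-lockfile in a three-attempt loop to ride out transient Terrapin/registry failures; the loop body is split across the hunks above. Factored out as a small helper, the pattern is roughly the following (retry is a name introduced for this sketch, not something the pipeline defines):

  #!/usr/bin/env bash
  set -euo pipefail

  # Run a command up to three times; fail only if every attempt fails.
  retry() {
    local i
    for i in 1 2 3; do
      "$@" && return 0
      if [ "$i" -eq 3 ]; then
        echo "command failed after $i attempts: $*" >&2
        return 1
      fi
      echo "attempt $i failed, trying again..." >&2
    done
  }

  retry yarn --frozen-lockfile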

View File

@@ -1,28 +1,7 @@
steps: steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
echo -n $ENABLE_TERRAPIN > .build/terrapin
displayName: Prepare compilation cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "12.14.1" versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs: inputs:
@@ -34,6 +13,17 @@ steps:
azureSubscription: "vscode-builds-subscription" azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode KeyVaultName: vscode
- task: DownloadPipelineArtifact@2
inputs:
artifact: Compilation
path: $(Build.ArtifactStagingDirectory)
displayName: Download compilation output
- script: |
set -e
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
displayName: Extract compilation output
- script: | - script: |
set -e set -e
cat << EOF > ~/.netrc cat << EOF > ~/.netrc
@@ -48,31 +38,56 @@ steps:
- script: | - script: |
set -e set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git" git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro displayName: Merge distro
- script: | - script: |
npx https://aka.ms/enablesecurefeed standAlone mkdir -p .build
displayName: Switch to Terrapin packages node build/azure-pipelines/common/computeNodeModulesCacheKey.js $VSCODE_ARCH $ENABLE_TERRAPIN > .build/yarnlockhash
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- script: |
echo -n $(VSCODE_ARCH) > .build/arch
displayName: Prepare yarn cache flags displayName: Prepare yarn cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1 - task: Cache@2
inputs: inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock" key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
targetfolder: "**/node_modules, !**/node_modules/**/node_modules" path: .build/node_modules_cache
vstsFeed: "npm-vscode" cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
- script: |
set -e
tar -xzf .build/node_modules_cache/cache.tgz
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules cache
- script: |
set -e
npm install -g node-gyp@latest
node-gyp --version
displayName: Update node-gyp
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['VSCODE_ARCH'], 'x64'))
- script: |
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- script: | - script: |
set -e set -e
export npm_config_arch=$(NPM_ARCH) export npm_config_arch=$(NPM_ARCH)
export CHILD_CONCURRENCY="1" export npm_config_build_from_source=true
if [ -z "$CC" ] || [ -z "$CXX" ]; then
export CC=$(which gcc-5)
export CXX=$(which g++-5)
fi
if [ "$VSCODE_ARCH" == "x64" ]; then
export VSCODE_REMOTE_CC=$(which gcc-4.8)
export VSCODE_REMOTE_CXX=$(which g++-4.8)
export VSCODE_REMOTE_NODE_GYP=$(which node-gyp)
fi
for i in {1..3}; do # try 3 times, for Terrapin for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then if [ $i -eq 3 ]; then
@@ -81,21 +96,23 @@ steps:
fi fi
echo "Yarn failed $i, trying again..." echo "Yarn failed $i, trying again..."
done done
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1 # remove once https://github.com/prebuild/prebuild-install/pull/140 is merged and found in keytar
inputs: cd ./node_modules/keytar
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock" npx node-gyp rebuild
targetfolder: "**/node_modules, !**/node_modules/**/node_modules" env:
vstsFeed: "npm-vscode" ELECTRON_SKIP_BINARY_DOWNLOAD: 1
condition: and(succeeded(), ne(variables['CacheRestored'], 'true')) PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: | - script: |
set -e set -e
yarn postinstall node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
displayName: Run postinstall scripts mkdir -p .build/node_modules_cache
condition: and(succeeded(), eq(variables['CacheRestored'], 'true')) tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
- script: | - script: |
set -e set -e
@@ -106,28 +123,41 @@ steps:
set -e set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \ VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-linux-$(VSCODE_ARCH)-min-ci yarn gulp vscode-linux-$(VSCODE_ARCH)-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-linux-$(VSCODE_ARCH)-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-linux-$(VSCODE_ARCH)-min-ci
displayName: Build displayName: Build
- script: | - script: |
set -e set -e
service xvfb start VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
displayName: Start xvfb yarn gulp vscode-reh-linux-$(VSCODE_ARCH)-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-linux-$(VSCODE_ARCH)-min-ci
displayName: Build Server
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install"
displayName: Download Electron and Playwright
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
set -e set -e
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests" DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron) displayName: Run unit tests (Electron)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
set -e set -e
DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests" DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser) displayName: Run unit tests (Browser)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn --cwd test/integration/browser compile
displayName: Compile integration tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
@@ -142,6 +172,7 @@ steps:
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \ VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests" DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron) displayName: Run integration tests (Electron)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
@@ -149,6 +180,7 @@ steps:
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \ VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
displayName: Run integration tests (Browser) displayName: Run integration tests (Browser)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
@@ -160,6 +192,7 @@ steps:
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \ VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron) displayName: Run remote integration tests (Electron)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishPipelineArtifact@0 - task: PublishPipelineArtifact@0
@@ -182,17 +215,20 @@ steps:
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-deb" yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-deb"
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-rpm" yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-rpm"
displayName: Build deb, rpm packages displayName: Build deb, rpm packages
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: | - script: |
set -e set -e
yarn gulp "vscode-linux-$(VSCODE_ARCH)-prepare-snap" yarn gulp "vscode-linux-$(VSCODE_ARCH)-prepare-snap"
displayName: Prepare snap package displayName: Prepare snap package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
# needed for code signing # needed for code signing
- task: UseDotNet@2 - task: UseDotNet@2
displayName: "Install .NET Core SDK 2.x" displayName: "Install .NET Core SDK 2.x"
inputs: inputs:
version: 2.x version: 2.x
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1 - task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs: inputs:
@@ -212,6 +248,7 @@ steps:
] ]
SessionTimeout: 120 SessionTimeout: 120
displayName: Codesign rpm displayName: Codesign rpm
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: | - script: |
set -e set -e
@@ -221,12 +258,34 @@ steps:
VSCODE_ARCH="$(VSCODE_ARCH)" \ VSCODE_ARCH="$(VSCODE_ARCH)" \
./build/azure-pipelines/linux/publish.sh ./build/azure-pipelines/linux/publish.sh
displayName: Publish displayName: Publish
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(DEB_PATH)
artifact: vscode-linux-deb-$(VSCODE_ARCH)
displayName: Publish deb package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(RPM_PATH)
artifact: vscode-linux-rpm-$(VSCODE_ARCH)
displayName: Publish rpm package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-linux-$(VSCODE_ARCH).tar.gz
artifact: vscode-server-linux-$(VSCODE_ARCH)
displayName: Publish server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-linux-$(VSCODE_ARCH)-web.tar.gz
artifact: vscode-server-linux-$(VSCODE_ARCH)-web
displayName: Publish web server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: PublishPipelineArtifact@0 - task: PublishPipelineArtifact@0
displayName: "Publish Pipeline Artifact" displayName: "Publish Pipeline Artifact"
inputs: inputs:
artifactName: "snap-$(VSCODE_ARCH)" artifactName: "snap-$(VSCODE_ARCH)"
targetPath: .build/linux/snap-tarball targetPath: .build/linux/snap-tarball
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 - task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: "Component Detection" displayName: "Component Detection"
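
For the Linux targets the install step exports npm_config_arch and npm_config_build_from_source so native modules are compiled for the target architecture rather than pulled as x64 prebuilds, and, per the workaround comment above, rebuilds keytar by hand with node-gyp. Isolated from the surrounding YAML, that part of the step amounts to roughly the following (the arch value is an example; the job passes $(NPM_ARCH)):

  #!/usr/bin/env bash
  set -euo pipefail

  # Target architecture for native modules.
  export npm_config_arch="${NPM_ARCH:-arm64}"
  # Compile from source instead of downloading prebuilt x64 binaries.
  export npm_config_build_from_source=true

  yarn --frozen-lockfile

  # Workaround from the step above: rebuild keytar explicitly until
  # prebuild-install PR #140 ships in keytar.
  cd node_modules/keytar
  npx node-gyp rebuild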

View File

@@ -26,6 +26,17 @@ rm -rf $ROOT/vscode-server-*.tar.*
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH" node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
# Publish Remote Extension Host (Web)
LEGACY_SERVER_BUILD_NAME="vscode-reh-web-$PLATFORM_LINUX"
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX-web"
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX-web.tar.gz"
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/vscode-server-*-web.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX-web" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
# Publish DEB # Publish DEB
case $VSCODE_ARCH in case $VSCODE_ARCH in
x64) DEB_ARCH="amd64" ;; x64) DEB_ARCH="amd64" ;;
@@ -58,3 +69,7 @@ mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$VSCODE_ARCH.tar.gz" SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$VSCODE_ARCH.tar.gz"
rm -rf $SNAP_TARBALL_PATH rm -rf $SNAP_TARBALL_PATH
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap) (cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
# Export DEB_PATH, RPM_PATH
echo "##vso[task.setvariable variable=DEB_PATH]$DEB_PATH"
echo "##vso[task.setvariable variable=RPM_PATH]$RPM_PATH"
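
The two echo lines added above use Azure Pipelines' logging-command syntax: printing ##vso[task.setvariable variable=NAME]value to stdout exposes $(NAME) to later steps in the same job, which is what lets the new `- publish: $(DEB_PATH)` and `- publish: $(RPM_PATH)` steps find the packages. A standalone example of the mechanism, with placeholder paths:

  #!/usr/bin/env bash
  set -euo pipefail

  # Placeholder paths; the real script derives these from the gulp output layout.
  DEB_PATH=".build/linux/deb/example.deb"
  RPM_PATH=".build/linux/rpm/example.rpm"

  # Azure Pipelines logging commands: later steps in the job can now
  # reference these values as $(DEB_PATH) and $(RPM_PATH).
  echo "##vso[task.setvariable variable=DEB_PATH]$DEB_PATH"
  echo "##vso[task.setvariable variable=RPM_PATH]$RPM_PATH"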

View File

@@ -1,7 +1,7 @@
steps: steps:
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "12.14.1" versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs: inputs:
@@ -54,3 +54,11 @@ steps:
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \ AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \ AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
node build/azure-pipelines/common/createAsset.js "linux-snap-$(VSCODE_ARCH)" package "$SNAP_FILENAME" "$SNAP_PATH" node build/azure-pipelines/common/createAsset.js "linux-snap-$(VSCODE_ARCH)" package "$SNAP_FILENAME" "$SNAP_PATH"
# Export SNAP_PATH
echo "##vso[task.setvariable variable=SNAP_PATH]$SNAP_PATH"
- publish: $(SNAP_PATH)
artifact: vscode-linux-snap-$(VSCODE_ARCH)
displayName: Publish snap package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))

View File

@@ -0,0 +1,58 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const json = require("gulp-json-editor");
const buffer = require('gulp-buffer');
const filter = require("gulp-filter");
const es = require("event-stream");
const vfs = require("vinyl-fs");
const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const fs = require("fs");
const path = require("path");
function main() {
const quality = process.env['VSCODE_QUALITY'];
if (!quality) {
console.log('Missing VSCODE_QUALITY, skipping mixin');
return;
}
const productJsonFilter = filter(f => f.relative === 'product.json', { restore: true });
fancyLog(ansiColors.blue('[mixin]'), `Mixing in sources:`);
return vfs
.src(`quality/${quality}/**`, { base: `quality/${quality}` })
.pipe(filter(f => !f.isDirectory()))
.pipe(productJsonFilter)
.pipe(buffer())
.pipe(json((o) => {
const ossProduct = JSON.parse(fs.readFileSync(path.join(__dirname, '..', '..', 'product.json'), 'utf8'));
let builtInExtensions = ossProduct.builtInExtensions;
if (Array.isArray(o.builtInExtensions)) {
fancyLog(ansiColors.blue('[mixin]'), 'Overwriting built-in extensions:', o.builtInExtensions.map(e => e.name));
builtInExtensions = o.builtInExtensions;
}
else if (o.builtInExtensions) {
const include = o.builtInExtensions['include'] || [];
const exclude = o.builtInExtensions['exclude'] || [];
fancyLog(ansiColors.blue('[mixin]'), 'OSS built-in extensions:', builtInExtensions.map(e => e.name));
fancyLog(ansiColors.blue('[mixin]'), 'Including built-in extensions:', include.map(e => e.name));
fancyLog(ansiColors.blue('[mixin]'), 'Excluding built-in extensions:', exclude);
builtInExtensions = builtInExtensions.filter(ext => !include.find(e => e.name === ext.name) && !exclude.find(name => name === ext.name));
builtInExtensions = [...builtInExtensions, ...include];
fancyLog(ansiColors.blue('[mixin]'), 'Final built-in extensions:', builtInExtensions.map(e => e.name));
}
else {
fancyLog(ansiColors.blue('[mixin]'), 'Inheriting OSS built-in extensions', builtInExtensions.map(e => e.name));
}
return Object.assign(Object.assign({ webBuiltInExtensions: ossProduct.webBuiltInExtensions }, o), { builtInExtensions });
}))
.pipe(productJsonFilter.restore)
.pipe(es.mapSync(function (f) {
fancyLog(ansiColors.blue('[mixin]'), f.relative, ansiColors.green('✔︎'));
return f;
}))
.pipe(vfs.dest('.'));
}
main();
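
This compiled gulp script overlays everything under quality/$VSCODE_QUALITY/ onto the working tree and gives product.json special treatment, reconciling the quality-specific builtInExtensions (or its include/exclude form) with the OSS list. A deliberately simplified shell approximation, assuming GNU cp and jq are available and skipping the include/exclude reconciliation, might read:

  #!/usr/bin/env bash
  set -euo pipefail

  QUALITY="${VSCODE_QUALITY:-insider}"
  SRC="quality/$QUALITY"
  [ -d "$SRC" ] || { echo "Missing $SRC, skipping mixin"; exit 0; }

  # Overlay every quality-specific file except product.json (GNU cp).
  (cd "$SRC" && find . -type f ! -name product.json -exec cp --parents {} ../../ \;)

  # Recursive-merge product.json with jq; quality keys win. The real script above
  # additionally reconciles the builtInExtensions include/exclude lists.
  jq -s '.[0] * .[1]' product.json "$SRC/product.json" > product.json.tmp
  mv product.json.tmp product.json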

View File

@@ -1,4 +1,3 @@
trigger: none
pr: none pr: none
schedules: schedules:
@@ -8,11 +7,108 @@ schedules:
include: include:
- master - master
parameters:
- name: VSCODE_QUALITY
displayName: Quality
type: string
default: insider
values:
- exploration
- insider
- stable
- name: ENABLE_TERRAPIN
displayName: "Enable Terrapin"
type: boolean
default: true
- name: VSCODE_BUILD_WIN32
displayName: "🎯 Windows x64"
type: boolean
default: true
- name: VSCODE_BUILD_WIN32_32BIT
displayName: "🎯 Windows ia32"
type: boolean
default: true
- name: VSCODE_BUILD_WIN32_ARM64
displayName: "🎯 Windows arm64"
type: boolean
default: true
- name: VSCODE_BUILD_LINUX
displayName: "🎯 Linux x64"
type: boolean
default: true
- name: VSCODE_BUILD_LINUX_ARM64
displayName: "🎯 Linux arm64"
type: boolean
default: true
- name: VSCODE_BUILD_LINUX_ARMHF
displayName: "🎯 Linux armhf"
type: boolean
default: true
- name: VSCODE_BUILD_LINUX_ALPINE
displayName: "🎯 Alpine Linux"
type: boolean
default: true
- name: VSCODE_BUILD_MACOS
displayName: "🎯 macOS x64"
type: boolean
default: true
- name: VSCODE_BUILD_MACOS_ARM64
displayName: "🎯 macOS arm64"
type: boolean
default: true
- name: VSCODE_BUILD_MACOS_UNIVERSAL
displayName: "🎯 macOS universal"
type: boolean
default: true
- name: VSCODE_BUILD_WEB
displayName: "🎯 Web"
type: boolean
default: true
- name: VSCODE_PUBLISH
displayName: "Publish to builds.code.visualstudio.com"
type: boolean
default: true
- name: VSCODE_RELEASE
displayName: "Release build if successful"
type: boolean
default: false
- name: VSCODE_COMPILE_ONLY
displayName: "Run Compile stage exclusively"
type: boolean
default: false
- name: VSCODE_STEP_ON_IT
displayName: "Skip tests"
type: boolean
default: false
variables:
- name: ENABLE_TERRAPIN
value: ${{ eq(parameters.ENABLE_TERRAPIN, true) }}
- name: VSCODE_QUALITY
value: ${{ parameters.VSCODE_QUALITY }}
- name: VSCODE_BUILD_STAGE_WINDOWS
value: ${{ or(eq(parameters.VSCODE_BUILD_WIN32, true), eq(parameters.VSCODE_BUILD_WIN32_32BIT, true), eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}
- name: VSCODE_BUILD_STAGE_LINUX
value: ${{ or(eq(parameters.VSCODE_BUILD_LINUX, true), eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true), eq(parameters.VSCODE_BUILD_LINUX_ARM64, true), eq(parameters.VSCODE_BUILD_LINUX_ALPINE, true), eq(parameters.VSCODE_BUILD_WEB, true)) }}
- name: VSCODE_BUILD_STAGE_MACOS
value: ${{ or(eq(parameters.VSCODE_BUILD_MACOS, true), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true)) }}
- name: VSCODE_CIBUILD
value: ${{ in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI') }}
- name: VSCODE_PUBLISH
value: ${{ and(eq(parameters.VSCODE_PUBLISH, true), eq(variables.VSCODE_CIBUILD, false)) }}
- name: VSCODE_SCHEDULEDBUILD
value: ${{ eq(variables['Build.Reason'], 'Schedule') }}
- name: VSCODE_STEP_ON_IT
value: ${{ eq(parameters.VSCODE_STEP_ON_IT, true) }}
- name: VSCODE_BUILD_MACOS_UNIVERSAL
value: ${{ and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_BUILD_MACOS, true), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true), eq(parameters.VSCODE_BUILD_MACOS_UNIVERSAL, true)) }}
resources: resources:
containers: containers:
- container: vscode-x64 - container: vscode-x64
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64 image: vscodehub.azurecr.io/vscode-linux-build-agent:bionic-x64
endpoint: VSCodeHub endpoint: VSCodeHub
options: --user 0:0
- container: vscode-arm64 - container: vscode-arm64
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-arm64 image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-arm64
endpoint: VSCodeHub endpoint: VSCodeHub
@@ -26,177 +122,189 @@ stages:
- stage: Compile - stage: Compile
jobs: jobs:
- job: Compile - job: Compile
pool: pool: compile
vmImage: "Ubuntu-16.04"
container: vscode-x64
variables: variables:
VSCODE_ARCH: x64 VSCODE_ARCH: x64
steps: steps:
- template: product-compile.yml - template: product-compile.yml
- stage: Windows - ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true)) }}:
dependsOn: - stage: Windows
- Compile dependsOn:
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false')) - Compile
pool: pool:
vmImage: VS2017-Win2016 vmImage: VS2017-Win2016
jobs: jobs:
- job: Windows
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
timeoutInMinutes: 90
variables:
VSCODE_ARCH: x64
steps:
- template: win32/product-build-win32.yml
- job: Windows32 - ${{ if eq(parameters.VSCODE_BUILD_WIN32, true) }}:
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true')) - job: Windows
timeoutInMinutes: 90 timeoutInMinutes: 90
variables: variables:
VSCODE_ARCH: ia32 VSCODE_ARCH: x64
steps: steps:
- template: win32/product-build-win32.yml - template: win32/product-build-win32.yml
- job: WindowsARM64 - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_WIN32_32BIT, true)) }}:
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32_ARM64'], 'true')) - job: Windows32
timeoutInMinutes: 90 timeoutInMinutes: 90
variables: variables:
VSCODE_ARCH: arm64 VSCODE_ARCH: ia32
steps: steps:
- template: win32/product-build-win32.yml - template: win32/product-build-win32.yml
- stage: Linux - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}:
dependsOn: - job: WindowsARM64
- Compile timeoutInMinutes: 90
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false')) variables:
pool: VSCODE_ARCH: arm64
vmImage: "Ubuntu-16.04" steps:
jobs: - template: win32/product-build-win32.yml
- job: Linux
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
container: vscode-x64
variables:
VSCODE_ARCH: x64
NPM_ARCH: x64
steps:
- template: linux/product-build-linux.yml
- job: LinuxSnap - ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_LINUX'], true)) }}:
dependsOn: - stage: Linux
dependsOn:
- Compile
pool:
vmImage: "Ubuntu-18.04"
jobs:
- ${{ if eq(parameters.VSCODE_BUILD_LINUX, true) }}:
- job: Linux
container: vscode-x64
variables:
VSCODE_ARCH: x64
NPM_ARCH: x64
steps:
- template: linux/product-build-linux.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX, true)) }}:
- job: LinuxSnap
dependsOn:
- Linux
container: snapcraft
variables:
VSCODE_ARCH: x64
steps:
- template: linux/snap-build-linux.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true)) }}:
- job: LinuxArmhf
container: vscode-armhf
variables:
VSCODE_ARCH: armhf
NPM_ARCH: armv7l
steps:
- template: linux/product-build-linux.yml
# TODO@joaomoreno: We don't ship ARM snaps for now
- ${{ if and(false, eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true)) }}:
- job: LinuxSnapArmhf
dependsOn:
- LinuxArmhf
container: snapcraft
variables:
VSCODE_ARCH: armhf
steps:
- template: linux/snap-build-linux.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARM64, true)) }}:
- job: LinuxArm64
container: vscode-arm64
variables:
VSCODE_ARCH: arm64
NPM_ARCH: arm64
steps:
- template: linux/product-build-linux.yml
# TODO@joaomoreno: We don't ship ARM snaps for now
- ${{ if and(false, eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARM64, true)) }}:
- job: LinuxSnapArm64
dependsOn:
- LinuxArm64
container: snapcraft
variables:
VSCODE_ARCH: arm64
steps:
- template: linux/snap-build-linux.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ALPINE, true)) }}:
- job: LinuxAlpine
steps:
- template: linux/product-build-alpine.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_WEB, true)) }}:
- job: LinuxWeb
variables:
VSCODE_ARCH: x64
steps:
- template: web/product-build-web.yml
- ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_MACOS'], true)) }}:
- stage: macOS
dependsOn:
- Compile
pool:
vmImage: macOS-latest
jobs:
- ${{ if eq(parameters.VSCODE_BUILD_MACOS, true) }}:
- job: macOS
timeoutInMinutes: 90
variables:
VSCODE_ARCH: x64
steps:
- template: darwin/product-build-darwin.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true)) }}:
- job: macOSARM64
timeoutInMinutes: 90
variables:
VSCODE_ARCH: arm64
steps:
- template: darwin/product-build-darwin.yml
- ${{ if eq(variables['VSCODE_BUILD_MACOS_UNIVERSAL'], true) }}:
- job: macOSUniversal
dependsOn:
- macOS
- macOSARM64
timeoutInMinutes: 90
variables:
VSCODE_ARCH: universal
steps:
- template: darwin/product-build-darwin.yml
- ${{ if and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_COMPILE_ONLY, false)) }}:
- stage: Mooncake
dependsOn:
- ${{ if eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true) }}:
- Windows
- ${{ if eq(variables['VSCODE_BUILD_STAGE_LINUX'], true) }}:
- Linux
- ${{ if eq(variables['VSCODE_BUILD_STAGE_MACOS'], true) }}:
- macOS
condition: succeededOrFailed()
pool:
vmImage: "Ubuntu-18.04"
jobs:
- job: SyncMooncake
displayName: Sync Mooncake
steps:
- template: sync-mooncake.yml
- ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), or(eq(parameters.VSCODE_RELEASE, true), and(in(parameters.VSCODE_QUALITY, 'insider', 'exploration'), eq(variables['VSCODE_SCHEDULEDBUILD'], true)))) }}:
- stage: Release
dependsOn:
- ${{ if eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true) }}:
- Windows
- ${{ if eq(variables['VSCODE_BUILD_STAGE_LINUX'], true) }}:
- Linux
- ${{ if eq(variables['VSCODE_BUILD_STAGE_MACOS'], true) }}:
- macOS
pool:
vmImage: "Ubuntu-18.04"
jobs:
- job: ReleaseBuild
displayName: Release Build
steps:
- template: release.yml
- Linux
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
container: snapcraft
variables:
VSCODE_ARCH: x64
steps:
- template: linux/snap-build-linux.yml
- job: LinuxArmhf
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
container: vscode-armhf
variables:
VSCODE_ARCH: armhf
NPM_ARCH: armv7l
steps:
- template: linux/product-build-linux.yml
- job: LinuxSnapArmhf
dependsOn:
- LinuxArmhf
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
container: snapcraft
variables:
VSCODE_ARCH: armhf
steps:
- template: linux/snap-build-linux.yml
- job: LinuxArm64
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
container: vscode-arm64
variables:
VSCODE_ARCH: arm64
NPM_ARCH: arm64
steps:
- template: linux/product-build-linux.yml
- job: LinuxSnapArm64
dependsOn:
- LinuxArm64
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
container: snapcraft
variables:
VSCODE_ARCH: arm64
steps:
- template: linux/snap-build-linux.yml
- job: LinuxAlpine
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
steps:
- template: linux/product-build-alpine.yml
- job: LinuxWeb
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'))
variables:
VSCODE_ARCH: x64
steps:
- template: web/product-build-web.yml
- stage: macOS
dependsOn:
- Compile
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: macOS-latest
jobs:
- job: macOS
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
timeoutInMinutes: 90
variables:
VSCODE_ARCH: x64
steps:
- template: darwin/product-build-darwin.yml
- stage: macOSARM64
dependsOn:
- Compile
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: macOS-latest
jobs:
- job: macOSARM64
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS_ARM64'], 'true'))
timeoutInMinutes: 90
variables:
VSCODE_ARCH: arm64
steps:
- template: darwin/product-build-darwin.yml
- stage: Mooncake
dependsOn:
- Windows
- Linux
- macOS
- macOSARM64
condition: and(succeededOrFailed(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: "Ubuntu-16.04"
jobs:
- job: SyncMooncake
displayName: Sync Mooncake
steps:
- template: sync-mooncake.yml
- stage: Publish
dependsOn:
- Windows
- Linux
- macOS
- macOSARM64
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), or(eq(variables['VSCODE_RELEASE'], 'true'), and(or(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['VSCODE_QUALITY'], 'exploration')), eq(variables['Build.Reason'], 'Schedule'))))
pool:
vmImage: "Ubuntu-16.04"
jobs:
- job: BuildService
displayName: Build Service
steps:
- template: release.yml

View File

@ -1,36 +1,17 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
echo -n $ENABLE_TERRAPIN > .build/terrapin
displayName: Prepare compilation cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
dryRun: true
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
versionSpec: "12.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
@ -43,36 +24,46 @@ steps:
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
displayName: Merge distro
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
mkdir -p .build
node build/azure-pipelines/common/computeNodeModulesCacheKey.js $VSCODE_ARCH $ENABLE_TERRAPIN > .build/yarnlockhash
displayName: Prepare yarn cache flags
# using `genericNodeModules` instead of `nodeModules` here to avoid sharing the cache with builds running inside containers
- task: Cache@2
inputs:
key: 'genericNodeModules | $(Agent.OS) | .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
- script: |
set -e
tar -xzf .build/node_modules_cache/cache.tgz
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules cache
- script: |
npx https://aka.ms/enablesecurefeed standAlone
displayName: Switch to Terrapin packages
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- script: |
echo -n $(VSCODE_ARCH) > .build/arch
displayName: Prepare yarn cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
sudo apt update -y
sudo apt install -y build-essential pkg-config libx11-dev libx11-xcb-dev libxkbfile-dev libsecret-1-dev libnotify-bin
displayName: Install build tools
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: |
set -e
export CHILD_CONCURRENCY="1"
for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then
@ -81,67 +72,50 @@ steps:
fi
echo "Yarn failed $i, trying again..."
done
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
# Mixin must run before optimize, because the CSS loader will
# inline small SVGs
# Mixin must run before optimize, because the CSS loader will inline small SVGs
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
yarn gulp hygiene
yarn monaco-compile-check
yarn valid-layers-check
displayName: Run hygiene, monaco compile & valid layers checks
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
yarn npm-run-all -lp core-ci extensions-ci hygiene eslint valid-layers-check
displayName: Compile & Hygiene
- script: |
set -
./build/azure-pipelines/common/extract-telemetry.sh
displayName: Extract Telemetry
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
./build/azure-pipelines/common/publish-webview.sh
displayName: Publish Webview
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
yarn gulp compile-build
yarn gulp compile-extensions-build
yarn gulp minify-vscode
yarn gulp minify-vscode-reh
yarn gulp minify-vscode-reh-web
displayName: Compile
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
node build/azure-pipelines/upload-sourcemaps
displayName: Upload sourcemaps
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -
./build/azure-pipelines/common/extract-telemetry.sh
displayName: Extract Telemetry
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
./build/azure-pipelines/common/publish-webview.sh
displayName: Publish Webview
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
@ -149,13 +123,16 @@ steps:
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
node build/azure-pipelines/common/createBuild.js $VERSION
displayName: Create build
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
# we gotta tarball everything in order to preserve file permissions
- script: |
set -e
tar -czf $(Build.ArtifactStagingDirectory)/compilation.tar.gz .build out-*
displayName: Compress compilation artifact
- task: PublishPipelineArtifact@1
inputs:
targetPath: $(Build.ArtifactStagingDirectory)/compilation.tar.gz
artifactName: Compilation
displayName: Publish compilation artifact

View File

@ -1,2 +0,0 @@
node_modules/
*.js

View File

@ -0,0 +1,36 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const cp = require("child_process");
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
if (!isValidTag(tag)) {
throw Error(`Invalid tag ${tag}`);
}
}
catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function isValidTag(t) {
if (t.split('.').length !== 3) {
return false;
}
const [major, minor, bug] = t.split('.');
// Only release for tags like 1.34.0
if (bug !== '0') {
return false;
}
if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) {
return false;
}
return true;
}

View File

@ -9,7 +9,7 @@ pr: none
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:

View File

@ -0,0 +1,72 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const cp = require("child_process");
const path = require("path");
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
updateDTSFile(outPath, tag);
console.log(`Done updating vscode.d.ts at ${outPath}`);
}
catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function updateDTSFile(outPath, tag) {
const oldContent = fs.readFileSync(outPath, 'utf-8');
const newContent = getNewFileContent(oldContent, tag);
fs.writeFileSync(outPath, newContent);
}
function repeat(str, times) {
const result = new Array(times);
for (let i = 0; i < times; i++) {
result[i] = str;
}
return result.join('');
}
function convertTabsToSpaces(str) {
return str.replace(/\t/gm, value => repeat(' ', value.length));
}
function getNewFileContent(content, tag) {
const oldheader = [
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the MIT License. See License.txt in the project root for license information.`,
` *--------------------------------------------------------------------------------------------*/`
].join('\n');
return convertTabsToSpaces(getNewFileHeader(tag) + content.slice(oldheader.length));
}
function getNewFileHeader(tag) {
const [major, minor] = tag.split('.');
const shorttag = `${major}.${minor}`;
const header = [
`// Type definitions for Visual Studio Code ${shorttag}`,
`// Project: https://github.com/microsoft/vscode`,
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/microsoft>`,
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
``,
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the MIT License.`,
` * See https://github.com/microsoft/vscode/blob/master/LICENSE.txt for license information.`,
` *--------------------------------------------------------------------------------------------*/`,
``,
`/**`,
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
` * See https://code.visualstudio.com/api for more information`,
` */`
].join('\n');
return header;
}

View File

@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:

View File

@ -0,0 +1,35 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const es = require("event-stream");
const vfs = require("vinyl-fs");
const util = require("../lib/util");
const filter = require("gulp-filter");
const gzip = require("gulp-gzip");
const azure = require('gulp-azure-storage');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
function main() {
return vfs.src('**', { cwd: '../vscode-web', base: '../vscode-web', dot: true })
.pipe(filter(f => !f.isDirectory()))
.pipe(gzip({ append: false }))
.pipe(es.through(function (data) {
console.log('Uploading CDN file:', data.relative); // debug
this.emit('data', data);
}))
.pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY,
container: process.env.VSCODE_QUALITY,
prefix: commit + '/',
contentSettings: {
contentEncoding: 'gzip',
cacheControl: 'max-age=31536000, public'
}
}));
}
main();

View File

@ -0,0 +1,55 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const es = require("event-stream");
const vfs = require("vinyl-fs");
const util = require("../lib/util");
// @ts-ignore
const deps = require("../lib/dependencies");
const azure = require('gulp-azure-storage');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
// optionally allow to pass in explicit base/maps to upload
const [, , base, maps] = process.argv;
function src(base, maps = `${base}/**/*.map`) {
return vfs.src(maps, { base })
.pipe(es.mapSync((f) => {
f.path = `${f.base}/core/${f.relative}`;
return f;
}));
}
function main() {
const sources = [];
// vscode client maps (default)
if (!base) {
const vs = src('out-vscode-min'); // client source-maps only
sources.push(vs);
const productionDependencies = deps.getProductionDependencies(root);
const productionDependenciesSrc = productionDependencies.map(d => path.relative(root, d.path)).map(d => `./${d}/**/*.map`);
const nodeModules = vfs.src(productionDependenciesSrc, { base: '.' })
.pipe(util.cleanNodeModules(path.join(root, 'build', '.moduleignore')));
sources.push(nodeModules);
const extensionsOut = vfs.src(['.build/extensions/**/*.js.map', '!**/node_modules/**'], { base: '.build' });
sources.push(extensionsOut);
}
// specific client base/maps
else {
sources.push(src(base, maps));
}
return es.merge(...sources)
.pipe(es.through(function (data) {
console.log('Uploading Sourcemap', data.relative); // debug
this.emit('data', data);
}))
.pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY,
container: 'sourcemaps',
prefix: commit + '/'
}));
}
main();

View File

@ -11,7 +11,7 @@ import * as Vinyl from 'vinyl';
import * as vfs from 'vinyl-fs';
import * as util from '../lib/util';
// @ts-ignore
import * as deps from '../dependencies';
import * as deps from '../lib/dependencies';
const azure = require('gulp-azure-storage');
const root = path.dirname(path.dirname(__dirname));

View File

@ -1,28 +1,7 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
echo -n $ENABLE_TERRAPIN > .build/terrapin
displayName: Prepare compilation cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
@ -34,6 +13,17 @@ steps:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- task: DownloadPipelineArtifact@2
inputs:
artifact: Compilation
path: $(Build.ArtifactStagingDirectory)
displayName: Download compilation output
- script: |
set -e
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
displayName: Extract compilation output
- script: |
set -e
cat << EOF > ~/.netrc
@ -48,30 +38,35 @@ steps:
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
displayName: Merge distro
- script: |
npx https://aka.ms/enablesecurefeed standAlone
displayName: Switch to Terrapin packages
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- script: |
echo -n "web" > .build/arch
displayName: Prepare yarn cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
- script: |
mkdir -p .build
node build/azure-pipelines/common/computeNodeModulesCacheKey.js "web" $ENABLE_TERRAPIN > .build/yarnlockhash
displayName: Prepare yarn cache flags
- task: Cache@2
inputs:
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
- script: |
set -e
tar -xzf .build/node_modules_cache/cache.tgz
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules cache
- script: |
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- script: |
set -e
export CHILD_CONCURRENCY="1"
for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then
@ -80,21 +75,19 @@ steps:
fi
echo "Yarn failed $i, trying again..."
done
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
- script: |
set -e
@ -130,3 +123,8 @@ steps:
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
./build/azure-pipelines/web/publish.sh
displayName: Publish
- publish: $(Agent.BuildDirectory)/vscode-web.tar.gz
artifact: vscode-web-standalone
displayName: Publish web archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))

View File

@ -1,6 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSources>
<add key="ESRP" value="https://microsoft.pkgs.visualstudio.com/_packaging/ESRP/nuget/v3/index.json" />
</packageSources>
<clear />
<add key="ESRP" value="https://microsoft.pkgs.visualstudio.com/_packaging/ESRP/nuget/v3/index.json" />
</packageSources>
<disabledPackageSources>
<clear />
</disabledPackageSources>
</configuration>

View File

@ -1,87 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: UsePythonVersion@0
inputs:
versionSpec: "2.x"
addToPath: true
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "vscode-build-cache"
- powershell: |
yarn --frozen-lockfile
env:
CHILD_CONCURRENCY: "1"
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "vscode-build-cache"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn postinstall }
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- powershell: |
yarn electron
displayName: Download Electron
- powershell: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: |
yarn valid-layers-check
displayName: Run Valid Layers Checks
- powershell: |
yarn compile
displayName: Compile Sources
- powershell: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- powershell: |
.\scripts\test.bat --tfs "Unit Tests"
displayName: Run Unit Tests (Electron)
- powershell: |
yarn test-browser --browser chromium --browser firefox --tfs "Browser Unit Tests"
displayName: Run Unit Tests (Browser)
- powershell: |
.\scripts\test-integration.bat --tfs "Integration Tests"
displayName: Run Integration Tests (Electron)
- task: PublishPipelineArtifact@0
displayName: "Publish Crash Reports"
inputs:
artifactName: crash-dump-windows
targetPath: .build\crashes
continueOnError: true
condition: failed()
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: "*-results.xml"
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: succeededOrFailed()

View File

@ -3,7 +3,7 @@ $ErrorActionPreference = "Stop"
$CertBytes = [System.Convert]::FromBase64String($CertBase64)
$CertCollection = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2Collection
$CertCollection.Import($CertBytes, $null, [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::Exportable)
$CertCollection.Import($CertBytes, $null, [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::Exportable -bxor [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::PersistKeySet)
$CertStore = New-Object System.Security.Cryptography.X509Certificates.X509Store("My","LocalMachine")
$CertStore.Open("ReadWrite")

View File

@ -1,28 +1,7 @@
steps:
- powershell: |
mkdir .build -ea 0
"$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
"$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
"$env:ENABLE_TERRAPIN" | Out-File -Encoding ascii -NoNewLine .build\terrapin
displayName: Prepare compilation cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
- powershell: |
$ErrorActionPreference = "Stop"
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
@ -39,6 +18,18 @@ steps:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- task: DownloadPipelineArtifact@2
inputs:
artifact: Compilation
path: $(Build.ArtifactStagingDirectory)
displayName: Download compilation output
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { tar --force-local -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz }
displayName: Extract compilation output
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
@ -51,26 +42,34 @@ steps:
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git" }
exec { git fetch distro }
exec { git merge $(node -p "require('./package.json').distro") }
exec { git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro") }
displayName: Merge distro
- script: |
npx https://aka.ms/enablesecurefeed standAlone
displayName: Switch to Terrapin packages
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- powershell: |
"$(VSCODE_ARCH)" | Out-File -Encoding ascii -NoNewLine .build\arch
"$env:ENABLE_TERRAPIN" | Out-File -Encoding ascii -NoNewLine .build\terrapin
node build/azure-pipelines/common/computeNodeModulesCacheKey.js > .build/yarnlockhash
displayName: Prepare yarn cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
- task: Cache@2
inputs:
key: 'nodeModules | $(Agent.OS) | .build/arch, .build/terrapin, .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { 7z.exe x .build/node_modules_cache/cache.7z -aos }
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules cache
- script: |
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- powershell: |
. build/azure-pipelines/win32/exec.ps1
@ -79,22 +78,20 @@ steps:
$env:npm_config_arch="$(VSCODE_ARCH)"
$env:CHILD_CONCURRENCY="1"
retry { exec { yarn --frozen-lockfile } }
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn postinstall }
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
exec { node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt }
exec { mkdir -Force .build/node_modules_cache }
exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt }
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
- powershell: |
. build/azure-pipelines/win32/exec.ps1
@ -107,11 +104,18 @@ steps:
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-min-ci" }
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-code-helper" }
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-inno-updater" }
echo "##vso[task.setvariable variable=CodeSigningFolderPath]$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)"
displayName: Build
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-code-helper" }
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-inno-updater" }
displayName: Prepare Package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
@ -122,12 +126,21 @@ steps:
displayName: Build Server
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
exec { yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install" }
displayName: Download Electron and Playwright
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn electron $(VSCODE_ARCH) }
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
displayName: Run unit tests (Electron)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
@ -135,6 +148,14 @@ steps:
$ErrorActionPreference = "Stop"
exec { yarn test-browser --build --browser chromium --browser firefox --tfs "Browser Unit Tests" }
displayName: Run unit tests (Browser)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn --cwd test/integration/browser compile }
displayName: Compile integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
@ -148,6 +169,7 @@ steps:
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
displayName: Run integration tests (Electron)
timeoutInMinutes: 10
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
@ -155,6 +177,7 @@ steps:
$ErrorActionPreference = "Stop"
exec { $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-web-integration.bat --browser firefox }
displayName: Run integration tests (Browser)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
@ -165,6 +188,7 @@ steps:
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-remote-integration.bat }
displayName: Run remote integration tests (Electron)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: PublishPipelineArtifact@0
@ -180,7 +204,7 @@ steps:
inputs:
testResultsFiles: "*-results.xml"
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: and(succeededOrFailed(), ne(variables['VSCODE_ARCH'], 'arm64'))
condition: and(succeededOrFailed(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
@ -236,6 +260,7 @@ steps:
}
]
SessionTimeout: 120
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: NuGetCommand@2
displayName: Install ESRPClient.exe
@ -245,11 +270,13 @@ steps:
nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
externalFeedCredentials: "ESRP Nuget"
restoreDirectory: packages
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: ESRPImportCertTask@1
displayName: Import ESRP Request Signing Certificate
inputs:
ESRP: "ESRP CodeSign"
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: PowerShell@2
inputs:
@ -257,6 +284,7 @@ steps:
filePath: .\build\azure-pipelines\win32\import-esrp-auth-cert.ps1
arguments: "$(ESRP-SSL-AADAuth)"
displayName: Import ESRP Auth Certificate
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
@ -266,6 +294,32 @@ steps:
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
.\build\azure-pipelines\win32\publish.ps1
displayName: Publish
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip
artifact: vscode-win32-$(VSCODE_ARCH)
displayName: Publish archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\system-setup\VSCodeSetup.exe
artifact: vscode-win32-$(VSCODE_ARCH)-setup
displayName: Publish system setup
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\user-setup\VSCodeSetup.exe
artifact: vscode-win32-$(VSCODE_ARCH)-user-setup
displayName: Publish user setup
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(System.DefaultWorkingDirectory)\.build\vscode-server-win32-$(VSCODE_ARCH).zip
artifact: vscode-server-win32-$(VSCODE_ARCH)
displayName: Publish server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- publish: $(System.DefaultWorkingDirectory)\.build\vscode-server-win32-$(VSCODE_ARCH)-web.zip
artifact: vscode-server-win32-$(VSCODE_ARCH)-web
displayName: Publish web server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: "Component Detection"

View File

@ -6,8 +6,7 @@
const fs = require('fs');
const path = require('path');
const os = require('os');
const { remote } = require('electron');
const dialog = remote.dialog;
const { ipcRenderer } = require('electron');
const builtInExtensionsPath = path.join(__dirname, '..', '..', 'product.json');
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
@ -84,17 +83,13 @@ function render(el, state) {
}
const localInput = renderOption(form, `local-${ext.name}`, 'Local', 'local', !!local);
localInput.onchange = function () {
const result = dialog.showOpenDialog(remote.getCurrentWindow(), {
title: 'Choose Folder',
properties: ['openDirectory']
});
if (result && result.length >= 1) {
control[ext.name] = result[0];
setState({ builtin, control });
}
localInput.onchange = async function () {
const result = await ipcRenderer.invoke('pickdir');
if (result) {
control[ext.name] = result;
}
setState({ builtin, control });
};
if (local) {

View File

@ -3,12 +3,25 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const { app, BrowserWindow } = require('electron');
const { app, BrowserWindow, ipcMain, dialog } = require('electron');
const url = require('url');
const path = require('path');
let window = null;
ipcMain.handle('pickdir', async () => {
const result = await dialog.showOpenDialog(window, {
title: 'Choose Folder',
properties: ['openDirectory']
});
if (result.canceled || result.filePaths.length < 1) {
return undefined;
}
return result.filePaths[0];
});
app.once('ready', () => {
window = new BrowserWindow({ width: 800, height: 600, webPreferences: { nodeIntegration: true, webviewTag: true, enableWebSQL: false, nativeWindowOpen: true } });
window.setMenuBarVisibility(false);

View File

@ -0,0 +1,58 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const vscode_universal_1 = require("vscode-universal");
const fs = require("fs-extra");
const path = require("path");
const plist = require("plist");
const product = require("../../product.json");
async function main() {
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
const arch = process.env['VSCODE_ARCH'];
if (!buildDir) {
throw new Error('$AGENT_BUILDDIRECTORY not set');
}
const appName = product.nameLong + '.app';
const x64AppPath = path.join(buildDir, 'vscode-x64', appName);
const arm64AppPath = path.join(buildDir, 'vscode-arm64', appName);
const x64AsarPath = path.join(x64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
const arm64AsarPath = path.join(arm64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
await vscode_universal_1.makeUniversalApp({
x64AppPath,
arm64AppPath,
x64AsarPath,
arm64AsarPath,
filesToSkip: [
'product.json',
'Credits.rtf',
'CodeResources',
'fsevents.node',
'.npmrc'
],
outAppPath,
force: true
});
let productJson = await fs.readJson(productJsonPath);
Object.assign(productJson, {
darwinUniversalAssetId: 'darwin-universal'
});
await fs.writeJson(productJsonPath, productJson);
let infoPlistString = await fs.readFile(infoPlistPath, 'utf8');
let infoPlistJson = plist.parse(infoPlistString);
Object.assign(infoPlistJson, {
LSRequiresNativeExecution: true
});
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
}
if (require.main === module) {
main().catch(err => {
console.error(err);
process.exit(1);
});
}

View File

@ -0,0 +1,66 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { makeUniversalApp } from 'vscode-universal';
import * as fs from 'fs-extra';
import * as path from 'path';
import * as plist from 'plist';
import * as product from '../../product.json';
async function main() {
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
const arch = process.env['VSCODE_ARCH'];
if (!buildDir) {
throw new Error('$AGENT_BUILDDIRECTORY not set');
}
const appName = product.nameLong + '.app';
const x64AppPath = path.join(buildDir, 'vscode-x64', appName);
const arm64AppPath = path.join(buildDir, 'vscode-arm64', appName);
const x64AsarPath = path.join(x64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
const arm64AsarPath = path.join(arm64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
await makeUniversalApp({
x64AppPath,
arm64AppPath,
x64AsarPath,
arm64AsarPath,
filesToSkip: [
'product.json',
'Credits.rtf',
'CodeResources',
'fsevents.node',
'.npmrc'
],
outAppPath,
force: true
});
let productJson = await fs.readJson(productJsonPath);
Object.assign(productJson, {
darwinUniversalAssetId: 'darwin-universal'
});
await fs.writeJson(productJsonPath, productJson);
let infoPlistString = await fs.readFile(infoPlistPath, 'utf8');
let infoPlistJson = plist.parse(infoPlistString);
Object.assign(infoPlistJson, {
LSRequiresNativeExecution: true
});
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
}
if (require.main === module) {
main().catch(err => {
console.error(err);
process.exit(1);
});
}

View File

@ -0,0 +1,58 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const codesign = require("electron-osx-sign");
const path = require("path");
const util = require("../lib/util");
const product = require("../../product.json");
async function main() {
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
const tempDir = process.env['AGENT_TEMPDIRECTORY'];
const arch = process.env['VSCODE_ARCH'];
if (!buildDir) {
throw new Error('$AGENT_BUILDDIRECTORY not set');
}
if (!tempDir) {
throw new Error('$AGENT_TEMPDIRECTORY not set');
}
const baseDir = path.dirname(__dirname);
const appRoot = path.join(buildDir, `VSCode-darwin-${arch}`);
const appName = product.nameLong + '.app';
const appFrameworkPath = path.join(appRoot, appName, 'Contents', 'Frameworks');
const helperAppBaseName = product.nameShort;
const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app';
const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app';
const defaultOpts = {
app: path.join(appRoot, appName),
platform: 'darwin',
entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'app-entitlements.plist'),
'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'app-entitlements.plist'),
hardenedRuntime: true,
'pre-auto-entitlements': false,
'pre-embed-provisioning-profile': false,
keychain: path.join(tempDir, 'buildagent.keychain'),
version: util.getElectronVersion(),
identity: '99FM488X57',
'gatekeeper-assess': false
};
const appOpts = Object.assign(Object.assign({}, defaultOpts), {
// TODO(deepak1556): Incorrectly declared type in electron-osx-sign
ignore: (filePath) => {
return filePath.includes(gpuHelperAppName) ||
filePath.includes(rendererHelperAppName);
} });
const gpuHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, gpuHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist') });
const rendererHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, rendererHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist') });
await codesign.signAsync(gpuHelperOpts);
await codesign.signAsync(rendererHelperOpts);
await codesign.signAsync(appOpts);
}
if (require.main === module) {
main().catch(err => {
console.error(err);
process.exit(1);
});
}

View File

@ -29,7 +29,6 @@ async function main(): Promise<void> {
const appFrameworkPath = path.join(appRoot, appName, 'Contents', 'Frameworks');
const helperAppBaseName = product.nameShort;
const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app';
const pluginHelperAppName = helperAppBaseName + ' Helper (Plugin).app';
const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app';
const defaultOpts: codesign.SignOptions = {
@ -51,7 +50,6 @@ async function main(): Promise<void> {
// TODO(deepak1556): Incorrectly declared type in electron-osx-sign
ignore: (filePath: string) => {
return filePath.includes(gpuHelperAppName) ||
filePath.includes(pluginHelperAppName) ||
filePath.includes(rendererHelperAppName);
}
};
@ -63,13 +61,6 @@ async function main(): Promise<void> {
'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'),
}; };
const pluginHelperOpts: codesign.SignOptions = {
...defaultOpts,
app: path.join(appFrameworkPath, pluginHelperAppName),
entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist'),
'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist'),
};
const rendererHelperOpts: codesign.SignOptions = { const rendererHelperOpts: codesign.SignOptions = {
...defaultOpts, ...defaultOpts,
app: path.join(appFrameworkPath, rendererHelperAppName), app: path.join(appFrameworkPath, rendererHelperAppName),
@ -78,7 +69,6 @@ async function main(): Promise<void> {
}; };
await codesign.signAsync(gpuHelperOpts); await codesign.signAsync(gpuHelperOpts);
await codesign.signAsync(pluginHelperOpts);
await codesign.signAsync(rendererHelperOpts); await codesign.signAsync(rendererHelperOpts);
await codesign.signAsync(appOpts as any); await codesign.signAsync(appOpts as any);
} }
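A side note for readers comparing the compiled build/darwin/sign.js above with this sign.ts hunk: TypeScript down-levels object spread into nested Object.assign calls, which is why the JS shows Object.assign(Object.assign({}, defaultOpts), ...). A minimal equivalence sketch in plain JavaScript (the option values are illustrative, not taken from this commit):

// Illustrative only: both expressions build the same options object.
const defaultOpts = { hardenedRuntime: true, 'gatekeeper-assess': false };
const viaSpread = { ...defaultOpts, ignore: (filePath) => filePath.includes('Helper') };
const viaAssign = Object.assign(Object.assign({}, defaultOpts), { ignore: (filePath) => filePath.includes('Helper') });
console.log(JSON.stringify(viaSpread) === JSON.stringify(viaAssign)); // true (functions are dropped by JSON.stringify)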

View File

@ -0,0 +1,36 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const es = require('event-stream');
const vfs = require('vinyl-fs');
const { jsHygieneFilter, tsHygieneFilter } = require('./filters');
function eslint() {
const gulpeslint = require('gulp-eslint');
return vfs
.src([...jsHygieneFilter, ...tsHygieneFilter], { base: '.', follow: true, allowEmpty: true })
.pipe(
gulpeslint({
configFile: '.eslintrc.json',
rulePaths: ['./build/lib/eslint'],
})
)
.pipe(gulpeslint.formatEach('compact'))
.pipe(
gulpeslint.results((results) => {
if (results.warningCount > 0 || results.errorCount > 0) {
throw new Error('eslint failed with warnings and/or errors');
}
})
).pipe(es.through(function () { /* noop, important for the stream to end */ }));
}
if (require.main === module) {
eslint().on('error', (err) => {
console.error();
console.error(err);
process.exit(1);
});
}

207
lib/vscode/build/ext.js Normal file
View File

@ -0,0 +1,207 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const fs = require('fs').promises;
const path = require('path');
const cp = require('child_process');
const os = require('os');
const mkdirp = require('mkdirp');
const product = require('../product.json');
const root = path.resolve(path.join(__dirname, '..', '..'));
const exists = (path) => fs.stat(path).then(() => true, () => false);
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
async function readControlFile() {
try {
return JSON.parse(await fs.readFile(controlFilePath, 'utf8'));
} catch (err) {
return {};
}
}
async function writeControlFile(control) {
await mkdirp(path.dirname(controlFilePath));
await fs.writeFile(controlFilePath, JSON.stringify(control, null, ' '));
}
async function exec(cmd, args, opts = {}) {
return new Promise((c, e) => {
const child = cp.spawn(cmd, args, { stdio: 'inherit', env: process.env, ...opts });
child.on('close', code => code === 0 ? c() : e(`Returned ${code}`));
});
}
function getFolderPath(extDesc) {
const folder = extDesc.repo.replace(/.*\//, '');
return path.join(root, folder);
}
async function getExtensionType(folderPath) {
const pkg = JSON.parse(await fs.readFile(path.join(folderPath, 'package.json'), 'utf8'));
if (pkg['contributes']['themes'] || pkg['contributes']['iconThemes']) {
return 'theme';
} else if (pkg['contributes']['grammars']) {
return 'grammar';
} else {
return 'misc';
}
}
async function initExtension(extDesc) {
const folderPath = getFolderPath(extDesc);
if (!await exists(folderPath)) {
console.log(`⏳ git clone: ${extDesc.name}`);
await exec('git', ['clone', `${extDesc.repo}.git`], { cwd: root });
}
const type = await getExtensionType(folderPath);
return { path: folderPath, type, ...extDesc };
}
async function createWorkspace(type, extensions) {
const workspaceName = `vscode-${type}-extensions.code-workspace`;
const workspacePath = path.join(root, workspaceName);
const workspace = { folders: extensions.map(ext => ({ path: path.basename(ext.path) })) };
if (!await exists(workspacePath)) {
console.log(`✅ create workspace: ${workspaceName}`);
}
await fs.writeFile(workspacePath, JSON.stringify(workspace, undefined, ' '));
}
async function init() {
const extensions = [];
for (const extDesc of product.builtInExtensions) {
extensions.push(await initExtension(extDesc));
}
await createWorkspace('all', extensions);
const byType = extensions
.reduce((m, e) => m.set(e.type, [...(m.get(e.type) || []), e]), new Map());
for (const [type, extensions] of byType) {
await createWorkspace(type, extensions);
}
return byType;
}
async function status() {
const byType = await init();
const control = await readControlFile();
for (const [type, extensions] of byType) {
console.log(`${type} (${extensions.length} extensions):`);
const maxWidth = Math.max(...extensions.map(e => e.name.length));
for (const ext of extensions) {
console.log(`  ${ext.name.padEnd(maxWidth, ' ')}  ${control[ext.name] || 'marketplace'}`);
}
}
console.log(`total: ${product.builtInExtensions.length} extensions`);
}
async function each([cmd, ...args], opts) {
await init();
for (const extDesc of product.builtInExtensions) {
const folderPath = getFolderPath(extDesc);
if (opts.type) {
const type = await getExtensionType(folderPath);
if (type !== opts.type) {
continue;
}
}
console.log(`👉 ${extDesc.name}`);
await exec(cmd, args, { cwd: folderPath });
}
}
async function _link(extensions, opts, fn) {
await init();
const control = await readControlFile();
for (const extDesc of product.builtInExtensions) {
if (extensions.length > 0 && extensions.indexOf(extDesc.name) === -1) {
continue;
}
if (opts.type) {
const folderPath = getFolderPath(extDesc);
const type = await getExtensionType(folderPath);
if (type !== opts.type) {
continue;
}
}
await fn(control, extDesc);
}
await writeControlFile(control);
}
async function link(extensions, opts) {
await _link(extensions, opts, async (control, extDesc) => {
const ext = await initExtension(extDesc);
control[extDesc.name] = ext.path;
console.log(`👉 link: ${extDesc.name} -> ${ext.path}`);
});
}
async function unlink(extensions, opts) {
await _link(extensions, opts, async (control, extDesc) => {
control[extDesc.name] = 'marketplace';
console.log(`👉 unlink: ${extDesc.name}`);
});
}
if (require.main === module) {
const { program } = require('commander');
program.version('0.0.1');
program
.command('init')
.description('Initialize workspace with built-in extensions')
.action(init);
program
.command('status')
.description('Print extension status')
.action(status);
program
.command('each <command...>')
.option('-t, --type <type>', 'Specific type only')
.description('Run a command in each extension repository')
.allowUnknownOption()
.action(each);
program
.command('link [extensions...]')
.option('-t, --type <type>', 'Specific type only')
.description('Link with code-oss')
.action(link);
program
.command('unlink [extensions...]')
.option('-t, --type <type>', 'Specific type only')
.description('Unlink from code-oss')
.action(unlink);
program.parseAsync(process.argv);
}
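The link and unlink commands above only toggle entries in the control file managed by readControlFile()/writeControlFile() at ~/.vscode-oss-dev/extensions/control.json. A sketch of the resulting file shape (the extension names and the local path are hypothetical, chosen only to illustrate the two possible value kinds):

// Hypothetical control.json contents after link() on one extension and unlink() on another.
// Keys are built-in extension names; values are either a local checkout path (linked)
// or the literal string 'marketplace' (unlinked).
const exampleControl = {
	'ms-vscode.references-view': '/home/me/src/vscode-references-view', // written by link()
	'ms-vscode.js-debug': 'marketplace' // written by unlink()
};
console.log(JSON.stringify(exampleControl, null, ' ')); // the same formatting writeControlFile() uses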

151
lib/vscode/build/filters.js Normal file
View File

@ -0,0 +1,151 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
/**
* Hygiene works by creating cascading subsets of all our files and
* passing them through a sequence of checks. Here are the current subsets,
* named according to the checks performed on them. Each subset contains
* the following one, as described in mathematical notation:
*
* all ⊃ eol ⊃ indentation ⊃ copyright ⊃ typescript
*/
module.exports.all = [
'*',
'build/**/*',
'extensions/**/*',
'scripts/**/*',
'src/**/*',
'test/**/*',
'!out*/**',
'!test/**/out/**',
'!**/node_modules/**',
];
module.exports.indentationFilter = [
'**',
// except specific files
'!**/ThirdPartyNotices.txt',
'!**/LICENSE.{txt,rtf}',
'!LICENSES.chromium.html',
'!**/LICENSE',
'!src/vs/nls.js',
'!src/vs/nls.build.js',
'!src/vs/css.js',
'!src/vs/css.build.js',
'!src/vs/loader.js',
'!src/vs/base/common/insane/insane.js',
'!src/vs/base/common/marked/marked.js',
'!src/vs/base/common/semver/semver.js',
'!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/cpuUsage.sh',
'!test/unit/assert.js',
'!resources/linux/snap/electron-launch',
// except specific folders
'!test/automation/out/**',
'!test/monaco/out/**',
'!test/smoke/out/**',
'!extensions/typescript-language-features/test-workspace/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!build/monaco/**',
'!build/win32/**',
// except multiple specific files
'!**/package.json',
'!**/yarn.lock',
'!**/yarn-error.log',
// except multiple specific folders
'!**/codicon/**',
'!**/fixtures/**',
'!**/lib/**',
'!extensions/**/dist/**',
'!extensions/**/out/**',
'!extensions/**/snippets/**',
'!extensions/**/syntaxes/**',
'!extensions/**/themes/**',
'!extensions/**/colorize-fixtures/**',
// except specific file types
'!src/vs/*/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,jpg,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns,plist}',
'!build/{lib,download,darwin}/**/*.js',
'!build/**/*.sh',
'!build/azure-pipelines/**/*.js',
'!build/azure-pipelines/**/*.config',
'!**/Dockerfile',
'!**/Dockerfile.*',
'!**/*.Dockerfile',
'!**/*.dockerfile',
'!extensions/markdown-language-features/media/*.js',
'!extensions/simple-browser/media/*.js',
];
module.exports.copyrightFilter = [
'**',
'!**/*.desktop',
'!**/*.json',
'!**/*.html',
'!**/*.template',
'!**/*.md',
'!**/*.bat',
'!**/*.cmd',
'!**/*.ico',
'!**/*.icns',
'!**/*.xml',
'!**/*.sh',
'!**/*.txt',
'!**/*.xpm',
'!**/*.opts',
'!**/*.disabled',
'!**/*.code-workspace',
'!**/*.js.map',
'!build/**/*.init',
'!resources/linux/snap/snapcraft.yaml',
'!resources/win32/bin/code.js',
'!resources/web/code-web.js',
'!resources/completions/**',
'!extensions/configuration-editing/build/inline-allOf.ts',
'!extensions/markdown-language-features/media/highlight.css',
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts',
];
module.exports.jsHygieneFilter = [
'src/**/*.js',
'build/gulpfile.*.js',
'!src/vs/loader.js',
'!src/vs/css.js',
'!src/vs/nls.js',
'!src/vs/css.build.js',
'!src/vs/nls.build.js',
'!src/**/insane.js',
'!src/**/marked.js',
'!src/**/semver.js',
'!**/test/**',
];
module.exports.tsHygieneFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
'!src/vs/*/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/fixtures/**',
'!**/typings/**',
'!**/node_modules/**',
'!extensions/**/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts',
];
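The arrays above are glob patterns where a leading '!' means exclusion; the eslint task shown earlier feeds them straight into vfs.src. As a rough mental model of what it means for a file to pass a filter, here is a small standalone sketch using the minimatch package (the helper, the sample paths, and the dependency on minimatch are assumptions for illustration, not part of this commit):

// Requires recent minimatch versions, which expose a named export.
const { minimatch } = require('minimatch');
const { tsHygieneFilter } = require('./filters'); // assumes the script sits next to filters.js

// A file passes a filter array if it matches at least one positive pattern
// and none of the '!'-prefixed (negated) patterns.
function accepted(filePath, patterns) {
	const positives = patterns.filter(p => !p.startsWith('!'));
	const negatives = patterns.filter(p => p.startsWith('!')).map(p => p.slice(1));
	return positives.some(p => minimatch(filePath, p)) && !negatives.some(p => minimatch(filePath, p));
}

console.log(accepted('src/vs/base/common/uri.ts', tsHygieneFilter)); // true
console.log(accepted('src/typings/require.d.ts', tsHygieneFilter)); // false: excluded by !src/typings/**/*.d.ts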

View File

@ -11,6 +11,11 @@ const task = require('./lib/task');
const compilation = require('./lib/compilation');
// Full compile, including nls and inline sources in sourcemaps, for build
- const compileBuildTask = task.define('compile-build', task.series(util.rimraf('out-build'), compilation.compileTask('src', 'out-build', true)));
+ const compileBuildTask = task.define('compile-build',
+ task.series(
+ util.rimraf('out-build'),
+ compilation.compileTask('src', 'out-build', true)
+ )
+ );
gulp.task(compileBuildTask);
exports.compileBuildTask = compileBuildTask;

Some files were not shown because too many files have changed in this diff.