mirror of https://github.com/docker/metadata-action.git
synced 2024-12-26 04:07:43 +01:00

switch to actions-toolkit implementation

Signed-off-by: CrazyMax <crazy-max@users.noreply.github.com>

parent 766400ca14
commit b5c378621c
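In short, this change removes the action's local `src/github.ts` module and the csv-parse based `getInputList` helper in `src/context.ts`, and drives the action through `@docker/actions-toolkit` instead. A minimal sketch of the new entrypoint shape, assembled from the hunks below (only imports and calls this commit introduces are used; input names and log lines are taken from the diff, the rest is illustrative, not the full action):

// Minimal sketch of the new entrypoint shape, assuming @docker/actions-toolkit ^0.1.0-beta.14
// as added to package.json in this commit.
import * as core from '@actions/core';
import * as actionsToolkit from '@docker/actions-toolkit';
import {Context} from '@actions/github/lib/context';
import {GitHub} from '@docker/actions-toolkit/lib/github';
import {Toolkit} from '@docker/actions-toolkit/lib/toolkit';
import {Util} from '@docker/actions-toolkit/lib/util';

actionsToolkit.run(async () => {
  // Inputs are parsed with the toolkit helper instead of the local csv-parse wrapper.
  const images = Util.getInputList('images', {ignoreComma: true});
  if (images.length == 0) {
    throw new Error(`images input required`);
  }

  // Repository metadata now comes from the toolkit's GitHub client
  // rather than the removed src/github.ts module.
  const toolkit = new Toolkit({githubToken: core.getInput('github-token')});
  const context: Context = GitHub.context;
  const repo = await toolkit.github.repoData();

  await core.group(`Context info`, async () => {
    core.info(`eventName: ${context.eventName}`);
    core.info(`ref: ${context.ref}`);
    core.info(`repo: ${repo.name}`);
  });
});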
@@ -1,168 +1,63 @@
import {describe, expect, it, jest} from '@jest/globals';
import * as fs from 'fs';
import * as path from 'path';
import {beforeEach, describe, expect, test} from '@jest/globals';

import * as context from '../src/context';

jest.spyOn(context, 'tmpDir').mockImplementation((): string => {
  const tmpDir = path.join('/tmp/.docker-metadata-action-jest').split(path.sep).join(path.posix.sep);
  if (!fs.existsSync(tmpDir)) {
    fs.mkdirSync(tmpDir, {recursive: true});
  }
  return tmpDir;
});

describe('getInputList', () => {
  it('single line correctly', async () => {
    await setInput('foo', 'bar');
    const res = context.getInputList('foo');
    expect(res).toEqual(['bar']);
describe('getInputs', () => {
  beforeEach(() => {
    process.env = Object.keys(process.env).reduce((object, key) => {
      if (!key.startsWith('INPUT_')) {
        object[key] = process.env[key];
      }
      return object;
    }, {});
  });

  it('multiline correctly', async () => {
    setInput('foo', 'bar\nbaz');
    const res = context.getInputList('foo');
    expect(res).toEqual(['bar', 'baz']);
  });

  it('empty lines correctly', async () => {
    setInput('foo', 'bar\n\nbaz');
    const res = context.getInputList('foo');
    expect(res).toEqual(['bar', 'baz']);
  });

  it('comment correctly', async () => {
    setInput('foo', 'bar\n#com\n"#taken"\nhello#comment\nbaz');
    const res = context.getInputList('foo');
    expect(res).toEqual(['bar', '#taken', 'hello', 'baz']);
  });

  it('comma correctly', async () => {
    setInput('foo', 'bar,baz');
    const res = context.getInputList('foo');
    expect(res).toEqual(['bar', 'baz']);
  });

  it('empty result correctly', async () => {
    setInput('foo', 'bar,baz,');
    const res = context.getInputList('foo');
    expect(res).toEqual(['bar', 'baz']);
  });

  it('different new lines correctly', async () => {
    setInput('foo', 'bar\r\nbaz');
    const res = context.getInputList('foo');
    expect(res).toEqual(['bar', 'baz']);
  });

  it('different new lines and comma correctly', async () => {
    setInput('foo', 'bar\r\nbaz,bat');
    const res = context.getInputList('foo');
    expect(res).toEqual(['bar', 'baz', 'bat']);
  });

  it('multiline and ignoring comma correctly', async () => {
    setInput('cache-from', 'user/app:cache\ntype=local,src=path/to/dir');
    const res = context.getInputList('cache-from', true);
    expect(res).toEqual(['user/app:cache', 'type=local,src=path/to/dir']);
  });

  it('different new lines and ignoring comma correctly', async () => {
    setInput('cache-from', 'user/app:cache\r\ntype=local,src=path/to/dir');
    const res = context.getInputList('cache-from', true);
    expect(res).toEqual(['user/app:cache', 'type=local,src=path/to/dir']);
  });

  it('multiline values', async () => {
    setInput(
      'secrets',
      `GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789
"MYSECRET=aaaaaaaa
bbbbbbb
ccccccccc"
FOO=bar`
    );
    const res = context.getInputList('secrets', true);
    expect(res).toEqual([
      'GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789',
      `MYSECRET=aaaaaaaa
bbbbbbb
ccccccccc`,
      'FOO=bar'
    ]);
  });

  it('multiline values with empty lines', async () => {
    setInput(
      'secrets',
      `GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789
"MYSECRET=aaaaaaaa
bbbbbbb
ccccccccc"
FOO=bar
"EMPTYLINE=aaaa

bbbb
ccc"`
    );
    const res = context.getInputList('secrets', true);
    expect(res).toEqual([
      'GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789',
      `MYSECRET=aaaaaaaa
bbbbbbb
ccccccccc`,
      'FOO=bar',
      `EMPTYLINE=aaaa

bbbb
ccc`
    ]);
  });

  it('multiline values without quotes', async () => {
    setInput(
      'secrets',
      `GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789
MYSECRET=aaaaaaaa
bbbbbbb
ccccccccc
FOO=bar`
    );
    const res = context.getInputList('secrets', true);
    expect(res).toEqual(['GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789', 'MYSECRET=aaaaaaaa', 'bbbbbbb', 'ccccccccc', 'FOO=bar']);
  });

  it('multiline values escape quotes', async () => {
    setInput(
      'secrets',
      `GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789
"MYSECRET=aaaaaaaa
bbbb""bbb
ccccccccc"
FOO=bar`
    );
    const res = context.getInputList('secrets', true);
    expect(res).toEqual([
      'GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789',
      `MYSECRET=aaaaaaaa
bbbb"bbb
ccccccccc`,
      'FOO=bar'
    ]);
  });
});

describe('asyncForEach', () => {
  it('executes async tasks sequentially', async () => {
    const testValues = [1, 2, 3, 4, 5];
    const results: number[] = [];

    await context.asyncForEach(testValues, async value => {
      results.push(value);
    });

    expect(results).toEqual(testValues);
  });
  // prettier-ignore
  test.each([
    [
      0,
      new Map<string, string>([
        ['images', 'moby/buildkit\nghcr.io/moby/mbuildkit'],
      ]),
      {
        bakeTarget: 'docker-metadata-action',
        flavor: [],
        githubToken: '',
        images: ['moby/buildkit', 'ghcr.io/moby/mbuildkit'],
        labels: [],
        sepLabels: '\n',
        sepTags: '\n',
        tags: [],
      } as context.Inputs
    ],
    [
      1,
      new Map<string, string>([
        ['bake-target', 'metadata'],
        ['images', 'moby/buildkit'],
        ['sep-labels', ','],
        ['sep-tags', ','],
      ]),
      {
        bakeTarget: 'metadata',
        flavor: [],
        githubToken: '',
        images: ['moby/buildkit'],
        labels: [],
        sepLabels: ',',
        sepTags: ',',
        tags: [],
      } as context.Inputs
    ]
  ])(
    '[%d] given %p as inputs, returns %p',
    async (num: number, inputs: Map<string, string>, expected: context.Inputs) => {
      inputs.forEach((value: string, name: string) => {
        setInput(name, value);
      });
      expect(await context.getInputs()).toEqual(expected);
    }
  );
});

// See: https://github.com/actions/toolkit/blob/master/packages/core/src/core.ts#L67
@@ -1,4 +1,5 @@
import {describe, expect, test} from '@jest/globals';

import {Flavor, Transform} from '../src/flavor';

describe('transform', () => {
@@ -1,14 +0,0 @@
import {describe, expect, jest, it} from '@jest/globals';
import * as github from '../src/github';

import * as repoFixture from './fixtures/repo.json';
jest.spyOn(github, 'repo').mockImplementation((): Promise<github.ReposGetResponseData> => {
  return <Promise<github.ReposGetResponseData>>(repoFixture as unknown);
});

describe('repo', () => {
  it('returns GitHub repository', async () => {
    const repo = await github.repo(process.env.GITHUB_TOKEN || '');
    expect(repo.name).not.toBeNull();
  });
});
@@ -1,4 +1,5 @@
import {describe, expect, test} from '@jest/globals';

import {Transform, Image} from '../src/image';

describe('transform', () => {
@@ -2,19 +2,17 @@ import {beforeEach, describe, expect, jest, test} from '@jest/globals';
import * as fs from 'fs';
import * as path from 'path';
import * as dotenv from 'dotenv';
import moment from 'moment-timezone';
import {getInputs, Inputs} from '../src/context';
import * as github from '../src/github';
import {Meta, Version} from '../src/meta';
import {Context} from '@actions/github/lib/context';
import {GitHub} from '@docker/actions-toolkit/lib/github';
import {Toolkit} from '@docker/actions-toolkit/lib/toolkit';
import {GitHubRepo} from '@docker/actions-toolkit/lib/types/github';

import * as repoFixture from './fixtures/repo.json';
jest.spyOn(github, 'repo').mockImplementation((): Promise<github.ReposGetResponseData> => {
  return <Promise<github.ReposGetResponseData>>(repoFixture as unknown);
});
import {getInputs, Inputs} from '../src/context';
import {Meta, Version} from '../src/meta';

jest.spyOn(github, 'context').mockImplementation((): Context => {
  return new Context();
import repoFixture from './fixtures/repo.json';
jest.spyOn(GitHub.prototype, 'repoData').mockImplementation((): Promise<GitHubRepo> => {
  return <Promise<GitHubRepo>>(repoFixture as unknown);
});

jest.spyOn(global.Date.prototype, 'toISOString').mockImplementation(() => {
@@ -26,6 +24,7 @@ jest.mock('moment-timezone', () => {
});

beforeEach(() => {
  jest.clearAllMocks();
  Object.keys(process.env).forEach(function (key) {
    if (key !== 'GITHUB_TOKEN' && key.startsWith('GITHUB_')) {
      delete process.env[key];
@@ -48,10 +47,9 @@ describe('isRawStatement', () => {

const tagsLabelsTest = async (name: string, envFile: string, inputs: Inputs, exVersion: Version, exTags: Array<string>, exLabels: Array<string>) => {
  process.env = dotenv.parse(fs.readFileSync(path.join(__dirname, 'fixtures', envFile)));
  const context = github.context();

  const repo = await github.repo(process.env.GITHUB_TOKEN || '');
  const meta = new Meta({...getInputs(), ...inputs}, context, repo);
  const toolkit = new Toolkit();
  const repo = await toolkit.github.repoData();
  const meta = new Meta({...getInputs(), ...inputs}, new Context(), repo);

  const version = meta.version;
  expect(version).toEqual(exVersion);
@@ -2765,10 +2763,10 @@ describe('pr-head-sha', () => {
  ])('given %p with %p event', async (name: string, envFile: string, inputs: Inputs, exVersion: Version, exTags: Array<string>, exLabels: Array<string>) => {
    process.env = dotenv.parse(fs.readFileSync(path.join(__dirname, 'fixtures', envFile)));
    process.env.DOCKER_METADATA_PR_HEAD_SHA = 'true';
    const context = github.context();

    const repo = await github.repo(process.env.GITHUB_TOKEN || '');
    const meta = new Meta({...getInputs(), ...inputs}, context, repo);
    const toolkit = new Toolkit();
    const repo = await toolkit.github.repoData();
    const meta = new Meta({...getInputs(), ...inputs}, new Context(), repo);

    const version = meta.version;
    expect(version).toEqual(exVersion);
@@ -3707,10 +3705,10 @@ describe('json', () => {
    ]
  ])('given %p with %p event', async (name: string, envFile: string, inputs: Inputs, exJSON: unknown) => {
    process.env = dotenv.parse(fs.readFileSync(path.join(__dirname, 'fixtures', envFile)));
    const context = github.context();

    const repo = await github.repo(process.env.GITHUB_TOKEN || '');
    const meta = new Meta({...getInputs(), ...inputs}, context, repo);
    const toolkit = new Toolkit();
    const repo = await toolkit.github.repoData();
    const meta = new Meta({...getInputs(), ...inputs}, new Context(), repo);

    const jsonOutput = meta.getJSON();
    expect(jsonOutput).toEqual(exJSON);
@@ -4013,10 +4011,10 @@ describe('bake', () => {
    ]
  ])('given %p with %p event', async (name: string, envFile: string, inputs: Inputs, exBakeDefinition: unknown) => {
    process.env = dotenv.parse(fs.readFileSync(path.join(__dirname, 'fixtures', envFile)));
    const context = github.context();

    const repo = await github.repo(process.env.GITHUB_TOKEN || '');
    const meta = new Meta({...getInputs(), ...inputs}, context, repo);
    const toolkit = new Toolkit();
    const repo = await toolkit.github.repoData();
    const meta = new Meta({...getInputs(), ...inputs}, new Context(), repo);

    const bakeFile = meta.getBakeFile();
    expect(JSON.parse(fs.readFileSync(bakeFile, 'utf8'))).toEqual(exBakeDefinition);
@@ -4059,10 +4057,10 @@ describe('sepTags', () => {
    ]
  ])('given %p with %p event', async (name: string, envFile: string, inputs: Inputs, expTags: string) => {
    process.env = dotenv.parse(fs.readFileSync(path.join(__dirname, 'fixtures', envFile)));
    const context = github.context();

    const repo = await github.repo(process.env.GITHUB_TOKEN || '');
    const meta = new Meta({...getInputs(), ...inputs}, context, repo);
    const toolkit = new Toolkit();
    const repo = await toolkit.github.repoData();
    const meta = new Meta({...getInputs(), ...inputs}, new Context(), repo);

    expect(meta.getTags().join(inputs.sepTags)).toEqual(expTags);
  });
@@ -1,4 +1,5 @@
import {describe, expect, test} from '@jest/globals';

import {Transform, Parse, Tag, Type, RefEvent, ShaFormat, DefaultPriorities} from '../src/tag';

describe('transform', () => {
@@ -16,14 +16,14 @@ COPY --from=deps /vendor /

FROM deps AS vendor-validate
RUN --mount=type=bind,target=.,rw <<EOT
  set -e
  git add -A
  cp -rf /vendor/* .
  if [ -n "$(git status --porcelain -- yarn.lock)" ]; then
    echo >&2 'ERROR: Vendor result differs. Please vendor your package with "docker buildx bake vendor-update"'
    git status --porcelain -- yarn.lock
    exit 1
  fi
  set -e
  git add -A
  cp -rf /vendor/* .
  if [ -n "$(git status --porcelain -- yarn.lock)" ]; then
    echo >&2 'ERROR: Vendor result differs. Please vendor your package with "docker buildx bake vendor-update"'
    git status --porcelain -- yarn.lock
    exit 1
  fi
EOT

FROM deps AS build
@@ -36,14 +36,14 @@ COPY --from=build /out /

FROM build AS build-validate
RUN --mount=type=bind,target=.,rw <<EOT
  set -e
  git add -A
  cp -rf /out/* .
  if [ -n "$(git status --porcelain -- dist)" ]; then
    echo >&2 'ERROR: Build result differs. Please build first with "docker buildx bake build"'
    git status --porcelain -- dist
    exit 1
  fi
  set -e
  git add -A
  cp -rf /out/* .
  if [ -n "$(git status --porcelain -- dist)" ]; then
    echo >&2 'ERROR: Build result differs. Please build first with "docker buildx bake build"'
    git status --porcelain -- dist
    exit 1
  fi
EOT

FROM deps AS format
@@ -1,5 +1,21 @@
import fs from 'fs';
import os from 'os';
import path from 'path';

const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'docker-metadata-action-'));

process.env = Object.assign({}, process.env, {
  TEMP: tmpDir,
  GITHUB_REPOSITORY: 'docker/metadata-action',
  RUNNER_TEMP: path.join(tmpDir, 'runner-temp'),
  RUNNER_TOOL_CACHE: path.join(tmpDir, 'runner-tool-cache')
}) as {
  [key: string]: string;
};

module.exports = {
  clearMocks: true,
  testEnvironment: 'node',
  moduleFileExtensions: ['js', 'ts'],
  setupFiles: ['dotenv/config'],
  testMatch: ['**/*.test.ts'],
@@ -9,5 +25,7 @@ module.exports = {
  moduleNameMapper: {
    '^csv-parse/sync': '<rootDir>/node_modules/csv-parse/dist/cjs/sync.cjs'
  },
  collectCoverageFrom: ['src/**/{!(main.ts),}.ts'],
  coveragePathIgnorePatterns: ['lib/', 'node_modules/', '__mocks__/', '__tests__/'],
  verbose: true
};
@@ -31,6 +31,7 @@
  "dependencies": {
    "@actions/core": "^1.10.0",
    "@actions/github": "^5.1.1",
    "@docker/actions-toolkit": "^0.1.0-beta.14",
    "@renovate/pep440": "^1.0.0",
    "csv-parse": "^5.3.3",
    "handlebars": "^4.7.7",
@@ -1,10 +1,5 @@
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import * as core from '@actions/core';
import {parse} from 'csv-parse/sync';

let _tmpDir: string;
import {Util} from '@docker/actions-toolkit/lib/util';

export interface Inputs {
  images: string[];
@@ -17,58 +12,15 @@ export interface Inputs {
  githubToken: string;
}

export function tmpDir(): string {
  if (!_tmpDir) {
    _tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'docker-metadata-action-')).split(path.sep).join(path.posix.sep);
  }
  return _tmpDir;
}

export function getInputs(): Inputs {
  return {
    images: getInputList('images', true),
    tags: getInputList('tags', true),
    flavor: getInputList('flavor', true),
    labels: getInputList('labels', true),
    images: Util.getInputList('images', {ignoreComma: true}),
    tags: Util.getInputList('tags', {ignoreComma: true}),
    flavor: Util.getInputList('flavor', {ignoreComma: true}),
    labels: Util.getInputList('labels', {ignoreComma: true}),
    sepTags: core.getInput('sep-tags', {trimWhitespace: false}) || `\n`,
    sepLabels: core.getInput('sep-labels', {trimWhitespace: false}) || `\n`,
    bakeTarget: core.getInput('bake-target') || `docker-metadata-action`,
    githubToken: core.getInput('github-token')
  };
}

export function getInputList(name: string, ignoreComma?: boolean): string[] {
  const res: Array<string> = [];

  const items = core.getInput(name);
  if (items == '') {
    return res;
  }

  const records = parse(items, {
    columns: false,
    relaxQuotes: true,
    comment: '#',
    relaxColumnCount: true,
    skipEmptyLines: true
  });

  for (const record of records as Array<string[]>) {
    if (record.length == 1) {
      res.push(record[0]);
      continue;
    } else if (!ignoreComma) {
      res.push(...record);
      continue;
    }
    res.push(record.join(','));
  }

  return res.filter(item => item).map(pat => pat.trim());
}

export const asyncForEach = async (array, callback) => {
  for (let index = 0; index < array.length; index++) {
    await callback(array[index], index, array);
  }
};
@@ -1,16 +0,0 @@
import * as github from '@actions/github';
import {Context} from '@actions/github/lib/context';
import {components as OctoOpenApiTypes} from '@octokit/openapi-types';

export type ReposGetResponseData = OctoOpenApiTypes['schemas']['repository'];

export function context(): Context {
  return github.context;
}

export async function repo(token: string): Promise<ReposGetResponseData> {
  return github
    .getOctokit(token)
    .rest.repos.get({...github.context.repo})
    .then(response => response.data as ReposGetResponseData);
}
src/main.ts (106 lines changed)
@@ -1,34 +1,45 @@
import * as fs from 'fs';
import {getInputs, Inputs} from './context';
import * as github from './github';
import {Meta, Version} from './meta';
import * as core from '@actions/core';
import * as actionsToolkit from '@docker/actions-toolkit';
import {Context} from '@actions/github/lib/context';
import {GitHub} from '@docker/actions-toolkit/lib/github';
import {Toolkit} from '@docker/actions-toolkit/lib/toolkit';

async function run() {
  try {
import {getInputs, Inputs} from './context';
import {Meta, Version} from './meta';

function setOutput(name: string, value: string) {
  core.setOutput(name, value);
  core.exportVariable(`DOCKER_METADATA_OUTPUT_${name.replace(/\W/g, '_').toUpperCase()}`, value);
}

actionsToolkit.run(
  // main
  async () => {
    const inputs: Inputs = await getInputs();
    if (inputs.images.length == 0) {
      throw new Error(`images input required`);
    }

    const context: Context = github.context();
    const repo: github.ReposGetResponseData = await github.repo(inputs.githubToken);
    core.startGroup(`Context info`);
    core.info(`eventName: ${context.eventName}`);
    core.info(`sha: ${context.sha}`);
    core.info(`ref: ${context.ref}`);
    core.info(`workflow: ${context.workflow}`);
    core.info(`action: ${context.action}`);
    core.info(`actor: ${context.actor}`);
    core.info(`runNumber: ${context.runNumber}`);
    core.info(`runId: ${context.runId}`);
    core.endGroup();
    const toolkit = new Toolkit({githubToken: inputs.githubToken});
    const context: Context = GitHub.context;
    const repo = await toolkit.github.repoData();

    await core.group(`Context info`, async () => {
      core.info(`eventName: ${context.eventName}`);
      core.info(`sha: ${context.sha}`);
      core.info(`ref: ${context.ref}`);
      core.info(`workflow: ${context.workflow}`);
      core.info(`action: ${context.action}`);
      core.info(`actor: ${context.actor}`);
      core.info(`runNumber: ${context.runNumber}`);
      core.info(`runId: ${context.runId}`);
    });

    if (core.isDebug()) {
      core.startGroup(`Webhook payload`);
      core.info(JSON.stringify(context.payload, null, 2));
      core.endGroup();
      await core.group(`Webhook payload`, async () => {
        core.info(JSON.stringify(context.payload, null, 2));
      });
    }

    const meta: Meta = new Meta(inputs, context, repo);
@@ -37,9 +48,9 @@ async function run() {
    if (meta.version.main == undefined || meta.version.main.length == 0) {
      core.warning(`No Docker image version has been generated. Check tags input.`);
    } else {
      core.startGroup(`Docker image version`);
      core.info(version.main || '');
      core.endGroup();
      await core.group(`Docker image version`, async () => {
        core.info(version.main || '');
      });
    }
    setOutput('version', version.main || '');

@@ -48,44 +59,35 @@ async function run() {
    if (tags.length == 0) {
      core.warning('No Docker tag has been generated. Check tags input.');
    } else {
      core.startGroup(`Docker tags`);
      for (const tag of tags) {
        core.info(tag);
      }
      core.endGroup();
      await core.group(`Docker tags`, async () => {
        for (const tag of tags) {
          core.info(tag);
        }
      });
    }
    setOutput('tags', tags.join(inputs.sepTags));

    // Docker labels
    const labels: Array<string> = meta.getLabels();
    core.startGroup(`Docker labels`);
    for (const label of labels) {
      core.info(label);
    }
    core.endGroup();
    setOutput('labels', labels.join(inputs.sepLabels));
    await core.group(`Docker labels`, async () => {
      for (const label of labels) {
        core.info(label);
      }
      setOutput('labels', labels.join(inputs.sepLabels));
    });

    // JSON
    const jsonOutput = meta.getJSON();
    core.startGroup(`JSON output`);
    core.info(JSON.stringify(jsonOutput, null, 2));
    core.endGroup();
    setOutput('json', JSON.stringify(jsonOutput));
    await core.group(`JSON output`, async () => {
      core.info(JSON.stringify(jsonOutput, null, 2));
      setOutput('json', JSON.stringify(jsonOutput));
    });

    // Bake file definition
    const bakeFile: string = meta.getBakeFile();
    core.startGroup(`Bake file definition`);
    core.info(fs.readFileSync(bakeFile, 'utf8'));
    core.endGroup();
    setOutput('bake-file', bakeFile);
  } catch (error) {
    core.setFailed(error.message);
    await core.group(`Bake file definition`, async () => {
      core.info(fs.readFileSync(bakeFile, 'utf8'));
      setOutput('bake-file', bakeFile);
    });
  }
}

function setOutput(name: string, value: string) {
  core.setOutput(name, value);
  core.exportVariable(`DOCKER_METADATA_OUTPUT_${name.replace(/\W/g, '_').toUpperCase()}`, value);
}

run();
);
src/meta.ts (16 lines changed)
@@ -4,13 +4,15 @@ import * as path from 'path';
import moment from 'moment-timezone';
import * as pep440 from '@renovate/pep440';
import * as semver from 'semver';
import {Inputs, tmpDir} from './context';
import {ReposGetResponseData} from './github';
import * as core from '@actions/core';
import {Context} from '@actions/github/lib/context';
import {Context as ToolkitContext} from '@docker/actions-toolkit/lib/context';
import {GitHubRepo} from '@docker/actions-toolkit/lib/types/github';

import {Inputs} from './context';
import * as icl from './image';
import * as tcl from './tag';
import * as fcl from './flavor';
import * as core from '@actions/core';
import {Context} from '@actions/github/lib/context';

export interface Version {
  main: string | undefined;
@@ -23,13 +25,13 @@ export class Meta {

  private readonly inputs: Inputs;
  private readonly context: Context;
  private readonly repo: ReposGetResponseData;
  private readonly repo: GitHubRepo;
  private readonly images: icl.Image[];
  private readonly tags: tcl.Tag[];
  private readonly flavor: fcl.Flavor;
  private readonly date: Date;

  constructor(inputs: Inputs, context: Context, repo: ReposGetResponseData) {
  constructor(inputs: Inputs, context: Context, repo: GitHubRepo) {
    // Needs to override Git reference with pr ref instead of upstream branch ref
    // for pull_request_target event
    // https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target
@@ -498,7 +500,7 @@ export class Meta {
  }

  public getBakeFile(): string {
    const bakeFile = path.join(tmpDir(), 'docker-metadata-action-bake.json').split(path.sep).join(path.posix.sep);
    const bakeFile = path.join(ToolkitContext.tmpDir(), 'docker-metadata-action-bake.json');
    fs.writeFileSync(
      bakeFile,
      JSON.stringify(
@@ -1,20 +1,21 @@
{
  "compilerOptions": {
    "esModuleInterop": true,
    "target": "es6",
    "module": "commonjs",
    "strict": true,
    "newLine": "lf",
    "outDir": "./lib",
    "rootDir": "./src",
    "esModuleInterop": true,
    "forceConsistentCasingInFileNames": true,
    "strict": true,
    "noImplicitAny": false,
    "resolveJsonModule": true,
    "useUnknownInCatchVariables": false,
  },
  "exclude": [
    "./__tests__/**/*",
    "./lib/**/*",
    "node_modules",
    "**/*.test.ts",
    "jest.config.ts"
  ]
}
yarn.lock (73 lines changed)
@@ -2,7 +2,7 @@
# yarn lockfile v1


"@actions/core@^1.10.0":
"@actions/core@^1.10.0", "@actions/core@^1.2.6":
  version "1.10.0"
  resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.10.0.tgz#44551c3c71163949a2f06e94d9ca2157a0cfac4f"
  integrity sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==
@@ -10,6 +10,13 @@
    "@actions/http-client" "^2.0.1"
    uuid "^8.3.2"

"@actions/exec@^1.0.0", "@actions/exec@^1.1.1":
  version "1.1.1"
  resolved "https://registry.yarnpkg.com/@actions/exec/-/exec-1.1.1.tgz#2e43f28c54022537172819a7cf886c844221a611"
  integrity sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==
  dependencies:
    "@actions/io" "^1.0.1"

"@actions/github@^5.1.1":
  version "5.1.1"
  resolved "https://registry.yarnpkg.com/@actions/github/-/github-5.1.1.tgz#40b9b9e1323a5efcf4ff7dadd33d8ea51651bbcb"
@@ -27,6 +34,23 @@
  dependencies:
    tunnel "^0.0.6"

"@actions/io@^1.0.1", "@actions/io@^1.1.1", "@actions/io@^1.1.2":
  version "1.1.2"
  resolved "https://registry.yarnpkg.com/@actions/io/-/io-1.1.2.tgz#766ac09674a289ce0f1550ffe0a6eac9261a8ea9"
  integrity sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw==

"@actions/tool-cache@^2.0.1":
  version "2.0.1"
  resolved "https://registry.yarnpkg.com/@actions/tool-cache/-/tool-cache-2.0.1.tgz#8a649b9c07838d9d750c9864814e66a7660ab720"
  integrity sha512-iPU+mNwrbA8jodY8eyo/0S/QqCKDajiR8OxWTnSk/SnYg0sj8Hp4QcUEVC1YFpHWXtrfbQrE13Jz4k4HXJQKcA==
  dependencies:
    "@actions/core" "^1.2.6"
    "@actions/exec" "^1.0.0"
    "@actions/http-client" "^2.0.1"
    "@actions/io" "^1.1.1"
    semver "^6.1.0"
    uuid "^3.3.2"

"@ampproject/remapping@^2.1.0":
  version "2.1.2"
  resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.1.2.tgz#4edca94973ded9630d20101cd8559cedb8d8bd34"
@@ -542,6 +566,22 @@
  dependencies:
    "@cspotcode/source-map-consumer" "0.8.0"

"@docker/actions-toolkit@^0.1.0-beta.14":
  version "0.1.0-beta.14"
  resolved "https://registry.yarnpkg.com/@docker/actions-toolkit/-/actions-toolkit-0.1.0-beta.14.tgz#82fa8a6b9802a7f770fde3ddcef1cf591739a80b"
  integrity sha512-N+aqiO0E2ygoaBORN8fx4K7j/CzJ2nCSgOewtDm0gdzrch8qZmTU14e3oNAbZlP8Q34Lk45KKefm5wDfLipRqg==
  dependencies:
    "@actions/core" "^1.10.0"
    "@actions/exec" "^1.1.1"
    "@actions/github" "^5.1.1"
    "@actions/http-client" "^2.0.1"
    "@actions/io" "^1.1.2"
    "@actions/tool-cache" "^2.0.1"
    csv-parse "^5.3.5"
    jwt-decode "^3.1.2"
    semver "^7.3.8"
    tmp "^0.2.1"

"@eslint/eslintrc@^1.2.1":
  version "1.2.1"
  resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-1.2.1.tgz#8b5e1c49f4077235516bc9ec7d41378c0f69b8c6"
@@ -1610,6 +1650,11 @@ csv-parse@*, csv-parse@^5.3.3:
  resolved "https://registry.yarnpkg.com/csv-parse/-/csv-parse-5.3.3.tgz#3b75d2279e2edb550cbc54c65b25cbbf3d0033ad"
  integrity sha512-kEWkAPleNEdhFNkHQpFHu9RYPogsFj3dx6bCxL847fsiLgidzWg0z/O0B1kVWMJUc5ky64zGp18LX2T3DQrOfw==

csv-parse@^5.3.5:
  version "5.3.5"
  resolved "https://registry.yarnpkg.com/csv-parse/-/csv-parse-5.3.5.tgz#9924bbba9f7056122f06b7af18edc1a7f022ce99"
  integrity sha512-8O5KTIRtwmtD3+EVfW6BCgbwZqJbhTYsQZry12F1TP5RUp0sD9tp1UnCWic3n0mLOhzeocYaCZNYxOGSg3dmmQ==

data-urls@^2.0.0:
  version "2.0.0"
  resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b"
@@ -2826,6 +2871,11 @@ json5@2.x, json5@^2.1.2:
  resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283"
  integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==

jwt-decode@^3.1.2:
  version "3.1.2"
  resolved "https://registry.yarnpkg.com/jwt-decode/-/jwt-decode-3.1.2.tgz#3fb319f3675a2df0c2895c8f5e9fa4b67b04ed59"
  integrity sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==

kleur@^3.0.3:
  version "3.0.3"
  resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e"
@@ -3292,11 +3342,18 @@ semver@7.x, semver@^7.3.2, semver@^7.3.5, semver@^7.3.7:
  dependencies:
    lru-cache "^6.0.0"

semver@^6.0.0, semver@^6.3.0:
semver@^6.0.0, semver@^6.1.0, semver@^6.3.0:
  version "6.3.0"
  resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d"
  integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==

semver@^7.3.8:
  version "7.3.8"
  resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798"
  integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==
  dependencies:
    lru-cache "^6.0.0"

shebang-command@^2.0.0:
  version "2.0.0"
  resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea"
@@ -3476,6 +3533,13 @@ throat@^6.0.1:
  resolved "https://registry.yarnpkg.com/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375"
  integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w==

tmp@^0.2.1:
  version "0.2.1"
  resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.2.1.tgz#8457fc3037dcf4719c251367a1af6500ee1ccf14"
  integrity sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==
  dependencies:
    rimraf "^3.0.0"

tmpl@1.0.x:
  version "1.0.5"
  resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc"
@@ -3627,6 +3691,11 @@ uri-js@^4.2.2:
  dependencies:
    punycode "^2.1.0"

uuid@^3.3.2:
  version "3.4.0"
  resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
  integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==

uuid@^8.3.2:
  version "8.3.2"
  resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"