Enhancement: Allow usage when GITHUB_REF or ACTIONS_CACHE_REF is defined

Andreas Möller 2020-05-16 23:05:56 +02:00
parent 16a133d9a7
commit 77fd223211
GPG key ID: 9FB20A0BAF60E11F (no known key found for this signature in database)
7 changed files with 792 additions and 638 deletions
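
In short: the single RefKey constant ("GITHUB_REF") is replaced by an ordered RefKeys list, and isValidEvent() now accepts a ref from either variable. A condensed sketch of the new behavior (equivalent to the src/utils/actionUtils.ts change further down, not the literal diff):

    // Ordered list of environment variables that may carry the ref;
    // ACTIONS_CACHE_REF is consulted before GITHUB_REF.
    const RefKeys = ["ACTIONS_CACHE_REF", "GITHUB_REF"];

    // The event is valid if the first ref variable found in the
    // environment holds a non-empty value.
    function isValidEvent(): boolean {
        for (const refKey of RefKeys) {
            if (refKey in process.env) {
                return Boolean(process.env[refKey]);
            }
        }
        return false;
    }

Note that, as written, the first key present in process.env decides the result, so an empty ACTIONS_CACHE_REF is not overridden by a populated GITHUB_REF.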

__tests__/actionUtils.test.ts

@@ -4,7 +4,7 @@ import { promises as fs } from "fs";
import * as os from "os";
import * as path from "path";
import { Events, Outputs, RefKey, State } from "../src/constants";
import { Events, Outputs, RefKeys, State } from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import * as actionUtils from "../src/utils/actionUtils";
@@ -19,7 +19,8 @@ function getTempDir(): string {
afterEach(() => {
delete process.env[Events.Key];
delete process.env[RefKey];
RefKeys.forEach(refKey => delete process.env[refKey]);
});
afterAll(async () => {
@@ -326,15 +327,22 @@ test("resolvePaths exclusion pattern returns not found", async () => {
}
});
test("isValidEvent returns true for event that has a ref", () => {
const refKeySet = RefKeys.map(refKey => {
return [refKey];
});
test.each(refKeySet)(
"isValidEvent returns true for event that has a ref",
refKey => {
const event = Events.Push;
process.env[Events.Key] = event;
process.env[RefKey] = "ref/heads/feature";
process.env[refKey] = "ref/heads/feature";
const isValidEvent = actionUtils.isValidEvent();
expect(isValidEvent).toBe(true);
});
}
);
test("unlinkFile unlinks file", async () => {
const testDirectory = await fs.mkdtemp("unlinkFileTest");
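
The test suites are all refactored along the same lines: tests that previously hard-coded RefKey are wrapped in test.each so each case runs once per entry in RefKeys. A minimal, hypothetical illustration of the pattern (condensed; the real tests also clear the variables in afterEach):

    import { Events, RefKeys } from "../src/constants";
    import * as actionUtils from "../src/utils/actionUtils";

    // One argument tuple per ref variable; test.each spreads each tuple
    // into the test callback.
    const refKeySet = RefKeys.map(refKey => [refKey]);

    test.each(refKeySet)("isValidEvent returns true when %s is set", refKey => {
        process.env[Events.Key] = Events.Push;
        process.env[refKey] = "refs/heads/feature";

        expect(actionUtils.isValidEvent()).toBe(true);
    });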

__tests__/restore.test.ts

@@ -7,7 +7,7 @@ import {
CompressionMethod,
Events,
Inputs,
RefKey
RefKeys
} from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import run from "../src/restore";
@@ -40,13 +40,17 @@ beforeAll(() => {
beforeEach(() => {
process.env[Events.Key] = Events.Push;
process.env[RefKey] = "refs/heads/feature-branch";
});
afterEach(() => {
testUtils.clearInputs();
delete process.env[Events.Key];
delete process.env[RefKey];
RefKeys.forEach(refKey => delete process.env[refKey]);
});
const refKeySet = RefKeys.map(refKey => {
return [refKey, `refs/heads/feature/${refKey.toLowerCase()}`];
});
test("restore with invalid event outputs warning", async () => {
@@ -54,7 +58,6 @@ test("restore with invalid event outputs warning", async () => {
const failedMock = jest.spyOn(core, "setFailed");
const invalidEvent = "commit_comment";
process.env[Events.Key] = invalidEvent;
delete process.env[RefKey];
await run();
expect(logWarningMock).toHaveBeenCalledWith(
`Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
@@ -62,16 +65,23 @@ test("restore with invalid event outputs warning", async () => {
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("restore with no path should fail", async () => {
test.each(refKeySet)(
"restore with no path should fail",
async (refKey, ref) => {
process.env[refKey] = ref;
const failedMock = jest.spyOn(core, "setFailed");
await run();
// this input isn't necessary for restore b/c tarball contains entries relative to workspace
expect(failedMock).not.toHaveBeenCalledWith(
"Input required and not supplied: path"
);
});
}
);
test.each(refKeySet)("restore with no key", async (refKey, ref) => {
process.env[refKey] = ref;
test("restore with no key", async () => {
testUtils.setInput(Inputs.Path, "node_modules");
const failedMock = jest.spyOn(core, "setFailed");
await run();
@@ -80,7 +90,11 @@ test("restore with no key", async () => {
);
});
test("restore with too many keys should fail", async () => {
test.each(refKeySet)(
"restore with too many keys should fail",
async (refKey, ref) => {
process.env[refKey] = ref;
const key = "node-test";
const restoreKeys = [...Array(20).keys()].map(x => x.toString());
testUtils.setInputs({
@@ -93,9 +107,14 @@ test("restore with too many keys should fail", async () => {
expect(failedMock).toHaveBeenCalledWith(
`Key Validation Error: Keys are limited to a maximum of 10.`
);
});
}
);
test.each(refKeySet)(
"restore with large key should fail",
async (refKey, ref) => {
process.env[refKey] = ref;
test("restore with large key should fail", async () => {
const key = "foo".repeat(512); // Over the 512 character limit
testUtils.setInputs({
path: "node_modules",
@@ -106,9 +125,14 @@ test("restore with large key should fail", async () => {
expect(failedMock).toHaveBeenCalledWith(
`Key Validation Error: ${key} cannot be larger than 512 characters.`
);
});
}
);
test.each(refKeySet)(
"restore with invalid key should fail",
async (refKey, ref) => {
process.env[refKey] = ref;
test("restore with invalid key should fail", async () => {
const key = "comma,comma";
testUtils.setInputs({
path: "node_modules",
@@ -119,9 +143,12 @@ test("restore with invalid key should fail", async () => {
expect(failedMock).toHaveBeenCalledWith(
`Key Validation Error: ${key} cannot contain commas.`
);
});
}
);
test.each(refKeySet)("restore with no cache found", async (refKey, ref) => {
process.env[refKey] = ref;
test("restore with no cache found", async () => {
const key = "node-test";
testUtils.setInputs({
path: "node_modules",
@@ -147,7 +174,11 @@ test("restore with no cache found", async () => {
);
});
test("restore with server error should fail", async () => {
test.each(refKeySet)(
"restore with server error should fail",
async (refKey, ref) => {
process.env[refKey] = ref;
const key = "node-test";
testUtils.setInputs({
path: "node_modules",
@@ -163,7 +194,10 @@ test("restore with server error should fail", async () => {
throw new Error("HTTP Error Occurred");
});
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
const setCacheHitOutputMock = jest.spyOn(
actionUtils,
"setCacheHitOutput"
);
await run();
@@ -176,9 +210,14 @@ test("restore with server error should fail", async () => {
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
expect(failedMock).toHaveBeenCalledTimes(0);
});
}
);
test.each(refKeySet)(
"restore with restore keys and no cache found",
async (refKey, ref) => {
process.env[refKey] = ref;
test("restore with restore keys and no cache found", async () => {
const key = "node-test";
const restoreKey = "node-";
testUtils.setInputs({
@@ -204,9 +243,14 @@ test("restore with restore keys and no cache found", async () => {
expect(infoMock).toHaveBeenCalledWith(
`Cache not found for input keys: ${key}, ${restoreKey}`
);
});
}
);
test.each(refKeySet)(
"restore with gzip compressed cache found",
async (refKey, ref) => {
process.env[refKey] = ref;
test("restore with gzip compressed cache found", async () => {
const key = "node-test";
testUtils.setInputs({
path: "node_modules",
@@ -247,7 +291,10 @@ test("restore with gzip compressed cache found", async () => {
const extractTarMock = jest.spyOn(tar, "extractTar");
const unlinkFileMock = jest.spyOn(actionUtils, "unlinkFile");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
const setCacheHitOutputMock = jest.spyOn(
actionUtils,
"setCacheHitOutput"
);
const compression = CompressionMethod.Gzip;
const getCompressionMock = jest
@@ -277,12 +324,19 @@ test("restore with gzip compressed cache found", async () => {
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
expect(infoMock).toHaveBeenCalledWith(
`Cache restored from key: ${key}`
);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
}
);
test.each(refKeySet)(
"restore with a pull request event and zstd compressed cache found",
async (refKey, ref) => {
process.env[refKey] = ref;
test("restore with a pull request event and zstd compressed cache found", async () => {
const key = "node-test";
testUtils.setInputs({
path: "node_modules",
@@ -324,7 +378,10 @@ test("restore with a pull request event and zstd compressed cache found", async
.mockReturnValue(fileSize);
const extractTarMock = jest.spyOn(tar, "extractTar");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
const setCacheHitOutputMock = jest.spyOn(
actionUtils,
"setCacheHitOutput"
);
const compression = CompressionMethod.Zstd;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
@@ -343,7 +400,9 @@ test("restore with a pull request event and zstd compressed cache found", async
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);
expect(infoMock).toHaveBeenCalledWith(
`Cache Size: ~60 MB (62915000 B)`
);
expect(extractTarMock).toHaveBeenCalledTimes(1);
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
@@ -351,12 +410,19 @@ test("restore with a pull request event and zstd compressed cache found", async
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
expect(infoMock).toHaveBeenCalledWith(
`Cache restored from key: ${key}`
);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
}
);
test.each(refKeySet)(
"restore with cache found for restore key",
async (refKey, ref) => {
process.env[refKey] = ref;
test("restore with cache found for restore key", async () => {
const key = "node-test";
const restoreKey = "node-";
testUtils.setInputs({
@@ -398,7 +464,10 @@ test("restore with cache found for restore key", async () => {
.mockReturnValue(fileSize);
const extractTarMock = jest.spyOn(tar, "extractTar");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
const setCacheHitOutputMock = jest.spyOn(
actionUtils,
"setCacheHitOutput"
);
const compression = CompressionMethod.Zstd;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
@@ -430,4 +499,5 @@ test("restore with cache found for restore key", async () => {
);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
}
);

__tests__/save.test.ts

@@ -7,7 +7,7 @@ import {
CompressionMethod,
Events,
Inputs,
RefKey
RefKeys
} from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import run from "../src/save";
@@ -60,29 +60,40 @@ beforeAll(() => {
beforeEach(() => {
process.env[Events.Key] = Events.Push;
process.env[RefKey] = "refs/heads/feature-branch";
});
afterEach(() => {
testUtils.clearInputs();
delete process.env[Events.Key];
delete process.env[RefKey];
RefKeys.forEach(refKey => delete process.env[refKey]);
});
test("save with invalid event outputs warning", async () => {
const refKeySet = RefKeys.map(refKey => {
return [refKey, `refs/heads/feature/${refKey.toLowerCase()}`];
});
test.each(refKeySet)(
"save with invalid event outputs warning",
async refKey => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const invalidEvent = "commit_comment";
process.env[Events.Key] = invalidEvent;
delete process.env[RefKey];
delete process.env[refKey];
await run();
expect(logWarningMock).toHaveBeenCalledWith(
`Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
);
expect(failedMock).toHaveBeenCalledTimes(0);
});
}
);
test.each(refKeySet)(
"save with no primary key in state outputs warning",
async (refKey, ref) => {
process.env[refKey] = ref;
test("save with no primary key in state outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
@@ -110,13 +121,19 @@ test("save with no primary key in state outputs warning", async () => {
);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(failedMock).toHaveBeenCalledTimes(0);
});
}
);
test.each(refKeySet)(
"save with exact match returns early",
async (refKey, ref) => {
process.env[refKey] = ref;
test("save with exact match returns early", async () => {
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const primaryKey =
"Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: primaryKey,
scope: "refs/heads/master",
@@ -145,13 +162,19 @@ test("save with exact match returns early", async () => {
expect(createTarMock).toHaveBeenCalledTimes(0);
expect(failedMock).toHaveBeenCalledTimes(0);
});
}
);
test.each(refKeySet)(
"save with missing input outputs warning",
async (refKey, ref) => {
process.env[refKey] = ref;
test("save with missing input outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const primaryKey =
"Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
@@ -176,13 +199,19 @@ test("save with missing input outputs warning", async () => {
);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(failedMock).toHaveBeenCalledTimes(0);
});
}
);
test.each(refKeySet)(
"save with large cache outputs warning",
async (refKey, ref) => {
process.env[refKey] = ref;
test("save with large cache outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const primaryKey =
"Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
@@ -207,9 +236,11 @@ test("save with large cache outputs warning", async () => {
const createTarMock = jest.spyOn(tar, "createTar");
const cacheSize = 6 * 1024 * 1024 * 1024; //~6GB, over the 5GB limit
jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(
() => {
return cacheSize;
});
}
);
const compression = CompressionMethod.Gzip;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
@@ -231,14 +262,20 @@ test("save with large cache outputs warning", async () => {
);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
}
);
test.each(refKeySet)(
"save with reserve cache failure outputs warning",
async (refKey, ref) => {
process.env[refKey] = ref;
test("save with reserve cache failure outputs warning", async () => {
const infoMock = jest.spyOn(core, "info");
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const primaryKey =
"Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
@@ -288,13 +325,19 @@ test("save with reserve cache failure outputs warning", async () => {
expect(logWarningMock).toHaveBeenCalledTimes(0);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
}
);
test.each(refKeySet)(
"save with server error outputs warning",
async (refKey, ref) => {
process.env[refKey] = ref;
test("save with server error outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const primaryKey =
"Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
@@ -360,12 +403,18 @@ test("save with server error outputs warning", async () => {
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
}
);
test.each(refKeySet)(
"save with valid inputs uploads a cache",
async (refKey, ref) => {
process.env[refKey] = ref;
test("save with valid inputs uploads a cache", async () => {
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const primaryKey =
"Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
@@ -424,4 +473,5 @@ test("save with valid inputs uploads a cache", async () => {
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
}
);

dist/restore/index.js vendored

@@ -3345,10 +3345,16 @@ function resolvePaths(patterns) {
});
}
exports.resolvePaths = resolvePaths;
// Cache token authorized for all events that are tied to a ref
// Cache token authorized for events where a reference is defined
// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
function isValidEvent() {
return constants_1.RefKey in process.env && Boolean(process.env[constants_1.RefKey]);
for (let i = 0; i < constants_1.RefKeys.length; i++) {
let refKey = constants_1.RefKeys[i];
if (refKey in process.env) {
return Boolean(process.env[refKey]);
}
}
return false;
}
exports.isValidEvent = isValidEvent;
function unlinkFile(path) {
@@ -4607,7 +4613,10 @@ var CompressionMethod;
// over the socket during this period, the socket is destroyed and the download
// is aborted.
exports.SocketTimeout = 5000;
exports.RefKey = "GITHUB_REF";
exports.RefKeys = [
"ACTIONS_CACHE_REF",
"GITHUB_REF",
];
/***/ }),

dist/save/index.js vendored

@@ -3345,10 +3345,16 @@ function resolvePaths(patterns) {
});
}
exports.resolvePaths = resolvePaths;
// Cache token authorized for all events that are tied to a ref
// Cache token authorized for events where a reference is defined
// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
function isValidEvent() {
return constants_1.RefKey in process.env && Boolean(process.env[constants_1.RefKey]);
for (let i = 0; i < constants_1.RefKeys.length; i++) {
let refKey = constants_1.RefKeys[i];
if (refKey in process.env) {
return Boolean(process.env[refKey]);
}
}
return false;
}
exports.isValidEvent = isValidEvent;
function unlinkFile(path) {
@@ -4694,7 +4700,10 @@ var CompressionMethod;
// over the socket during this period, the socket is destroyed and the download
// is aborted.
exports.SocketTimeout = 5000;
exports.RefKey = "GITHUB_REF";
exports.RefKeys = [
"ACTIONS_CACHE_REF",
"GITHUB_REF",
];
/***/ }),

src/constants.ts

@@ -34,4 +34,4 @@ export enum CompressionMethod {
// is aborted.
export const SocketTimeout = 5000;
export const RefKey = "GITHUB_REF";
export const RefKeys = ["ACTIONS_CACHE_REF", "GITHUB_REF"];

src/utils/actionUtils.ts

@@ -11,7 +11,7 @@ import {
CacheFilename,
CompressionMethod,
Outputs,
RefKey,
RefKeys,
State
} from "../constants";
import { ArtifactCacheEntry } from "../contracts";
@@ -108,10 +108,18 @@ export async function resolvePaths(patterns: string[]): Promise<string[]> {
return paths;
}
// Cache token authorized for all events that are tied to a ref
// Cache token authorized for events where a reference is defined
// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
export function isValidEvent(): boolean {
return RefKey in process.env && Boolean(process.env[RefKey]);
for (let i = 0; i < RefKeys.length; i++) {
let refKey = RefKeys[i];
if (refKey in process.env) {
return Boolean(process.env[refKey]);
}
}
return false;
}
export function unlinkFile(path: fs.PathLike): Promise<void> {
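
Taken together with the RefKeys order defined in src/constants.ts, the reworked check behaves roughly as follows (a hypothetical manual check, not part of the commit):

    import { isValidEvent } from "../src/utils/actionUtils";

    delete process.env["ACTIONS_CACHE_REF"];
    process.env["GITHUB_REF"] = "refs/heads/main";
    console.log(isValidEvent()); // true: GITHUB_REF is defined and non-empty

    delete process.env["GITHUB_REF"];
    process.env["ACTIONS_CACHE_REF"] = "refs/heads/feature";
    console.log(isValidEvent()); // true: ACTIONS_CACHE_REF alone is enough

    process.env["ACTIONS_CACHE_REF"] = "";
    process.env["GITHUB_REF"] = "refs/heads/main";
    console.log(isValidEvent()); // false: the first key present wins, even when empty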