Mirror of https://code.forgejo.org/actions/cache.git (synced 2025-04-16 16:01:22 +02:00)
Allow for multiple line-delimited paths to cache
parent 826785142a, commit 84cead4a82
8 changed files with 116 additions and 58 deletions
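In outline, the commit lets the `path` input carry several cache targets, one per line: the single resolved `cachePath` becomes a `cachePaths` array, `createTar`/`extractTar` are reworked around the workspace directory, and the tests follow suit. A minimal sketch of the parsing the diff below introduces, with `resolvePath` stubbed for illustration (the action's real helper lives in its utils module and also handles `~` expansion):

import * as path from "path";

// Stand-in for the action's utils.resolvePath; assumes plain resolution
// against the workspace directory.
function resolvePath(p: string): string {
    return path.resolve(process.env["GITHUB_WORKSPACE"] ?? process.cwd(), p);
}

// Mirrors the pipeline the diff adds to src/save.ts.
function parseCachePaths(input: string): string[] {
    return input
        .split("\n")
        .filter(x => x !== "")
        .map(x => resolvePath(x));
}

// Example: a two-line `path` input yields two resolved entries.
console.log(parseCachePaths("node_modules\ndist\n"));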
__tests__/restore.test.ts

@@ -59,7 +59,8 @@ test("restore with invalid event outputs warning", async () => {
 test("restore with no path should fail", async () => {
     const failedMock = jest.spyOn(core, "setFailed");
     await run();
-    expect(failedMock).toHaveBeenCalledWith(
+    // TODO: this shouldn't be necessary if tarball contains entries relative to workspace
+    expect(failedMock).not.toHaveBeenCalledWith(
         "Input required and not supplied: path"
     );
 });
@@ -201,7 +202,6 @@ test("restore with restore keys and no cache found", async () => {

 test("restore with cache found", async () => {
     const key = "node-test";
-    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key
@@ -255,7 +255,7 @@ test("restore with cache found", async () => {
     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);

     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath);

     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
@@ -266,7 +266,6 @@ test("restore with cache found", async () => {

 test("restore with a pull request event and cache found", async () => {
     const key = "node-test";
-    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key
@@ -323,7 +322,7 @@ test("restore with a pull request event and cache found", async () => {
     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);

     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath);

     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
@@ -335,7 +334,6 @@ test("restore with a pull request event and cache found", async () => {
 test("restore with cache found for restore key", async () => {
     const key = "node-test";
     const restoreKey = "node-";
-    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key,
@@ -391,7 +389,7 @@ test("restore with cache found for restore key", async () => {
     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);

     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath);

     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
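Throughout these tests the second argument to extractTar disappears: extraction is now pinned to GITHUB_WORKSPACE rather than a caller-supplied directory, so the cachePath fixture is no longer needed. A minimal self-contained jest sketch of the updated expectation (the inline tar stub stands in for the action's real module):

// Hypothetical stand-in for the action's tar module, for illustration only.
const tar = {
    extractTar: async (_archivePath: string): Promise<void> => {}
};

test("extractTar is called with the archive path only", async () => {
    const extractTarMock = jest.spyOn(tar, "extractTar");
    await tar.extractTar("cache.tgz");
    expect(extractTarMock).toHaveBeenCalledTimes(1);
    expect(extractTarMock).toHaveBeenCalledWith("cache.tgz");
});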
__tests__/save.test.ts

@@ -159,10 +159,11 @@ test("save with missing input outputs warning", async () => {

     await run();

-    expect(logWarningMock).toHaveBeenCalledWith(
+    // TODO: this shouldn't be necessary if tarball contains entries relative to workspace
+    expect(logWarningMock).not.toHaveBeenCalledWith(
         "Input required and not supplied: path"
     );
-    expect(logWarningMock).toHaveBeenCalledTimes(1);
+    expect(logWarningMock).toHaveBeenCalledTimes(0);
     expect(failedMock).toHaveBeenCalledTimes(0);
 });

@@ -189,7 +190,7 @@ test("save with large cache outputs warning", async () => {
     });

     const inputPath = "node_modules";
-    const cachePath = path.resolve(inputPath);
+    const cachePaths = [path.resolve(inputPath)];
     testUtils.setInput(Inputs.Path, inputPath);

     const createTarMock = jest.spyOn(tar, "createTar");
@@ -204,7 +205,7 @@ test("save with large cache outputs warning", async () => {
     const archivePath = path.join("/foo/bar", "cache.tgz");

     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePaths);

     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith(
@@ -288,7 +289,7 @@ test("save with server error outputs warning", async () => {
     });

     const inputPath = "node_modules";
-    const cachePath = path.resolve(inputPath);
+    const cachePaths = [path.resolve(inputPath)];
     testUtils.setInput(Inputs.Path, inputPath);

     const cacheId = 4;
@@ -314,7 +315,7 @@ test("save with server error outputs warning", async () => {
     const archivePath = path.join("/foo/bar", "cache.tgz");

     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePaths);

     expect(saveCacheMock).toHaveBeenCalledTimes(1);
     expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
@@ -347,7 +348,7 @@ test("save with valid inputs uploads a cache", async () => {
     });

     const inputPath = "node_modules";
-    const cachePath = path.resolve(inputPath);
+    const cachePaths = [path.resolve(inputPath)];
     testUtils.setInput(Inputs.Path, inputPath);

     const cacheId = 4;
@@ -369,7 +370,7 @@ test("save with valid inputs uploads a cache", async () => {
     const archivePath = path.join("/foo/bar", "cache.tgz");

     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePaths);

     expect(saveCacheMock).toHaveBeenCalledTimes(1);
     expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
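The save tests now build the expected argument as a one-element array, `[path.resolve(inputPath)]`, rather than a bare string. This works because jest matches `toHaveBeenCalledWith` arguments structurally, so the array constructed in the test compares equal to the one `run()` builds internally. A small sketch of that matching behavior:

import * as path from "path";

test("toHaveBeenCalledWith matches arrays by value, not identity", () => {
    const fn = jest.fn((paths: string[]): number => paths.length);
    fn([path.resolve("node_modules")]);
    // A freshly built array with equal contents still matches.
    expect(fn).toHaveBeenCalledWith([path.resolve("node_modules")]);
});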
__tests__/tar.test.ts

@@ -9,6 +9,12 @@ beforeAll(() => {
     jest.spyOn(io, "which").mockImplementation(tool => {
         return Promise.resolve(tool);
     });
+
+    process.env["GITHUB_WORKSPACE"] = process.cwd();
+});
+
+afterAll(() => {
+    process.env["GITHUB_WORKSPACE"] = undefined;
 });

 test("extract tar", async () => {
@@ -16,10 +22,11 @@ test("extract tar", async () => {
     const execMock = jest.spyOn(exec, "exec");

     const archivePath = "cache.tar";
-    const targetDirectory = "~/.npm/cache";
-    await tar.extractTar(archivePath, targetDirectory);
+    const workspace = process.env["GITHUB_WORKSPACE"];

-    expect(mkdirMock).toHaveBeenCalledWith(targetDirectory);
+    await tar.extractTar(archivePath);
+
+    expect(mkdirMock).toHaveBeenCalledWith(workspace);

     const IS_WINDOWS = process.platform === "win32";
     const tarPath = IS_WINDOWS
@@ -30,8 +37,9 @@ test("extract tar", async () => {
         "-xz",
         "-f",
         archivePath,
+        "-P",
         "-C",
-        targetDirectory
+        workspace
     ]);
 });

@@ -39,8 +47,10 @@ test("create tar", async () => {
     const execMock = jest.spyOn(exec, "exec");

     const archivePath = "cache.tar";
-    const sourceDirectory = "~/.npm/cache";
-    await tar.createTar(archivePath, sourceDirectory);
+    const workspace = process.env["GITHUB_WORKSPACE"];
+    const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];
+
+    await tar.createTar(archivePath, sourceDirectories);

     const IS_WINDOWS = process.platform === "win32";
     const tarPath = IS_WINDOWS
@@ -52,7 +62,7 @@ test("create tar", async () => {
         "-f",
         archivePath,
         "-C",
-        sourceDirectory,
-        "."
+        workspace,
+        sourceDirectories.join(" ")
     ]);
 });
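The expected tar invocations change in two ways: extraction gains `-P` (GNU tar's `--absolute-names`, which stops tar from stripping leading slashes), and both commands are anchored to the workspace with `-C` instead of a per-call directory. A sketch of the two argument lists under those assumptions, with illustrative values:

// Illustrative values only; the tests read workspace from GITHUB_WORKSPACE.
const archivePath = "cache.tar";
const workspace = "/home/runner/work/repo";
const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];

// Extract: unpack into the workspace, preserving absolute paths (-P).
const extractArgs = ["-xz", "-f", archivePath, "-P", "-C", workspace];

// Create: archive the listed directories relative to the workspace. Note the
// space-join: the list is handed over as a single space-separated string.
const createArgs = [
    "-cz",
    "-f",
    archivePath,
    "-C",
    workspace,
    sourceDirectories.join(" ")
];

Whether that space-joined string reaches tar as one argument or several depends on how execTar ultimately invokes the process, which is why the tests only assert the argument list itself.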
dist/restore/index.js (vendored, 34 changed lines)

@@ -2704,6 +2704,7 @@ var Inputs;
 (function (Inputs) {
     Inputs["Key"] = "key";
     Inputs["Path"] = "path";
+    Inputs["Paths"] = "paths";
     Inputs["RestoreKeys"] = "restore-keys";
 })(Inputs = exports.Inputs || (exports.Inputs = {}));
 var Outputs;
@@ -2802,8 +2803,10 @@ function run() {
                 .join(", ")} events are supported at this time.`);
             return;
         }
-        const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true }));
-        core.debug(`Cache Path: ${cachePath}`);
+        // const cachePath = utils.resolvePath(
+        //     core.getInput(Inputs.Path, { required: true })
+        // );
+        // core.debug(`Cache Path: ${cachePath}`);
         const primaryKey = core.getInput(constants_1.Inputs.Key, { required: true });
         core.saveState(constants_1.State.CacheKey, primaryKey);
         const restoreKeys = core
@@ -2831,7 +2834,7 @@ function run() {
         try {
             const cacheEntry = yield cacheHttpClient.getCacheEntry(keys);
             if (!((_a = cacheEntry) === null || _a === void 0 ? void 0 : _a.archiveLocation)) {
-                core.info(`Cache not found for input keys: ${keys.join(", ")}.`);
+                core.info(`Cache not found for input keys: ${keys.join(", ")}`);
                 return;
             }
             const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz");
@@ -2842,7 +2845,7 @@ function run() {
             yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath);
             const archiveFileSize = utils.getArchiveFileSize(archivePath);
             core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
-            yield tar_1.extractTar(archivePath, cachePath);
+            yield tar_1.extractTar(archivePath);
             const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheEntry);
             utils.setCacheHitOutput(isExactKeyMatch);
             core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`);
@@ -2959,18 +2962,31 @@ function execTar(args) {
         }
     });
 }
-function extractTar(archivePath, targetDirectory) {
+function getWorkingDirectory() {
+    var _a;
+    return _a = process.env.GITHUB_WORKSPACE, (_a !== null && _a !== void 0 ? _a : process.cwd());
+}
+function extractTar(archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
-        yield io.mkdirP(targetDirectory);
-        const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
+        const workingDirectory = getWorkingDirectory();
+        yield io.mkdirP(workingDirectory);
+        const args = ["-xz", "-f", archivePath, "-P", "-C", workingDirectory];
         yield execTar(args);
     });
 }
 exports.extractTar = extractTar;
-function createTar(archivePath, sourceDirectory) {
+function createTar(archivePath, sourceDirectories) {
     return __awaiter(this, void 0, void 0, function* () {
-        const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
+        const workingDirectory = getWorkingDirectory();
+        const args = [
+            "-cz",
+            "-f",
+            archivePath,
+            "-C",
+            workingDirectory,
+            sourceDirectories.join(" ")
+        ];
         yield execTar(args);
     });
 }
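The vendored `getWorkingDirectory` looks convoluted only because it is TypeScript's down-level emit of a nullish-coalescing expression: for a compile target without native `??`, the compiler rewrites the operator into an explicit null/undefined check. Side by side, for reference:

// Source form (src/tar.ts, shown later in this diff):
function getWorkingDirectory(): string {
    return process.env["GITHUB_WORKSPACE"] ?? process.cwd();
}

// Down-leveled emit, as seen in the vendored bundle above:
//   var _a;
//   return _a = process.env.GITHUB_WORKSPACE,
//       (_a !== null && _a !== void 0 ? _a : process.cwd());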
dist/save/index.js (vendored, 35 changed lines)

@@ -2749,11 +2749,16 @@ function run() {
             return;
         }
         core.debug(`Cache ID: ${cacheId}`);
-        const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true }));
-        core.debug(`Cache Path: ${cachePath}`);
+        const cachePaths = core
+            .getInput(constants_1.Inputs.Path, { required: false })
+            .split("\n")
+            .filter(x => x !== "")
+            .map(x => utils.resolvePath(x));
+        core.debug("Cache Paths:");
+        core.debug(`${JSON.stringify(cachePaths)}`);
         const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz");
         core.debug(`Archive Path: ${archivePath}`);
-        yield tar_1.createTar(archivePath, cachePath);
+        yield tar_1.createTar(archivePath, cachePaths);
         const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
         const archiveFileSize = utils.getArchiveFileSize(archivePath);
         core.debug(`File Size: ${archiveFileSize}`);
@@ -2785,6 +2790,7 @@ var Inputs;
 (function (Inputs) {
     Inputs["Key"] = "key";
     Inputs["Path"] = "path";
+    Inputs["Paths"] = "paths";
     Inputs["RestoreKeys"] = "restore-keys";
 })(Inputs = exports.Inputs || (exports.Inputs = {}));
 var Outputs;
@@ -2940,18 +2946,31 @@ function execTar(args) {
         }
     });
 }
-function extractTar(archivePath, targetDirectory) {
+function getWorkingDirectory() {
+    var _a;
+    return _a = process.env.GITHUB_WORKSPACE, (_a !== null && _a !== void 0 ? _a : process.cwd());
+}
+function extractTar(archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
-        yield io.mkdirP(targetDirectory);
-        const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
+        const workingDirectory = getWorkingDirectory();
+        yield io.mkdirP(workingDirectory);
+        const args = ["-xz", "-f", archivePath, "-P", "-C", workingDirectory];
         yield execTar(args);
     });
 }
 exports.extractTar = extractTar;
-function createTar(archivePath, sourceDirectory) {
+function createTar(archivePath, sourceDirectories) {
     return __awaiter(this, void 0, void 0, function* () {
-        const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
+        const workingDirectory = getWorkingDirectory();
+        const args = [
+            "-cz",
+            "-f",
+            archivePath,
+            "-C",
+            workingDirectory,
+            sourceDirectories.join(" ")
+        ];
         yield execTar(args);
     });
 }
src/restore.ts

@@ -19,10 +19,10 @@ async function run(): Promise<void> {
             return;
         }

-        const cachePath = utils.resolvePath(
-            core.getInput(Inputs.Path, { required: true })
-        );
-        core.debug(`Cache Path: ${cachePath}`);
+        // const cachePath = utils.resolvePath(
+        //     core.getInput(Inputs.Path, { required: true })
+        // );
+        // core.debug(`Cache Path: ${cachePath}`);

         const primaryKey = core.getInput(Inputs.Key, { required: true });
         core.saveState(State.CacheKey, primaryKey);
@@ -87,7 +87,7 @@ async function run(): Promise<void> {
             )} MB (${archiveFileSize} B)`
         );

-        await extractTar(archivePath, cachePath);
+        await extractTar(archivePath);

         const isExactKeyMatch = utils.isExactKeyMatch(
             primaryKey,
src/save.ts (14 changed lines)

@@ -43,10 +43,14 @@ async function run(): Promise<void> {
             return;
         }
         core.debug(`Cache ID: ${cacheId}`);
-        const cachePath = utils.resolvePath(
-            core.getInput(Inputs.Path, { required: true })
-        );
-        core.debug(`Cache Path: ${cachePath}`);
+        const cachePaths = core
+            .getInput(Inputs.Path)
+            .split("\n")
+            .filter(x => x !== "")
+            .map(x => utils.resolvePath(x));
+
+        core.debug("Cache Paths:");
+        core.debug(`${JSON.stringify(cachePaths)}`);

         const archivePath = path.join(
             await utils.createTempDirectory(),
@@ -54,7 +58,7 @@ async function run(): Promise<void> {
         );
         core.debug(`Archive Path: ${archivePath}`);

-        await createTar(archivePath, cachePath);
+        await createTar(archivePath, cachePaths);

         const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
         const archiveFileSize = utils.getArchiveFileSize(archivePath);
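Because the pipeline filters out empty strings after the split, stray blank lines or a trailing newline in a multi-line `path` input produce no spurious entries. A quick illustration:

// Trailing and interior blank lines are dropped by the filter step.
const raw = "node_modules\n\ndist\n";
const entries = raw.split("\n").filter(x => x !== "");
console.log(entries); // ["node_modules", "dist"]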
src/tar.ts (26 changed lines)

@@ -28,20 +28,30 @@ async function execTar(args: string[]): Promise<void> {
     }
 }

-export async function extractTar(
-    archivePath: string,
-    targetDirectory: string
-): Promise<void> {
+function getWorkingDirectory(): string {
+    return process.env["GITHUB_WORKSPACE"] ?? process.cwd();
+}
+
+export async function extractTar(archivePath: string): Promise<void> {
     // Create directory to extract tar into
-    await io.mkdirP(targetDirectory);
-    const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
+    const workingDirectory = getWorkingDirectory();
+    await io.mkdirP(workingDirectory);
+    const args = ["-xz", "-f", archivePath, "-P", "-C", workingDirectory];
     await execTar(args);
 }

 export async function createTar(
     archivePath: string,
-    sourceDirectory: string
+    sourceDirectories: string[]
 ): Promise<void> {
-    const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
+    const workingDirectory = getWorkingDirectory();
+    const args = [
+        "-cz",
+        "-f",
+        archivePath,
+        "-C",
+        workingDirectory,
+        sourceDirectories.join(" ")
+    ];
    await execTar(args);
 }
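Putting the reworked module together, a hypothetical caller (assuming the repo's src layout and a set GITHUB_WORKSPACE) would drive both halves like this; extraction needs no destination because the module derives it from the environment:

import * as tar from "./tar";

async function demo(): Promise<void> {
    // The module falls back to process.cwd() when GITHUB_WORKSPACE is unset.
    process.env["GITHUB_WORKSPACE"] =
        process.env["GITHUB_WORKSPACE"] ?? process.cwd();

    // Save side: archive several directories into one tarball.
    await tar.createTar("cache.tgz", ["node_modules", "dist"]);

    // Restore side: unpack back into the workspace.
    await tar.extractTar("cache.tgz");
}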