unit test coverage for caching multiple dirs

Ethan Dennis 2020-03-06 14:39:03 -08:00
parent e0d1942524
commit 057d9de723
11 changed files with 161 additions and 80 deletions

View file

@@ -1,29 +0,0 @@
-name: Cache
-on:
-  pull_request:
-    branches:
-      - master
-
-jobs:
-  build:
-    runs-on: self-hosted
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-      - name: Setup Node.js
-        uses: actions/setup-node@v1
-        with:
-          node-version: '12.x'
-      - name: Restore npm cache
-        uses: ./
-        id: cache
-        with:
-          path: |
-            node_modules
-            dist
-          key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}-${{ hashFiles('salt.txt') }}
-      - run: npm install
-        if: steps.cache.outputs.cache-hit != 'true'
-      - run: npm run build
-        if: steps.cache.outputs.cache-hit != 'true'

View file

@@ -1,11 +1,11 @@
 name: Tests
 on:
-  # pull_request:
-  #   branches:
-  #     - master
-  #   paths-ignore:
-  #     - '**.md'
+  pull_request:
+    branches:
+      - master
+    paths-ignore:
+      - '**.md'
   push:
     branches:
       - master

View file

@@ -1,7 +1,8 @@
 import * as core from "@actions/core";
-import * as glob from "@actions/glob";
+import * as io from "@actions/io";
 import * as os from "os";
 import * as path from "path";
+import { promises as fs } from "fs";

 import { Events, Outputs, State } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
@@ -10,10 +11,19 @@ import * as actionUtils from "../src/utils/actionUtils";
 jest.mock("@actions/core");
 jest.mock("os");

+function getTempDir(): string {
+    return path.join(__dirname, "_temp", "actionUtils");
+}
+
 afterEach(() => {
     delete process.env[Events.Key];
 });

+afterAll(async () => {
+    delete process.env["GITHUB_WORKSPACE"];
+    await io.rmRF(getTempDir());
+});
+
 test("getArchiveFileSize returns file size", () => {
     const filePath = path.join(__dirname, "__fixtures__", "helloWorld.txt");
@@ -183,13 +193,34 @@ test("isValidEvent returns false for unknown event", () => {
 });

 test("resolvePaths with no ~ in path", async () => {
-    // TODO: these test paths will need to exist
-    const filePath = ".cache/yarn";
-    const resolvedPath = await actionUtils.resolvePaths([filePath]);
+    const filePath = ".cache";
+
+    // Create the following layout:
+    //   cwd
+    //   cwd/.cache
+    //   cwd/.cache/file.txt
+    const root = path.join(getTempDir(), "no-tilde");
+    // tarball entries will be relative to workspace
+    process.env["GITHUB_WORKSPACE"] = root;
+    await fs.mkdir(root, { recursive: true });
+    const cache = path.join(root, ".cache");
+    await fs.mkdir(cache, { recursive: true });
+    await fs.writeFile(path.join(cache, "file.txt"), "cached");

-    const expectedPath = [path.resolve(filePath)];
-    expect(resolvedPath).toStrictEqual(expectedPath);
+    const originalCwd = process.cwd();
+
+    try {
+        process.chdir(root);
+
+        const resolvedPath = await actionUtils.resolvePaths([filePath]);
+
+        const expectedPath = [filePath];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        process.chdir(originalCwd);
+    }
 });

 test("resolvePaths with ~ in path", async () => {
@@ -201,26 +232,87 @@ test("resolvePaths with ~ in path", async () => {
         return homedir;
     });

+    const root = getTempDir();
+    process.env["GITHUB_WORKSPACE"] = root;
+
     const resolvedPath = await actionUtils.resolvePaths([filePath]);

-    const expectedPath = [path.join(homedir, ".cache/yarn")];
+    const expectedPath = [
+        path.relative(root, path.join(homedir, ".cache/yarn"))
+    ];
     expect(resolvedPath).toStrictEqual(expectedPath);
 });

-test("resolvePaths with home not found", () => {
+test("resolvePaths with home not found", async () => {
     const filePath = "~/.cache/yarn";
     const homedirMock = jest.spyOn(os, "homedir");
     homedirMock.mockImplementation(() => {
         return "";
     });
-    // const globMock = jest.spyOn(glob, "homedir");
-    // globMock.mockImplementation(() => "");

-    expect(async () => await actionUtils.resolvePaths([filePath])).toThrow(
-        "Unable to resolve `~` to HOME"
+    await expect(actionUtils.resolvePaths([filePath])).rejects.toThrow(
+        "Unable to determine HOME directory"
     );
 });

+test("resolvePaths inclusion pattern returns found", async () => {
+    const pattern = "*.ts";
+
+    // Create the following layout:
+    //   inclusion-patterns
+    //   inclusion-patterns/miss.txt
+    //   inclusion-patterns/test.ts
+    const root = path.join(getTempDir(), "inclusion-patterns");
+    // tarball entries will be relative to workspace
+    process.env["GITHUB_WORKSPACE"] = root;
+    await fs.mkdir(root, { recursive: true });
+    await fs.writeFile(path.join(root, "miss.txt"), "no match");
+    await fs.writeFile(path.join(root, "test.ts"), "match");
+
+    const originalCwd = process.cwd();
+
+    try {
+        process.chdir(root);
+
+        const resolvedPath = await actionUtils.resolvePaths([pattern]);
+
+        const expectedPath = ["test.ts"];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        process.chdir(originalCwd);
+    }
+});
+
+test("resolvePaths exclusion pattern returns not found", async () => {
+    const patterns = ["*.ts", "!test.ts"];
+
+    // Create the following layout:
+    //   exclusion-patterns
+    //   exclusion-patterns/miss.txt
+    //   exclusion-patterns/test.ts
+    const root = path.join(getTempDir(), "exclusion-patterns");
+    // tarball entries will be relative to workspace
+    process.env["GITHUB_WORKSPACE"] = root;
+    await fs.mkdir(root, { recursive: true });
+    await fs.writeFile(path.join(root, "miss.txt"), "no match");
+    await fs.writeFile(path.join(root, "test.ts"), "no match");
+
+    const originalCwd = process.cwd();
+
+    try {
+        process.chdir(root);
+
+        const resolvedPath = await actionUtils.resolvePaths(patterns);
+
+        const expectedPath = [];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        process.chdir(originalCwd);
+    }
+});
+
 test("isValidEvent returns true for push event", () => {
     const event = Events.Push;
     process.env[Events.Key] = event;
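
For readers following the new assertions: they imply that resolvePaths globs each pattern and returns matches relative to GITHUB_WORKSPACE, so the tar entries stay workspace-relative and exclusion patterns like "!test.ts" are honored. The implementation itself is not part of this hunk; the following is only a sketch consistent with those assertions, assuming the @actions/glob create/globGenerator API and its implicitDescendants option (the error message in the "home not found" test suggests @actions/glob itself rejects when os.homedir() is empty).

import * as glob from "@actions/glob";
import * as path from "path";

// Sketch only: glob the requested patterns and return workspace-relative paths.
export async function resolvePaths(patterns: string[]): Promise<string[]> {
    const paths: string[] = [];
    const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();

    // implicitDescendants: false keeps ".cache" from also yielding ".cache/file.txt",
    // which is what the "no ~ in path" test above expects.
    const globber = await glob.create(patterns.join("\n"), {
        implicitDescendants: false
    });

    for await (const file of globber.globGenerator()) {
        // Relative entries let the tarball be extracted into any workspace.
        paths.push(path.relative(workspace, file));
    }

    return paths;
}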

View file

@@ -55,7 +55,7 @@ test("restore with invalid event outputs warning", async () => {
 test("restore with no path should fail", async () => {
     const failedMock = jest.spyOn(core, "setFailed");
     await run();
-    // TODO: this shouldn't be necessary if tarball contains entries relative to workspace
+    // this input isn't necessary for restore b/c tarball contains entries relative to workspace
     expect(failedMock).not.toHaveBeenCalledWith(
         "Input required and not supplied: path"
     );

View file

@@ -1,7 +1,7 @@
 import * as core from "@actions/core";
 import * as path from "path";
 import * as cacheHttpClient from "../src/cacheHttpClient";
-import { Events, Inputs } from "../src/constants";
+import { Events, Inputs, CacheFilename } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/save";
 import * as tar from "../src/tar";
@@ -204,10 +204,10 @@ test("save with large cache outputs warning", async () => {
     await run();

-    const archivePath = path.join("/foo/bar", "cache.tgz");
+    const archiveFolder = "/foo/bar";

     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePaths);
+    expect(createTarMock).toHaveBeenCalledWith(archiveFolder, cachePaths);

     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith(
@@ -314,13 +314,14 @@ test("save with server error outputs warning", async () => {
     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
     expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);

-    const archivePath = path.join("/foo/bar", "cache.tgz");
+    const archiveFolder = "/foo/bar";
+    const archiveFile = path.join(archiveFolder, CacheFilename);

     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePaths);
+    expect(createTarMock).toHaveBeenCalledWith(archiveFolder, cachePaths);

     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
+    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);

     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
@@ -369,13 +370,14 @@ test("save with valid inputs uploads a cache", async () => {
     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
     expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);

-    const archivePath = path.join("/foo/bar", "cache.tgz");
+    const archiveFolder = "/foo/bar";
+    const archiveFile = path.join(archiveFolder, CacheFilename);

     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePaths);
+    expect(createTarMock).toHaveBeenCalledWith(archiveFolder, cachePaths);

     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
+    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);

     expect(failedMock).toHaveBeenCalledTimes(0);
 });
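
The mocks above pin down the new save flow: the archive is created inside a folder ("/foo/bar" in the tests), createTar receives that folder, and the upload targets the CacheFilename file inside it. A minimal sketch of that flow, assuming the module paths used by the test imports and a temp directory in place of whatever helper src/save.ts actually uses:

import { promises as fs } from "fs";
import * as os from "os";
import * as path from "path";
import * as cacheHttpClient from "../src/cacheHttpClient";
import { CacheFilename } from "../src/constants";
import { createTar } from "../src/tar";

// Sketch of the folder-vs-file split the assertions check; not the real src/save.ts.
async function saveSketch(cacheId: number, cachePaths: string[]): Promise<void> {
    // Scratch folder for the archive (the tests stub this out as "/foo/bar").
    const archiveFolder = await fs.mkdtemp(path.join(os.tmpdir(), "cache-"));
    const archiveFile = path.join(archiveFolder, CacheFilename);

    // createTar is handed the folder; the tar file name comes from CacheFilename.
    await createTar(archiveFolder, cachePaths);

    // The upload still points at the concrete file inside that folder.
    await cacheHttpClient.saveCache(cacheId, archiveFile);
}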

View file

@@ -1,20 +1,29 @@
 import * as exec from "@actions/exec";
 import * as io from "@actions/io";
+import { promises as fs } from "fs";
+import * as path from "path";
 import * as tar from "../src/tar";
+import { CacheFilename } from "../src/constants";

 jest.mock("@actions/exec");
 jest.mock("@actions/io");

-beforeAll(() => {
+function getTempDir(): string {
+    return path.join(__dirname, "_temp", "tar");
+}
+
+beforeAll(async () => {
     jest.spyOn(io, "which").mockImplementation(tool => {
         return Promise.resolve(tool);
     });

     process.env["GITHUB_WORKSPACE"] = process.cwd();
+    await jest.requireActual("@actions/io").rmRF(getTempDir());
 });

-afterAll(() => {
+afterAll(async () => {
     delete process.env["GITHUB_WORKSPACE"];
+    await jest.requireActual("@actions/io").rmRF(getTempDir());
 });
test("extract tar", async () => { test("extract tar", async () => {
@@ -33,36 +42,43 @@ test("extract tar", async () => {
         ? `${process.env["windir"]}\\System32\\tar.exe`
         : "tar";
     expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
-        "-xz",
-        "-f",
-        archivePath,
-        "-P",
-        "-C",
-        workspace
-    ]);
+    expect(execMock).toHaveBeenCalledWith(
+        `"${tarPath}"`,
+        ["-xz", "-f", archivePath, "-P", "-C", workspace],
+        { cwd: undefined }
+    );
 });

 test("create tar", async () => {
     const execMock = jest.spyOn(exec, "exec");
-    const archivePath = "cache.tar";
+    const archiveFolder = getTempDir();
     const workspace = process.env["GITHUB_WORKSPACE"];
     const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];
-    await tar.createTar(archivePath, sourceDirectories);
+
+    await fs.mkdir(archiveFolder, { recursive: true });
+    await tar.createTar(archiveFolder, sourceDirectories);

     const IS_WINDOWS = process.platform === "win32";
     const tarPath = IS_WINDOWS
         ? `${process.env["windir"]}\\System32\\tar.exe`
         : "tar";
     expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
-        "-cz",
-        "-f",
-        archivePath,
-        "-C",
-        workspace,
-        sourceDirectories.join(" ")
-    ]);
+    expect(execMock).toHaveBeenCalledWith(
+        `"${tarPath}"`,
+        [
+            "-cz",
+            "-f",
+            CacheFilename,
+            "-C",
+            workspace,
+            "--files-from",
+            "manifest.txt"
+        ],
+        {
+            cwd: archiveFolder
+        }
+    );
 });

View file

@@ -4975,7 +4975,7 @@ function extractTar(archivePath) {
 exports.extractTar = extractTar;
 function createTar(archiveFolder, sourceDirectories) {
     return __awaiter(this, void 0, void 0, function* () {
-        // TODO: will want to stream sourceDirectories into tar
+        // Write source directories to manifest.txt to avoid command length limits
         const manifestFilename = "manifest.txt";
         fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join("\n"));
         const workingDirectory = getWorkingDirectory();

dist/save/index.js vendored
View file

@@ -4963,7 +4963,7 @@ function extractTar(archivePath) {
 exports.extractTar = extractTar;
 function createTar(archiveFolder, sourceDirectories) {
     return __awaiter(this, void 0, void 0, function* () {
-        // TODO: will want to stream sourceDirectories into tar
+        // Write source directories to manifest.txt to avoid command length limits
        const manifestFilename = "manifest.txt";
         fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join("\n"));
         const workingDirectory = getWorkingDirectory();

View file

@@ -1 +0,0 @@
-Fri Mar 6 11:28:08 PST 2020

View file

@@ -48,7 +48,7 @@ export async function createTar(
     archiveFolder: string,
     sourceDirectories: string[]
 ): Promise<void> {
-    // TODO: will want to stream sourceDirectories into tar
+    // Write source directories to manifest.txt to avoid command length limits
     const manifestFilename = "manifest.txt";
     writeFileSync(
         path.join(archiveFolder, manifestFilename),
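
Together with the "create tar" expectation in the tar test file above, the shape of this change is: write the source directories to manifest.txt inside the archive folder, then run tar with --files-from so a long list of cached paths never hits a command-line length limit. Below is a sketch of the whole function under that reading; getWorkingDirectory and the plain io.which lookup are simplifications here (the real module also special-cases the Windows system tar), so treat it as illustration rather than the file's actual contents.

import * as exec from "@actions/exec";
import * as io from "@actions/io";
import { writeFileSync } from "fs";
import * as path from "path";
import { CacheFilename } from "./constants";

// Assumed helper: the directory the tar entries are relative to.
function getWorkingDirectory(): string {
    return process.env["GITHUB_WORKSPACE"] ?? process.cwd();
}

export async function createTar(
    archiveFolder: string,
    sourceDirectories: string[]
): Promise<void> {
    // Write source directories to manifest.txt to avoid command length limits
    const manifestFilename = "manifest.txt";
    writeFileSync(
        path.join(archiveFolder, manifestFilename),
        sourceDirectories.join("\n")
    );

    const workingDirectory = getWorkingDirectory();
    const args = [
        "-cz",
        "-f",
        CacheFilename,
        "-C",
        workingDirectory,
        "--files-from",
        manifestFilename
    ];

    // cwd is the archive folder, so tar writes CacheFilename and reads
    // manifest.txt there; this mirrors the { cwd: archiveFolder } assertion.
    const tarPath = await io.which("tar", true);
    await exec.exec(`"${tarPath}"`, args, { cwd: archiveFolder });
}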

View file

@@ -2,6 +2,7 @@ import * as core from "@actions/core";
 import * as io from "@actions/io";
 import * as glob from "@actions/glob";
 import * as fs from "fs";
+import * as os from "os";
 import * as path from "path";
 import * as uuidV4 from "uuid/v4";