diff --git a/action.yml b/action.yml index cb4c1574..36b03c55 100644 --- a/action.yml +++ b/action.yml @@ -48,6 +48,14 @@ inputs: description: "Check if a cache entry exists without downloading the cache" required: false default: "false" + incremental: + description: "Determines whether to cache incremental builds - speeding up builds for more disk usage. Defaults to false." + required: false + default: "false" + incremental-key: + description: "The key to use for incremental builds. Used on a per-branch basis" + required: false + default: ${{ github.ref }} outputs: cache-hit: description: "A boolean value that indicates an exact match was found." diff --git a/dist/restore/index.js b/dist/restore/index.js index ea98ed12..6882650f 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -86711,9 +86711,17 @@ class CacheConfig { constructor() { /** All the paths we want to cache */ this.cachePaths = []; + /** All the paths we want to cache for incremental builds */ + // public incrementalPaths: Array = []; /** The primary cache key */ this.cacheKey = ""; - /** The secondary (restore) key that only contains the prefix and environment */ + /** The primary cache key for incremental builds */ + this.incrementalKey = ""; + /** + * The secondary (restore) key that only contains the prefix and environment + * This should be used if the primary cacheKey is not available - IE pulling from main on a branch + * instead of the branch itself + * */ this.restoreKey = ""; /** Whether to cache CARGO_HOME/.bin */ this.cacheBin = true; @@ -86721,6 +86729,8 @@ class CacheConfig { this.workspaces = []; /** The cargo binaries present during main step */ this.cargoBins = []; + /** Whether to cache incremental builds */ + this.incremental = false; /** The prefix portion of the cache key */ this.keyPrefix = ""; /** The rust version considered for the cache key */ @@ -86787,6 +86797,9 @@ class CacheConfig { } } self.keyEnvs = keyEnvs; + // Make sure we consider incremental builds + 
self.incremental = lib_core.getInput("incremental").toLowerCase() == "true"; + hasher.update(`incremental=${self.incremental}`); key += `-${digest(hasher)}`; self.restoreKey = key; // Construct the lockfiles portion of the key: @@ -86908,6 +86921,14 @@ class CacheConfig { } const bins = await getCargoBins(); self.cargoBins = Array.from(bins.values()); + if (self.incremental) { + // wire the incremental key to be just for this branch + const branchName = lib_core.getInput("incremental-key") || "-shared"; + const incrementalKey = key + `-incremental--` + branchName; + self.incrementalKey = incrementalKey; + // Add the incremental cache to the cachePaths so we can restore it + self.cachePaths.push(external_path_default().join(config_CARGO_HOME, "incremental-restore.json")); + } return self; } /** @@ -86958,6 +86979,7 @@ class CacheConfig { for (const file of this.keyFiles) { lib_core.info(` - ${file}`); } + lib_core.info(`.. Incremental: ${this.incremental}`); lib_core.endGroup(); } /** @@ -87314,6 +87336,9 @@ async function rmRF(dirName) { +// import { saveMtimes } from "./incremental"; + + process.on("uncaughtException", (e) => { lib_core.error(e.message); if (e.stack) { @@ -87333,10 +87358,12 @@ async function run() { } var lookupOnly = lib_core.getInput("lookup-only").toLowerCase() === "true"; lib_core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure); - lib_core.exportVariable("CARGO_INCREMENTAL", 0); const config = await CacheConfig.new(); config.printInfo(cacheProvider); lib_core.info(""); + if (!config.incremental) { + lib_core.exportVariable("CARGO_INCREMENTAL", 0); + } lib_core.info(`... ${lookupOnly ? "Checking" : "Restoring"} cache ...`); const key = config.cacheKey; // Pass a copy of cachePaths to avoid mutating the original array as reported by: @@ -87346,7 +87373,7 @@ async function run() { lookupOnly, }); if (restoreKey) { - const match = restoreKey === key; + let match = restoreKey === key; lib_core.info(`${lookupOnly ? 
"Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`); if (!match) { // pre-clean the target directory on cache mismatch @@ -87359,10 +87386,34 @@ async function run() { // We restored the cache but it is not a full match. config.saveState(); } + // Restore the incremental-restore.json file and write the mtimes to all the files in the list + if (config.incremental) { + try { + const restoreJson = external_path_default().join(config_CARGO_HOME, "incremental-restore.json"); + const restoreString = await external_fs_default().promises.readFile(restoreJson, "utf8"); + const restoreData = JSON.parse(restoreString); + lib_core.debug(`restoreData: ${JSON.stringify(restoreData)}`); + if (restoreData.roots.length == 0) { + throw new Error("No incremental roots found"); + } + const incrementalKey = await cacheProvider.cache.restoreCache(restoreData.roots, config.incrementalKey, [config.restoreKey], { lookupOnly }); + lib_core.debug(`restoring incremental builds from ${incrementalKey}`); + for (const [file, mtime] of Object.entries(restoreData.times)) { + lib_core.debug(`restoring ${file} with mtime ${mtime}`); + await external_fs_default().promises.utimes(file, new Date(mtime), new Date(mtime)); + } + } + catch (err) { + lib_core.debug(`Could not restore incremental cache - ${err}`); + lib_core.debug(`${err.stack}`); + match = false; + } + config.saveState(); + } setCacheHitOutput(match); } else { - lib_core.info("No cache found."); + lib_core.info(`No cache found for ${config.cacheKey} - this key was found ${restoreKey}`); config.saveState(); setCacheHitOutput(false); } diff --git a/dist/save/index.js b/dist/save/index.js index ea2ddfb9..cee02474 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -86711,9 +86711,17 @@ class CacheConfig { constructor() { /** All the paths we want to cache */ this.cachePaths = []; + /** All the paths we want to cache for incremental builds */ + // public incrementalPaths: Array = []; /** The primary cache key 
*/ this.cacheKey = ""; - /** The secondary (restore) key that only contains the prefix and environment */ + /** The primary cache key for incremental builds */ + this.incrementalKey = ""; + /** + * The secondary (restore) key that only contains the prefix and environment + * This should be used if the primary cacheKey is not available - IE pulling from main on a branch + * instead of the branch itself + * */ this.restoreKey = ""; /** Whether to cache CARGO_HOME/.bin */ this.cacheBin = true; @@ -86721,6 +86729,8 @@ class CacheConfig { this.workspaces = []; /** The cargo binaries present during main step */ this.cargoBins = []; + /** Whether to cache incremental builds */ + this.incremental = false; /** The prefix portion of the cache key */ this.keyPrefix = ""; /** The rust version considered for the cache key */ @@ -86787,6 +86797,9 @@ class CacheConfig { } } self.keyEnvs = keyEnvs; + // Make sure we consider incremental builds + self.incremental = core.getInput("incremental").toLowerCase() == "true"; + hasher.update(`incremental=${self.incremental}`); key += `-${digest(hasher)}`; self.restoreKey = key; // Construct the lockfiles portion of the key: @@ -86908,6 +86921,14 @@ class CacheConfig { } const bins = await getCargoBins(); self.cargoBins = Array.from(bins.values()); + if (self.incremental) { + // wire the incremental key to be just for this branch + const branchName = core.getInput("incremental-key") || "-shared"; + const incrementalKey = key + `-incremental--` + branchName; + self.incrementalKey = incrementalKey; + // Add the incremental cache to the cachePaths so we can restore it + self.cachePaths.push(external_path_default().join(CARGO_HOME, "incremental-restore.json")); + } return self; } /** @@ -86958,6 +86979,7 @@ class CacheConfig { for (const file of this.keyFiles) { core.info(` - ${file}`); } + core.info(`.. 
Incremental: ${this.incremental}`); core.endGroup(); } /** @@ -87309,12 +87331,59 @@ async function rmRF(dirName) { await io.rmRF(dirName); } +;// CONCATENATED MODULE: ./src/incremental.ts +// import * as core from "@actions/core"; +// import * as io from "@actions/io"; +// import { CARGO_HOME } from "./config"; +// import { exists } from "./utils"; +// import { Packages } from "./workspace"; + + +async function saveMtimes(targetDirs) { + let data = { + roots: [], + times: {}, + }; + let stack = []; + // Collect all the incremental files + for (const dir of targetDirs) { + for (const maybeProfile of await external_fs_default().promises.readdir(dir)) { + const profileDir = external_path_default().join(dir, maybeProfile); + const incrementalDir = external_path_default().join(profileDir, "incremental"); + if (external_fs_default().existsSync(incrementalDir)) { + stack.push(incrementalDir); + } + } + } + // Save the stack as the roots - we cache these directly + data.roots = stack.slice(); + while (stack.length > 0) { + const dirName = stack.pop(); + const dir = await external_fs_default().promises.opendir(dirName); + for await (const dirent of dir) { + if (dirent.isDirectory()) { + stack.push(external_path_default().join(dirName, dirent.name)); + } + else { + const fileName = external_path_default().join(dirName, dirent.name); + const { mtime } = await external_fs_default().promises.stat(fileName); + data.times[fileName] = mtime.getTime(); + } + } + } + return data; +} + ;// CONCATENATED MODULE: ./src/save.ts + + + + process.on("uncaughtException", (e) => { core.error(e.message); if (e.stack) { @@ -87339,6 +87408,28 @@ async function run() { if (process.env["RUNNER_OS"] == "macOS") { await macOsWorkaround(); } + // Save the incremental cache before we delete it + if (config.incremental) { + core.info(`... 
Saving incremental cache ...`); + try { + const targetDirs = config.workspaces.map((ws) => ws.target); + const cache = await saveMtimes(targetDirs); + const saved = await cacheProvider.cache.saveCache(cache.roots, config.incrementalKey); + core.debug(`saved incremental cache with key ${saved} with contents ${cache.roots}, ${cache.times}`); + // write the incremental-restore.json file + const serialized = JSON.stringify(cache); + await external_fs_default().promises.writeFile(external_path_default().join(CARGO_HOME, "incremental-restore.json"), serialized); + // Delete the incremental cache before proceeding + for (const [path, _mtime] of cache.roots) { + core.debug(` deleting ${path}`); + await (0,promises_.rm)(path); + } + } + catch (e) { + core.debug(`Failed to save incremental cache`); + core.debug(`${e.stack}`); + } + } const allPackages = []; for (const workspace of config.workspaces) { const packages = await workspace.getPackagesOutsideWorkspaceRoot(); @@ -87375,7 +87466,7 @@ async function run() { catch (e) { core.debug(`${e.stack}`); } - core.info(`... Saving cache ...`); + core.info(`... Saving cache with key ${config.cacheKey}`); // Pass a copy of cachePaths to avoid mutating the original array as reported by: // https://github.com/actions/toolkit/pull/1378 // TODO: remove this once the underlying bug is fixed. 
diff --git a/src/cleanup.ts b/src/cleanup.ts index d84a9d5f..68edbdfb 100644 --- a/src/cleanup.ts +++ b/src/cleanup.ts @@ -25,7 +25,7 @@ export async function cleanTargetDir(targetDir: string, packages: Packages, chec } else { await cleanProfileTarget(dirName, packages, checkTimestamp); } - } catch {} + } catch { } } else if (dirent.name !== "CACHEDIR.TAG") { await rm(dir.path, dirent); } @@ -43,11 +43,11 @@ async function cleanProfileTarget(profileDir: string, packages: Packages, checkT // https://github.com/vertexclique/kaos/blob/9876f6c890339741cc5be4b7cb9df72baa5a6d79/src/cargo.rs#L25 // https://github.com/eupn/macrotest/blob/c4151a5f9f545942f4971980b5d264ebcd0b1d11/src/cargo.rs#L27 cleanTargetDir(path.join(profileDir, "target"), packages, checkTimestamp); - } catch {} + } catch { } try { // https://github.com/dtolnay/trybuild/blob/eec8ca6cb9b8f53d0caf1aa499d99df52cae8b40/src/cargo.rs#L50 cleanTargetDir(path.join(profileDir, "trybuild"), packages, checkTimestamp); - } catch {} + } catch { } // Delete everything else. 
await rmExcept(profileDir, new Set(["target", "trybuild"]), checkTimestamp); @@ -86,7 +86,7 @@ export async function getCargoBins(): Promise> { bins.add(bin); } } - } catch {} + } catch { } return bins; } @@ -117,7 +117,7 @@ export async function cleanRegistry(packages: Packages, crates = true) { const credentials = path.join(CARGO_HOME, ".cargo", "credentials.toml"); core.debug(`deleting "${credentials}"`); await fs.promises.unlink(credentials); - } catch {} + } catch { } // `.cargo/registry/index` let pkgSet = new Set(packages.map((p) => p.name)); @@ -229,7 +229,7 @@ export async function cleanGit(packages: Packages) { await rm(dir.path, dirent); } } - } catch {} + } catch { } // clean the checkouts try { @@ -250,7 +250,7 @@ export async function cleanGit(packages: Packages) { } } } - } catch {} + } catch { } } const ONE_WEEK = 7 * 24 * 3600 * 1000; @@ -302,7 +302,7 @@ async function rm(parent: string, dirent: fs.Dirent) { } else if (dirent.isDirectory()) { await io.rmRF(fileName); } - } catch {} + } catch { } } async function rmRF(dirName: string) { diff --git a/src/config.ts b/src/config.ts index 5104f5c7..455f53cd 100644 --- a/src/config.ts +++ b/src/config.ts @@ -20,9 +20,21 @@ const HASH_LENGTH = 8; export class CacheConfig { /** All the paths we want to cache */ public cachePaths: Array = []; + + /** All the paths we want to cache for incremental builds */ + // public incrementalPaths: Array = []; + /** The primary cache key */ public cacheKey = ""; - /** The secondary (restore) key that only contains the prefix and environment */ + + /** The primary cache key for incremental builds */ + public incrementalKey = ""; + + /** + * The secondary (restore) key that only contains the prefix and environment + * This should be used if the primary cacheKey is not available - IE pulling from main on a branch + * instead of the branch itself + * */ public restoreKey = ""; /** Whether to cache CARGO_HOME/.bin */ @@ -34,6 +46,9 @@ export class CacheConfig { /** The cargo 
binaries present during main step */ public cargoBins: Array = []; + /** Whether to cache incremental builds */ + public incremental: boolean = false; + /** The prefix portion of the cache key */ private keyPrefix = ""; /** The rust version considered for the cache key */ @@ -43,7 +58,7 @@ export class CacheConfig { /** The files considered for the cache key */ private keyFiles: Array = []; - private constructor() {} + private constructor() { } /** * Constructs a [`CacheConfig`] with all the paths and keys. @@ -115,6 +130,10 @@ export class CacheConfig { self.keyEnvs = keyEnvs; + // Make sure we consider incremental builds + self.incremental = core.getInput("incremental").toLowerCase() == "true"; + hasher.update(`incremental=${self.incremental}`); + key += `-${digest(hasher)}`; self.restoreKey = key; @@ -267,6 +286,16 @@ export class CacheConfig { const bins = await getCargoBins(); self.cargoBins = Array.from(bins.values()); + if (self.incremental) { + // wire the incremental key to be just for this branch + const branchName = core.getInput("incremental-key") || "-shared"; + const incrementalKey = key + `-incremental--` + branchName; + self.incrementalKey = incrementalKey; + + // Add the incremental cache to the cachePaths so we can restore it + self.cachePaths.push(path.join(CARGO_HOME, "incremental-restore.json")); + } + return self; } @@ -321,6 +350,7 @@ export class CacheConfig { for (const file of this.keyFiles) { core.info(` - ${file}`); } + core.info(`.. 
Incremental: ${this.incremental}`); core.endGroup(); } diff --git a/src/incremental.ts b/src/incremental.ts new file mode 100644 index 00000000..4e8865a2 --- /dev/null +++ b/src/incremental.ts @@ -0,0 +1,54 @@ +// import * as core from "@actions/core"; +// import * as io from "@actions/io"; +// import { CARGO_HOME } from "./config"; +// import { exists } from "./utils"; +// import { Packages } from "./workspace"; + +import fs from "fs"; +import path from "path"; + +export type MtimeData = { + roots: string[], + times: { + [key: string]: number + } +}; + +export async function saveMtimes(targetDirs: string[]): Promise { + let data: MtimeData = { + roots: [], + times: {}, + }; + let stack: string[] = []; + + // Collect all the incremental files + for (const dir of targetDirs) { + for (const maybeProfile of await fs.promises.readdir(dir)) { + const profileDir = path.join(dir, maybeProfile); + const incrementalDir = path.join(profileDir, "incremental"); + if (fs.existsSync(incrementalDir)) { + stack.push(incrementalDir); + } + } + } + + // Save the stack as the roots - we cache these directly + data.roots = stack.slice(); + + while (stack.length > 0) { + const dirName = stack.pop()!; + const dir = await fs.promises.opendir(dirName); + + for await (const dirent of dir) { + if (dirent.isDirectory()) { + stack.push(path.join(dirName, dirent.name)); + } else { + const fileName = path.join(dirName, dirent.name); + const { mtime } = await fs.promises.stat(fileName); + data.times[fileName] = mtime.getTime(); + } + } + } + + return data; +} diff --git a/src/restore.ts b/src/restore.ts index 21af56fa..78e7975b 100644 --- a/src/restore.ts +++ b/src/restore.ts @@ -1,8 +1,12 @@ import * as core from "@actions/core"; import { cleanTargetDir } from "./cleanup"; -import { CacheConfig } from "./config"; +import { CacheConfig, CARGO_HOME } from "./config"; import { getCacheProvider, reportError } from "./utils"; +// import { saveMtimes } from "./incremental"; +import path from "path"; 
+import fs from "fs"; +import { MtimeData } from "./incremental"; process.on("uncaughtException", (e) => { core.error(e.message); @@ -27,12 +31,15 @@ async function run() { var lookupOnly = core.getInput("lookup-only").toLowerCase() === "true"; core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure); - core.exportVariable("CARGO_INCREMENTAL", 0); const config = await CacheConfig.new(); config.printInfo(cacheProvider); core.info(""); + if (!config.incremental) { + core.exportVariable("CARGO_INCREMENTAL", 0); + } + core.info(`... ${lookupOnly ? "Checking" : "Restoring"} cache ...`); const key = config.cacheKey; // Pass a copy of cachePaths to avoid mutating the original array as reported by: @@ -42,23 +49,53 @@ async function run() { lookupOnly, }); if (restoreKey) { - const match = restoreKey === key; + let match = restoreKey === key; core.info(`${lookupOnly ? "Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`); + if (!match) { // pre-clean the target directory on cache mismatch for (const workspace of config.workspaces) { try { await cleanTargetDir(workspace.target, [], true); - } catch {} + } catch { } } // We restored the cache but it is not a full match. 
config.saveState(); } + // Restore the incremental-restore.json file and write the mtimes to all the files in the list + if (config.incremental) { + try { + const restoreJson = path.join(CARGO_HOME, "incremental-restore.json"); + const restoreString = await fs.promises.readFile(restoreJson, "utf8"); + const restoreData: MtimeData = JSON.parse(restoreString); + + core.debug(`restoreData: ${JSON.stringify(restoreData)}`); + + if (restoreData.roots.length == 0) { + throw new Error("No incremental roots found"); + } + + const incrementalKey = await cacheProvider.cache.restoreCache(restoreData.roots, config.incrementalKey, [config.restoreKey], { lookupOnly }); + core.debug(`restoring incremental builds from ${incrementalKey}`); + + for (const [file, mtime] of Object.entries(restoreData.times)) { + core.debug(`restoring ${file} with mtime ${mtime}`); + await fs.promises.utimes(file, new Date(mtime), new Date(mtime)); + } + + } catch (err) { + core.debug(`Could not restore incremental cache - ${err}`); + core.debug(`${(err as any).stack}`); + match = false; + } + config.saveState(); + } + setCacheHitOutput(match); } else { - core.info("No cache found."); + core.info(`No cache found for key ${config.cacheKey}.`); config.saveState(); setCacheHitOutput(false); diff --git a/src/save.ts index a62019e4..be36fbcc 100644 --- a/src/save.ts +++ b/src/save.ts @@ -2,8 +2,12 @@ import * as core from "@actions/core"; import * as exec from "@actions/exec"; import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup"; -import { CacheConfig, isCacheUpToDate } from "./config"; +import { CacheConfig, CARGO_HOME, isCacheUpToDate } from "./config"; import { getCacheProvider, reportError } from "./utils"; +import { rm } from "fs/promises"; +import fs from "fs"; +import path from "path"; +import { saveMtimes } from "./incremental"; process.on("uncaughtException", (e) => { core.error(e.message); @@ -36,6 +40,30 @@ async function run()
{ await macOsWorkaround(); } + // Save the incremental cache before we delete it + if (config.incremental) { + core.info(`... Saving incremental cache ...`); + try { + const targetDirs = config.workspaces.map((ws) => ws.target); + const cache = await saveMtimes(targetDirs); + const saved = await cacheProvider.cache.saveCache(cache.roots, config.incrementalKey); + core.debug(`saved incremental cache with key ${saved} with contents ${cache.roots}, ${JSON.stringify(cache.times)}`); + + // write the incremental-restore.json file + const serialized = JSON.stringify(cache); + await fs.promises.writeFile(path.join(CARGO_HOME, "incremental-restore.json"), serialized); + + // Delete the incremental cache before proceeding. + // cache.roots is string[] (directory paths): iterate it directly — + // destructuring `[path, _mtime]` from a string yields single characters + // and shadows the `path` module import — and pass `recursive: true` + // since rm() rejects on directories otherwise. + for (const root of cache.roots) { + core.debug(` deleting ${root}`); + await rm(root, { recursive: true }); + } + } catch (e) { + core.debug(`Failed to save incremental cache`); + core.debug(`${(e as any).stack}`); + } + } + const allPackages = []; for (const workspace of config.workspaces) { const packages = await workspace.getPackagesOutsideWorkspaceRoot(); @@ -72,7 +100,7 @@ async function run() { core.debug(`${(e as any).stack}`); } - core.info(`... Saving cache ...`); + core.info(`... Saving cache with key ${config.cacheKey}`); // Pass a copy of cachePaths to avoid mutating the original array as reported by: // https://github.com/actions/toolkit/pull/1378 // TODO: remove this once the underlying bug is fixed. @@ -90,5 +118,5 @@ async function macOsWorkaround() { // Workaround for https://github.com/actions/cache/issues/403 // Also see https://github.com/rust-lang/cargo/issues/8603 await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true }); - } catch {} + } catch { } }