51 Commits

Author SHA1 Message Date
Arpad Borsos
842ef286ff update dependencies and rebuild 2021-05-30 11:05:48 +02:00
Arpad Borsos
1b344a0a23 prepare new version 2021-05-30 11:04:10 +02:00
Arpad Borsos
31c41a926e Handle missing cargo installs gracefully
fixes #17
2021-05-30 10:55:21 +02:00
Arpad Borsos
ebd95456c3 rebuild 2021-05-19 10:11:17 +02:00
Arpad Borsos
3b8bbcb11d add description 2021-05-19 10:10:43 +02:00
Rik Nauta
f82d41bcc2 feat: allow for configurable target-dir 2021-05-19 10:06:31 +02:00
Arpad Borsos
063471b9dd update dependencies 2021-05-19 10:05:17 +02:00
Arpad Borsos
ce325b6065 rebuild 2021-03-19 17:10:47 +01:00
Caleb Maclennan
da42bbe56d Additionally key on Rust toolchain file(s) if present 2021-03-15 14:07:46 +01:00
Arpad Borsos
a9bca6b5a6 1.2.0 2021-02-16 09:09:13 +01:00
Arpad Borsos
b17d52110e Add Changelog 2021-02-16 09:08:48 +01:00
Arpad Borsos
b495963495 Add a selftest and support for .cargo/bin 2021-02-16 09:06:04 +01:00
Arpad Borsos
83aad8d470 rebuild 2021-01-28 18:42:00 +01:00
Arpad Borsos
958028d559 document cache-hit output
fixes #5
2021-01-28 18:40:43 +01:00
Austin Jones
27793b3b80 Add support for the cache-hit output 2021-01-28 18:39:18 +01:00
Arpad Borsos
be44a3e6ff introduce a new sharedKey option
fixes #6
2021-01-28 18:39:11 +01:00
Arpad Borsos
2639a56bb8 implement support for CARGO_HOME
fixes #8
2021-01-28 18:16:36 +01:00
Arpad Borsos
cbcc887094 update deps 2021-01-28 18:08:24 +01:00
Arpad Borsos
ae893481e8 Write a few Notes in the Readme 2021-01-10 09:42:14 +01:00
Arpad Borsos
d7bda0e369 update dependencies 2021-01-10 09:30:57 +01:00
Arpad Borsos
9c05405335 1.1.0 2020-12-07 23:58:20 +01:00
Arpad Borsos
08d3994b7a rebuild 2020-12-07 23:58:13 +01:00
Arpad Borsos
9e10a44ea3 support working-directory input, cleanup 2020-12-07 23:56:50 +01:00
Arpad Borsos
fb2efae33d update readme 2020-11-21 12:21:26 +01:00
Arpad Borsos
da5df52d2f update deps 2020-11-21 12:11:25 +01:00
Arpad Borsos
0eea7b85d4 update dependencies and rebuild 2020-11-07 10:22:23 +01:00
Arpad Borsos
645c6972a6 update to npm v7 lockfile 2020-10-21 09:43:08 +02:00
Arpad Borsos
6ccf2463db deal with uncaught exceptions 2020-10-21 09:32:47 +02:00
Arpad Borsos
9cc357c650 update dependencies 2020-10-21 09:00:54 +02:00
Arpad Borsos
9de90d2338 only pre-clean when something was restored 2020-10-13 14:01:46 +02:00
Arpad Borsos
292ef23e77 avoid error when saving without git dependencies 2020-10-13 13:52:55 +02:00
Arpad Borsos
5f6034beb8 improve log output 2020-10-06 12:57:42 +02:00
Arpad Borsos
b740ae5d3a clean and persist the git db/checkouts 2020-10-05 18:18:59 +02:00
Arpad Borsos
e8e3c57b3b merge all the caches and simplify 2020-10-03 18:39:38 +02:00
Arpad Borsos
f77cb1be47 merge the registry caches together 2020-10-03 18:10:54 +02:00
Arpad Borsos
2bcc375de8 key target by job id automatically 2020-10-03 17:33:09 +02:00
Arpad Borsos
bd4d2a7017 1.0.2 2020-09-29 12:30:45 +02:00
Arpad Borsos
d38127a85b Improve target pruning
fixes #1
2020-09-29 12:30:19 +02:00
Arpad Borsos
a4a1d8e7a6 1.0.1 2020-09-28 13:06:41 +02:00
Arpad Borsos
33677a20f2 add changelog 2020-09-28 12:53:56 +02:00
Arpad Borsos
1d1bff80c5 update readme 2020-09-28 12:51:40 +02:00
Arpad Borsos
08ca2ff969 make macos workaround silent 2020-09-28 12:46:36 +02:00
Arpad Borsos
ef89c3a8eb typo 2020-09-28 12:30:40 +02:00
Arpad Borsos
d45cd2b045 rebuild 2020-09-28 12:26:12 +02:00
Arpad Borsos
271ff4b692 log individual timings 2020-09-28 12:26:11 +02:00
Arpad Borsos
a6b59fa340 clean up exports 2020-09-28 12:14:11 +02:00
Arpad Borsos
e0c07d2a65 work around macos cache corruption 2020-09-28 12:08:11 +02:00
Arpad Borsos
06ff70612d remove git-db for now 2020-09-28 12:06:51 +02:00
Arpad Borsos
1304a2ec8d add ability to version caches 2020-09-28 12:00:58 +02:00
Arpad Borsos
cfcc373039 improve logging 2020-09-28 11:54:24 +02:00
Arpad Borsos
8902a8fc6c collect packages with --all-features 2020-09-28 11:44:06 +02:00
16 changed files with 30678 additions and 18053 deletions

29
.github/workflows/selftest.yml vendored Normal file
View File

@@ -0,0 +1,29 @@
name: CI
on: [push, pull_request]
jobs:
selftest:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
name: Test Action on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
override: true
- uses: ./
- run: |
cargo install cargo-deny --locked
cargo check
cargo test

1
.gitignore vendored
View File

@@ -1 +1,2 @@
node_modules
/target

30
CHANGELOG.md Normal file
View File

@@ -0,0 +1,30 @@
# Changelog
## 1.3.0
- Use Rust toolchain file as additional cache key.
- Allow for a configurable target-dir.
## 1.2.0
- Cache `~/.cargo/bin`.
- Support for custom `$CARGO_HOME`.
- Add a `cache-hit` output.
- Add a new `sharedKey` option that overrides the automatic job-name based key.
## 1.1.0
- Add a new `working-directory` input.
- Support caching git dependencies.
- Lots of other improvements.
## 1.0.2
- Don't prune targets that have a different name from the crate, but do prune targets from the workspace.
## 1.0.1
- Improved logging output.
- Make sure to consider `all-features` dependencies when pruning.
- Work around macOS cache corruption.
- Remove git-db cache for now.

1665
Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

10
Cargo.toml Normal file
View File

@@ -0,0 +1,10 @@
[package]
publish = false
name = "rust-cache"
version = "0.1.0"
authors = ["Arpad Borsos <arpad.borsos@googlemail.com>"]
edition = "2018"
[dev-dependencies]
reqwest = "0.11.0"
actix-web = { git = "https://github.com/actix/actix-web.git", rev = "bd26083f333ecf63e3eb444748250364ce124f5e" }

View File

@@ -1,51 +1,87 @@
# Rust Cache Action
A GitHub Action that implements smart caching for rust/cargo projects
## Inputs
- `key` - An optional key for the `target` cache. This is useful in case you
have different jobs for test / check / clippy, etc
A GitHub Action that implements smart caching for rust/cargo projects with
sensible defaults.
## Example usage
```yaml
- uses: Swatinem/rust-cache@v1
# selecting a toolchain either by action or manual `rustup` calls should happen
# before the plugin, as it uses the current rustc version as its cache key
- uses: actions-rs/toolchain@v1
with:
key: test
profile: minimal
toolchain: stable
- uses: Swatinem/rust-cache@v1
```
## Specifics
## Inputs
This action tries to be better than just caching the following directories:
: `key`
An optional key that is added to the automatic cache key.
```
~/.cargo/registry
~/.cargo/git
target
```
: `sharedKey`
An additional key that is stable over multiple jobs.
It disables incremental compilation and only caches dependencies. The
assumption is that we will likely recompile the own crate(s) anyway.
: `working-directory`
The working directory the action operates in, in case the cargo project is not
located in the repo root.
It also separates the cache into 4 groups, each treated differently:
: `target-dir`
The target directory that should be cleaned and persisted, defaults to `./target`.
- Index: `~/.cargo/registry/index/<registry>`:
## Outputs
This is always restored from its latest snapshot, and persisted based on the
most recent revision.
: `cache-hit`
- Registry / Cache: `~/.cargo/registry/cache/<registry>`:
This is a boolean flag that will be set to `true` when there was an exact cache hit.
Automatically keyed by the lockfile/toml hash, and is being pruned to only
persist the dependencies that are being used.
## Cache Effectiveness
- Registry / Git: `~/.cargo/registry/git/<registry>`:
This action only caches the _dependencies_ of a crate, so is more effective if
the dependency / own code ratio is higher.
Automatically keyed by the lockfile/toml hash. Pruning is still TODO.
It is also most effective for repositories with a `Cargo.lock` file. Library
repositories with only a `Cargo.toml` file have limited benefits, as cargo will
_always_ use the most up-to-date dependency versions, which may not be cached.
- target: `./target`
Usage with Stable Rust is most effective, as a cache is tied to the Rust version.
Using it with Nightly Rust is less effective as it will throw away the cache every day.
Automatically keyed by the lockfile/toml hash, and is being pruned to only
persist the dependencies that are being used. This is especially throwing
away any intermediate artifacts.
## Versioning
I use the `v1` branch similar to `master` development, so if you want to have
a more stable experience, please use a fixed revision or tag.
## Cache Details
The cache currently caches the following directories:
- `~/.cargo/registry/index`
- `~/.cargo/registry/cache`
- `~/.cargo/git`
- `./target`
This cache is automatically keyed by:
- the github `job`,
- the rustc release / host / hash, and
- a hash of the `Cargo.lock` / `Cargo.toml` files (if present).
- a hash of the `rust-toolchain` / `rust-toolchain.toml` files (if present).
An additional input `key` can be provided if the builtin keys are not sufficient.
Before persisting, the cache is cleaned of intermediate artifacts and
anything that is not a workspace dependency.
In particular, no caching of workspace crates will be done. For
this reason, this action will automatically set `CARGO_INCREMENTAL=0` to
disable incremental compilation.
The action will try to restore from a previous `Cargo.lock` version as well, so
lockfile updates should only re-build changed dependencies.
Additionally, the action automatically works around
[cargo#8603](https://github.com/rust-lang/cargo/issues/8603) /
[actions/cache#403](https://github.com/actions/cache/issues/403) which would
otherwise corrupt the cache on macOS builds.

View File

@@ -1,10 +1,22 @@
name: "Rust Cache"
description: "A GitHub Action that implements smart caching for rust/cargo projects"
description: "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults."
author: "Arpad Borsos <arpad.borsos@googlemail.com>"
inputs:
key:
description: "An explicit key for restoring and saving the target cache"
description: "An additional key for the cache"
required: false
sharedKey:
description: "An additional cache key that is stable over multiple jobs"
required: false
working-directory:
description: "The working directory this action should operate in"
required: false
target-dir:
description: "The target dir that should be cleaned and persisted, defaults to `./target`"
required: false
outputs:
cache-hit:
description: "A boolean value that indicates an exact match was found"
runs:
using: "node12"
main: "dist/restore/index.js"

22610
dist/restore/index.js vendored

File diff suppressed because one or more lines are too long

22825
dist/save/index.js vendored

File diff suppressed because one or more lines are too long

943
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,8 +1,8 @@
{
"private": true,
"name": "rust-cache",
"version": "1.0.0",
"description": "A GitHub Action that implements smart caching for rust/cargo projects",
"version": "1.2.0",
"description": "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults.",
"keywords": [
"actions",
"rust",
@@ -17,20 +17,22 @@
"bugs": {
"url": "https://github.com/Swatinem/rust-cache/issues"
},
"funding": "https://github.com/sponsors/Swatinem",
"funding": {
"url": "https://github.com/sponsors/Swatinem"
},
"homepage": "https://github.com/Swatinem/rust-cache#readme",
"dependencies": {
"@actions/cache": "^1.0.2",
"@actions/core": "^1.2.6",
"@actions/cache": "^1.0.7",
"@actions/core": "^1.3.0",
"@actions/exec": "^1.0.4",
"@actions/glob": "^0.1.0",
"@actions/io": "^1.0.2"
"@actions/glob": "^0.1.2",
"@actions/io": "^1.1.0"
},
"devDependencies": {
"@vercel/ncc": "^0.24.1",
"typescript": "^4.0.3"
"@vercel/ncc": "^0.28.6",
"typescript": "^4.3.2"
},
"scripts": {
"prepare": "ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts"
"prepare": "ncc build --target es2020 -o dist/restore src/restore.ts && ncc build --target es2020 -o dist/save src/save.ts"
}
}

View File

@@ -1,30 +1,41 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as glob from "@actions/glob";
import * as io from "@actions/io";
import crypto from "crypto";
import fs from "fs";
import os from "os";
import path from "path";
const home = os.homedir();
export const paths = {
index: path.join(home, ".cargo/registry/index"),
cache: path.join(home, ".cargo/registry/cache"),
git: path.join(home, ".cargo/git/db"),
target: "target",
};
process.on("uncaughtException", (e) => {
core.info(`[warning] ${e.message}`);
});
export interface CacheConfig {
path: string;
key: string;
restoreKeys?: Array<string>;
const cwd = core.getInput("working-directory");
// TODO: this could be read from .cargo config file directly
const targetDir = core.getInput("target-dir") || "./target";
if (cwd) {
process.chdir(cwd);
}
export interface Caches {
index: CacheConfig;
cache: CacheConfig;
git: CacheConfig;
target: CacheConfig;
export const stateBins = "RUST_CACHE_BINS";
export const stateKey = "RUST_CACHE_KEY";
const stateHash = "RUST_CACHE_HASH";
const home = os.homedir();
const cargoHome = process.env.CARGO_HOME || path.join(home, ".cargo");
export const paths = {
cargoHome,
index: path.join(cargoHome, "registry/index"),
cache: path.join(cargoHome, "registry/cache"),
git: path.join(cargoHome, "git"),
target: targetDir,
};
interface CacheConfig {
paths: Array<string>;
key: string;
restoreKeys: Array<string>;
}
const RefKey = "GITHUB_REF";
@@ -33,32 +44,67 @@ export function isValidEvent(): boolean {
return RefKey in process.env && Boolean(process.env[RefKey]);
}
export async function getCaches(): Promise<Caches> {
const rustKey = await getRustKey();
let lockHash = core.getState("lockHash");
export async function getCacheConfig(): Promise<CacheConfig> {
let lockHash = core.getState(stateHash);
if (!lockHash) {
lockHash = await getLockfileHash();
core.saveState("lockHash", lockHash);
core.saveState(stateHash, lockHash);
}
let targetKey = core.getInput("key");
if (targetKey) {
targetKey = `${targetKey}-`;
let key = `v0-rust-`;
const sharedKey = core.getInput("sharedKey");
if (sharedKey) {
key += `${sharedKey}-`;
} else {
const inputKey = core.getInput("key");
if (inputKey) {
key += `${inputKey}-`;
}
const job = process.env.GITHUB_JOB;
if (job) {
key += `${job}-`;
}
}
key += await getRustKey();
return {
index: { path: paths.index, key: "registry-index-XXX", restoreKeys: ["registry-index"] },
cache: { path: paths.cache, key: `registry-cache-${lockHash}`, restoreKeys: ["registry-cache"] },
git: { path: paths.git, key: "git-db" },
target: {
path: paths.target,
key: `target-${targetKey}${rustKey}-${lockHash}`,
restoreKeys: [`target-${targetKey}${rustKey}`],
},
paths: [
path.join(cargoHome, "bin"),
path.join(cargoHome, ".crates2.json"),
path.join(cargoHome, ".crates.toml"),
paths.git,
paths.cache,
paths.index,
paths.target,
],
key: `${key}-${lockHash}`,
restoreKeys: [key],
};
}
export async function getRustKey(): Promise<string> {
export async function getCargoBins(): Promise<Set<string>> {
try {
const { installs }: { installs: { [key: string]: { bins: Array<string> } } } = JSON.parse(
await fs.promises.readFile(path.join(paths.cargoHome, ".crates2.json"), "utf8"),
);
const bins = new Set<string>();
for (const pkg of Object.values(installs)) {
for (const bin of pkg.bins) {
bins.add(bin);
}
}
return bins;
} catch {
return new Set<string>();
}
}
async function getRustKey(): Promise<string> {
const rustc = await getRustVersion();
return `${rustc.release}-${rustc.host}-${rustc["commit-hash"]}`;
return `${rustc.release}-${rustc.host}-${rustc["commit-hash"].slice(0, 12)}`;
}
interface RustVersion {
@@ -67,7 +113,7 @@ interface RustVersion {
"commit-hash": string;
}
export async function getRustVersion(): Promise<RustVersion> {
async function getRustVersion(): Promise<RustVersion> {
const stdout = await getCmdOutput("rustc", ["-vV"]);
let splits = stdout
.split(/[\n\r]+/)
@@ -95,22 +141,10 @@ export async function getCmdOutput(
return stdout;
}
export async function getRegistryName() {
const globber = await glob.create(`${paths.index}/**/.last-updated`, { followSymbolicLinks: false });
const files = await globber.glob();
if (files.length > 1) {
core.debug(`got multiple registries: "${files.join('", "')}"`);
}
const first = files.shift();
if (!first) {
return;
}
return path.basename(path.dirname(first));
}
export async function getLockfileHash() {
const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock", { followSymbolicLinks: false });
async function getLockfileHash(): Promise<string> {
const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", {
followSymbolicLinks: false,
});
const files = await globber.glob();
files.sort((a, b) => a.localeCompare(b));
@@ -120,5 +154,97 @@ export async function getLockfileHash() {
hasher.update(chunk);
}
}
return hasher.digest("hex");
return hasher.digest("hex").slice(0, 20);
}
export interface PackageDefinition {
name: string;
version: string;
path: string;
targets: Array<string>;
}
export type Packages = Array<PackageDefinition>;
interface Meta {
packages: Array<{
name: string;
version: string;
manifest_path: string;
targets: Array<{ kind: Array<string>; name: string }>;
}>;
}
export async function getPackages(): Promise<Packages> {
const cwd = process.cwd();
const meta: Meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"]));
return meta.packages
.filter((p) => !p.manifest_path.startsWith(cwd))
.map((p) => {
const targets = p.targets.filter((t) => t.kind[0] === "lib").map((t) => t.name);
return { name: p.name, version: p.version, targets, path: path.dirname(p.manifest_path) };
});
}
export async function cleanTarget(packages: Packages) {
await fs.promises.unlink(path.join(targetDir, "./.rustc_info.json"));
await io.rmRF(path.join(targetDir, "./debug/examples"));
await io.rmRF(path.join(targetDir, "./debug/incremental"));
let dir: fs.Dir;
// remove all *files* from debug
dir = await fs.promises.opendir(path.join(targetDir, "./debug"));
for await (const dirent of dir) {
if (dirent.isFile()) {
await rm(dir.path, dirent);
}
}
const keepPkg = new Set(packages.map((p) => p.name));
await rmExcept(path.join(targetDir, "./debug/build"), keepPkg);
await rmExcept(path.join(targetDir, "./debug/.fingerprint"), keepPkg);
const keepDeps = new Set(
packages.flatMap((p) => {
const names = [];
for (const n of [p.name, ...p.targets]) {
const name = n.replace(/-/g, "_");
names.push(name, `lib${name}`);
}
return names;
}),
);
await rmExcept(path.join(targetDir, "./debug/deps"), keepDeps);
}
const oneWeek = 7 * 24 * 3600 * 1000;
export async function rmExcept(dirName: string, keepPrefix: Set<string>) {
const dir = await fs.promises.opendir(dirName);
for await (const dirent of dir) {
let name = dirent.name;
const idx = name.lastIndexOf("-");
if (idx !== -1) {
name = name.slice(0, idx);
}
const fileName = path.join(dir.path, dirent.name);
const { mtime } = await fs.promises.stat(fileName);
// we dont really know
if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > oneWeek) {
await rm(dir.path, dirent);
}
}
}
export async function rm(parent: string, dirent: fs.Dirent) {
try {
const fileName = path.join(parent, dirent.name);
core.debug(`deleting "${fileName}"`);
if (dirent.isFile()) {
await fs.promises.unlink(fileName);
} else if (dirent.isDirectory()) {
await io.rmRF(fileName);
}
} catch {}
}

3
src/main.rs Normal file
View File

@@ -0,0 +1,3 @@
fn main() {
println!("Hello, world!");
}

View File

@@ -1,35 +1,46 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import { getCaches, isValidEvent } from "./common";
import { cleanTarget, getCacheConfig, getCargoBins, getPackages, stateBins, stateKey } from "./common";
async function run() {
if (!isValidEvent()) {
return;
}
try {
core.exportVariable("CARGO_INCREMENTAL", 0);
const caches = await getCaches();
for (const [name, { path, key, restoreKeys }] of Object.entries(caches)) {
try {
core.startGroup(`Restoring "${path}" from "${key}"…`);
const restoreKey = await cache.restoreCache([path], key, restoreKeys);
if (restoreKey) {
core.info(`Restored "${path}" from cache key "${restoreKey}".`);
core.saveState(name, restoreKey);
} else {
core.info("No cache found.");
}
} catch (e) {
core.info(`[warning] ${e.message}`);
} finally {
core.endGroup();
const { paths, key, restoreKeys } = await getCacheConfig();
const bins = await getCargoBins();
core.saveState(stateBins, JSON.stringify([...bins]));
core.info(`Restoring paths:\n ${paths.join("\n ")}`);
core.info(`In directory:\n ${process.cwd()}`);
core.info(`Using keys:\n ${[key, ...restoreKeys].join("\n ")}`);
const restoreKey = await cache.restoreCache(paths, key, restoreKeys);
if (restoreKey) {
core.info(`Restored from cache key "${restoreKey}".`);
core.saveState(stateKey, restoreKey);
if (restoreKey !== key) {
// pre-clean the target directory on cache mismatch
const packages = await getPackages();
await cleanTarget(packages);
}
setCacheHitOutput(restoreKey === key);
} else {
core.info("No cache found.");
setCacheHitOutput(false);
}
} catch (e) {
setCacheHitOutput(false);
core.info(`[warning] ${e.message}`);
}
}
function setCacheHitOutput(cacheHit: boolean): void {
core.setOutput("cache-hit", cacheHit.toString());
}
run();

View File

@@ -1,49 +1,57 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as glob from "@actions/glob";
import * as io from "@actions/io";
import fs from "fs";
import path from "path";
import { getCaches, getCmdOutput, getRegistryName, isValidEvent, paths } from "./common";
import {
cleanTarget,
getCacheConfig,
getCargoBins,
getPackages,
Packages,
paths,
rm,
stateBins,
stateKey,
} from "./common";
async function run() {
if (!isValidEvent()) {
//return;
}
try {
const caches = await getCaches();
const { paths: savePaths, key } = await getCacheConfig();
if (core.getState(stateKey) === key) {
core.info(`Cache up-to-date.`);
return;
}
// TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
await macOsWorkaround();
const registryName = await getRegistryName();
const packages = await getPackages();
await pruneTarget(packages);
if (registryName) {
// save the index based on its revision
const indexRef = await getIndexRef(registryName);
caches.index.key = `registry-index-${indexRef}`;
await io.rmRF(path.join(paths.index, registryName, ".cache"));
try {
await cleanRegistry(registryName, packages);
} catch {}
await pruneRegistryCache(registryName, packages);
} else {
delete (caches as any).index;
delete (caches as any).cache;
}
try {
await cleanBin();
} catch {}
for (const [name, { path, key }] of Object.entries(caches)) {
if (core.getState(name) === key) {
core.info(`Cache for "${path}" up-to-date.`);
continue;
}
try {
core.startGroup(`Saving "${path}" to cache key "${key}"…`);
if (await cache.saveCache([path], key)) {
core.info(`Saved "${path}" to cache key "${key}".`);
}
} catch (e) {
core.info(`[warning] ${e.message}`);
} finally {
core.endGroup();
}
}
try {
await cleanGit(packages);
} catch {}
try {
await cleanTarget(packages);
} catch {}
core.info(`Saving paths:\n ${savePaths.join("\n ")}`);
core.info(`In directory:\n ${process.cwd()}`);
core.info(`Using key:\n ${key}`);
await cache.saveCache(savePaths, key);
} catch (e) {
core.info(`[warning] ${e.message}`);
}
@@ -51,83 +59,99 @@ async function run() {
run();
async function getIndexRef(registryName: string) {
const cwd = path.join(paths.index, registryName);
return (await getCmdOutput("git", ["rev-parse", "--short", "origin/master"], { cwd })).trim();
async function getRegistryName(): Promise<string> {
const globber = await glob.create(`${paths.index}/**/.last-updated`, { followSymbolicLinks: false });
const files = await globber.glob();
if (files.length > 1) {
core.warning(`got multiple registries: "${files.join('", "')}"`);
}
const first = files.shift()!;
return path.basename(path.dirname(first));
}
interface PackageDefinition {
name: string;
version: string;
async function cleanBin() {
const bins = await getCargoBins();
const oldBins = JSON.parse(core.getState(stateBins));
for (const bin of oldBins) {
bins.delete(bin);
}
const dir = await fs.promises.opendir(path.join(paths.cargoHome, "bin"));
for await (const dirent of dir) {
if (dirent.isFile() && !bins.has(dirent.name)) {
await rm(dir.path, dirent);
}
}
}
type Packages = Array<PackageDefinition>;
async function cleanRegistry(registryName: string, packages: Packages) {
await io.rmRF(path.join(paths.index, registryName, ".cache"));
async function getPackages(): Promise<Packages> {
const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--format-version", "1"]));
return meta.packages.map(({ name, version }: any) => ({ name, version }));
}
async function pruneRegistryCache(registryName: string, packages: Packages) {
const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
const dir = await fs.promises.opendir(path.join(paths.cache, registryName));
for await (const dirent of dir) {
if (dirent.isFile() && !pkgSet.has(dirent.name)) {
const fileName = path.join(dir.path, dirent.name);
await fs.promises.unlink(fileName);
core.debug(`deleting "${fileName}"`);
await rm(dir.path, dirent);
}
}
}
async function pruneTarget(packages: Packages) {
await fs.promises.unlink("./target/.rustc_info.json");
await io.rmRF("./target/debug/examples");
await io.rmRF("./target/debug/incremental");
async function cleanGit(packages: Packages) {
const coPath = path.join(paths.git, "checkouts");
const dbPath = path.join(paths.git, "db");
const repos = new Map<string, Set<string>>();
for (const p of packages) {
if (!p.path.startsWith(coPath)) {
continue;
}
const [repo, ref] = p.path.slice(coPath.length + 1).split(path.sep);
const refs = repos.get(repo);
if (refs) {
refs.add(ref);
} else {
repos.set(repo, new Set([ref]));
}
}
// we have to keep both the clone, and the checkout, removing either will
// trigger a rebuild
let dir: fs.Dir;
// remove all *files* from debug
dir = await fs.promises.opendir("./target/debug");
// clean the db
dir = await fs.promises.opendir(dbPath);
for await (const dirent of dir) {
if (dirent.isFile()) {
const fileName = path.join(dir.path, dirent.name);
await fs.promises.unlink(fileName);
if (!repos.has(dirent.name)) {
await rm(dir.path, dirent);
}
}
const keepPkg = new Set(packages.map((p) => p.name));
await rmExcept("./target/debug/build", keepPkg);
await rmExcept("./target/debug/.fingerprint", keepPkg);
const keepDeps = new Set(
packages.flatMap((p) => {
const name = p.name.replace(/-/g, "_");
return [name, `lib${name}`];
}),
);
await rmExcept("./target/debug/deps", keepDeps);
}
const twoWeeks = 14 * 24 * 3600 * 1000;
async function rmExcept(dirName: string, keepPrefix: Set<string>) {
const dir = await fs.promises.opendir(dirName);
// clean the checkouts
dir = await fs.promises.opendir(coPath);
for await (const dirent of dir) {
let name = dirent.name;
const idx = name.lastIndexOf("-");
if (idx !== -1) {
name = name.slice(0, idx);
const refs = repos.get(dirent.name);
if (!refs) {
await rm(dir.path, dirent);
continue;
}
const fileName = path.join(dir.path, dirent.name);
const { mtime } = await fs.promises.stat(fileName);
if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > twoWeeks) {
core.debug(`deleting "${fileName}"`);
if (dirent.isFile()) {
await fs.promises.unlink(fileName);
} else if (dirent.isDirectory()) {
await io.rmRF(fileName);
if (!dirent.isDirectory()) {
continue;
}
const refsDir = await fs.promises.opendir(path.join(dir.path, dirent.name));
for await (const dirent of refsDir) {
if (!refs.has(dirent.name)) {
await rm(refsDir.path, dirent);
}
}
}
}
async function macOsWorkaround() {
try {
// Workaround for https://github.com/actions/cache/issues/403
// Also see https://github.com/rust-lang/cargo/issues/8603
await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true });
} catch {}
}

View File

@@ -4,7 +4,7 @@
"diagnostics": true,
"lib": ["esnext"],
"target": "es2017",
"target": "es2020",
"resolveJsonModule": true,
"moduleResolution": "node",