48 Commits
v1.0.2 ... v1

Author SHA1 Message Date
Arpad Borsos
81d053bdb0 update dependencies and rebuild 2022-06-26 12:35:07 +02:00
Vlad-Shcherbina
5040f39404 Skip registry cleaning if no registry was found (#65)
This fixes #64.

When Cargo is run in sparse-registry mode, it doesn't create
  ~/.cargo/registry/index/github.com-1ecc6299db9ec823/
directory.
2022-06-26 10:51:36 +02:00
Vlad-Shcherbina
2055a01dcd Improve diagnostics: show exception stack trace (#63)
Also add logging to the quiet exception handlers.
2022-06-25 23:20:01 +02:00
Arpad Borsos
cb2cf0cc7c 1.4.0 2022-04-08 17:27:07 +02:00
Arpad Borsos
74e8e24b6d Update dependencies, clean both debug and release targets 2022-04-08 17:25:03 +02:00
Arpad Borsos
f8f67b7515 Add a LICENSE file
fixes #50
2022-03-05 10:21:49 +01:00
Ed Morley
5b2b053862 Improve Cache Details documentation (#49)
Updates the documentation on how the cache works to more
closely match the implementation and to explain some of the design decisions.
2022-03-05 10:04:16 +01:00
Arpad Borsos
3bb3a9a087 update deps and rebuild 2021-12-24 08:55:35 +01:00
Arpad Borsos
d127014599 update dependencies 2021-09-28 17:49:43 +02:00
Sergey Nikitin
801365cd81 hint that checkout has to be used first (#34) 2021-09-28 16:03:51 +02:00
Arpad Borsos
c5ed9ba6b7 update dependencies and rebuild 2021-06-28 23:21:42 +02:00
Tom Parker-Shemilt
536c94f32c Cache-on-failure support (#22) 2021-06-28 23:18:07 +02:00
Arpad Borsos
842ef286ff update dependencies and rebuild 2021-05-30 11:05:48 +02:00
Arpad Borsos
1b344a0a23 prepare new version 2021-05-30 11:04:10 +02:00
Arpad Borsos
31c41a926e Handle missing cargo installs gracefully
fixes #17
2021-05-30 10:55:21 +02:00
Arpad Borsos
ebd95456c3 rebuild 2021-05-19 10:11:17 +02:00
Arpad Borsos
3b8bbcb11d add description 2021-05-19 10:10:43 +02:00
Rik Nauta
f82d41bcc2 feat: allow for configurable target-dir 2021-05-19 10:06:31 +02:00
Arpad Borsos
063471b9dd update dependencies 2021-05-19 10:05:17 +02:00
Arpad Borsos
ce325b6065 rebuild 2021-03-19 17:10:47 +01:00
Caleb Maclennan
da42bbe56d Additionally key on Rust toolchain file(s) if present 2021-03-15 14:07:46 +01:00
Arpad Borsos
a9bca6b5a6 1.2.0 2021-02-16 09:09:13 +01:00
Arpad Borsos
b17d52110e Add Changelog 2021-02-16 09:08:48 +01:00
Arpad Borsos
b495963495 Add a selftest and support for .cargo/bin 2021-02-16 09:06:04 +01:00
Arpad Borsos
83aad8d470 rebuild 2021-01-28 18:42:00 +01:00
Arpad Borsos
958028d559 document cache-hit output
fixes #5
2021-01-28 18:40:43 +01:00
Austin Jones
27793b3b80 Add support for the cache-hit output 2021-01-28 18:39:18 +01:00
Arpad Borsos
be44a3e6ff introduce a new sharedKey option
fixes #6
2021-01-28 18:39:11 +01:00
Arpad Borsos
2639a56bb8 implement support for CARGO_HOME
fixes #8
2021-01-28 18:16:36 +01:00
Arpad Borsos
cbcc887094 update deps 2021-01-28 18:08:24 +01:00
Arpad Borsos
ae893481e8 Write a few Notes in the Readme 2021-01-10 09:42:14 +01:00
Arpad Borsos
d7bda0e369 update dependencies 2021-01-10 09:30:57 +01:00
Arpad Borsos
9c05405335 1.1.0 2020-12-07 23:58:20 +01:00
Arpad Borsos
08d3994b7a rebuild 2020-12-07 23:58:13 +01:00
Arpad Borsos
9e10a44ea3 support working-directory input, cleanup 2020-12-07 23:56:50 +01:00
Arpad Borsos
fb2efae33d update readme 2020-11-21 12:21:26 +01:00
Arpad Borsos
da5df52d2f update deps 2020-11-21 12:11:25 +01:00
Arpad Borsos
0eea7b85d4 update dependencies and rebuild 2020-11-07 10:22:23 +01:00
Arpad Borsos
645c6972a6 update to npm v7 lockfile 2020-10-21 09:43:08 +02:00
Arpad Borsos
6ccf2463db deal with uncaught exceptions 2020-10-21 09:32:47 +02:00
Arpad Borsos
9cc357c650 update dependencies 2020-10-21 09:00:54 +02:00
Arpad Borsos
9de90d2338 only pre-clean when something was restored 2020-10-13 14:01:46 +02:00
Arpad Borsos
292ef23e77 avoid error when saving without git dependencies 2020-10-13 13:52:55 +02:00
Arpad Borsos
5f6034beb8 improve log output 2020-10-06 12:57:42 +02:00
Arpad Borsos
b740ae5d3a clean and persist the git db/checkouts 2020-10-05 18:18:59 +02:00
Arpad Borsos
e8e3c57b3b merge all the caches and simplify 2020-10-03 18:39:38 +02:00
Arpad Borsos
f77cb1be47 merge the registry caches together 2020-10-03 18:10:54 +02:00
Arpad Borsos
2bcc375de8 key target by job id automatically 2020-10-03 17:33:09 +02:00
17 changed files with 65442 additions and 49060 deletions

31
.github/workflows/selftest.yml vendored Normal file

@@ -0,0 +1,31 @@
name: CI
on: [push, pull_request]
jobs:
  selftest:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
    name: Test Action on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          profile: minimal
          override: true
      - uses: ./
        with:
          cache-on-failure: true
      - run: |
          cargo install cargo-deny --locked
          cargo check
          cargo test

1
.gitignore vendored

@@ -1 +1,2 @@
node_modules
/target

CHANGELOG.md

@@ -1,5 +1,27 @@
# Changelog
## 1.4.0
- Clean both `debug` and `release` target directories.
## 1.3.0
- Use Rust toolchain file as additional cache key.
- Allow for a configurable target-dir.
## 1.2.0
- Cache `~/.cargo/bin`.
- Support for custom `$CARGO_HOME`.
- Add a `cache-hit` output.
- Add a new `sharedKey` option that overrides the automatic job-name based key.
## 1.1.0
- Add a new `working-directory` input.
- Support caching git dependencies.
- Lots of other improvements.
## 1.0.2
- Don't prune targets that have a different name from the crate, but do prune targets from the workspace.
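
Taken together with `cache-on-failure` (documented in `action.yml` below), a minimal usage sketch of these options (the paths are hypothetical):

```yaml
- uses: Swatinem/rust-cache@v1
  with:
    working-directory: ./backend   # hypothetical: cargo project not in the repo root
    target-dir: ./backend/target   # hypothetical: the matching target directory
    cache-on-failure: true
```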

1665
Cargo.lock generated Normal file

File diff suppressed because it is too large.

10
Cargo.toml Normal file

@@ -0,0 +1,10 @@
[package]
publish = false
name = "rust-cache"
version = "0.1.0"
authors = ["Arpad Borsos <arpad.borsos@googlemail.com>"]
edition = "2018"

[dev-dependencies]
reqwest = "0.11.0"
actix-web = { git = "https://github.com/actix/actix-web.git", rev = "bd26083f333ecf63e3eb444748250364ce124f5e" }

165
LICENSE Normal file

@@ -0,0 +1,165 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
This version of the GNU Lesser General Public License incorporates
the terms and conditions of version 3 of the GNU General Public
License, supplemented by the additional permissions listed below.
0. Additional Definitions.
As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the GNU
General Public License.
"The Library" refers to a covered work governed by this License,
other than an Application or a Combined Work as defined below.
An "Application" is any work that makes use of an interface provided
by the Library, but which is not otherwise based on the Library.
Defining a subclass of a class defined by the Library is deemed a mode
of using an interface provided by the Library.
A "Combined Work" is a work produced by combining or linking an
Application with the Library. The particular version of the Library
with which the Combined Work was made is also called the "Linked
Version".
The "Minimal Corresponding Source" for a Combined Work means the
Corresponding Source for the Combined Work, excluding any source code
for portions of the Combined Work that, considered in isolation, are
based on the Application, and not on the Linked Version.
The "Corresponding Application Code" for a Combined Work means the
object code and/or source code for the Application, including any data
and utility programs needed for reproducing the Combined Work from the
Application, but excluding the System Libraries of the Combined Work.
1. Exception to Section 3 of the GNU GPL.
You may convey a covered work under sections 3 and 4 of this License
without being bound by section 3 of the GNU GPL.
2. Conveying Modified Versions.
If you modify a copy of the Library, and, in your modifications, a
facility refers to a function or data to be supplied by an Application
that uses the facility (other than as an argument passed when the
facility is invoked), then you may convey a copy of the modified
version:
a) under this License, provided that you make a good faith effort to
ensure that, in the event an Application does not supply the
function or data, the facility still operates, and performs
whatever part of its purpose remains meaningful, or
b) under the GNU GPL, with none of the additional permissions of
this License applicable to that copy.
3. Object Code Incorporating Material from Library Header Files.
The object code form of an Application may incorporate material from
a header file that is part of the Library. You may convey such object
code under terms of your choice, provided that, if the incorporated
material is not limited to numerical parameters, data structure
layouts and accessors, or small macros, inline functions and templates
(ten or fewer lines in length), you do both of the following:
a) Give prominent notice with each copy of the object code that the
Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the object code with a copy of the GNU GPL and this license
document.
4. Combined Works.
You may convey a Combined Work under terms of your choice that,
taken together, effectively do not restrict modification of the
portions of the Library contained in the Combined Work and reverse
engineering for debugging such modifications, if you also do each of
the following:
a) Give prominent notice with each copy of the Combined Work that
the Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the Combined Work with a copy of the GNU GPL and this license
document.
c) For a Combined Work that displays copyright notices during
execution, include the copyright notice for the Library among
these notices, as well as a reference directing the user to the
copies of the GNU GPL and this license document.
d) Do one of the following:
0) Convey the Minimal Corresponding Source under the terms of this
License, and the Corresponding Application Code in a form
suitable for, and under terms that permit, the user to
recombine or relink the Application with a modified version of
the Linked Version to produce a modified Combined Work, in the
manner specified by section 6 of the GNU GPL for conveying
Corresponding Source.
1) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (a) uses at run time
a copy of the Library already present on the user's computer
system, and (b) will operate properly with a modified version
of the Library that is interface-compatible with the Linked
Version.
e) Provide Installation Information, but only if you would otherwise
be required to provide such information under section 6 of the
GNU GPL, and only to the extent that such information is
necessary to install and execute a modified version of the
Combined Work produced by recombining or relinking the
Application with a modified version of the Linked Version. (If
you use option 4d0, the Installation Information must accompany
the Minimal Corresponding Source and Corresponding Application
Code. If you use option 4d1, you must provide the Installation
Information in the manner specified by section 6 of the GNU GPL
for conveying Corresponding Source.)
5. Combined Libraries.
You may place library facilities that are a work based on the
Library side by side in a single library together with other library
facilities that are not Applications and are not covered by this
License, and convey such a combined library under terms of your
choice, if you do both of the following:
a) Accompany the combined library with a copy of the same work based
on the Library, uncombined with any other library facilities,
conveyed under the terms of this License.
b) Give prominent notice with the combined library that part of it
is a work based on the Library, and explaining where to find the
accompanying uncombined form of the same work.
6. Revised Versions of the GNU Lesser General Public License.
The Free Software Foundation may publish revised and/or new versions
of the GNU Lesser General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the
Library as you received it specifies that a certain numbered version
of the GNU Lesser General Public License "or any later version"
applies to it, you have the option of following the terms and
conditions either of that published version or of any later version
published by the Free Software Foundation. If the Library as you
received it does not specify a version number of the GNU Lesser
General Public License, you may choose any version of the GNU Lesser
General Public License ever published by the Free Software Foundation.
If the Library as you received it specifies that a proxy can decide
whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the
Library.

120
README.md

@@ -1,47 +1,111 @@
# Rust Cache Action
A GitHub Action that implements smart caching for rust/cargo projects
## Inputs
- `key` - An optional key for the `target` cache. This is useful in case you
have different jobs for test / check / clippy, etc
A GitHub Action that implements smart caching for rust/cargo projects with
sensible defaults.
## Example usage
```yaml
- uses: Swatinem/rust-cache@v1
- uses: actions/checkout@v2
# selecting a toolchain either by action or manual `rustup` calls should happen
# before the plugin, as it uses the current rustc version as its cache key
- uses: actions-rs/toolchain@v1
with:
key: test
profile: minimal
toolchain: stable
- uses: Swatinem/rust-cache@v1
```
## Specifics
## Inputs
This action tries to be better than just caching the following directories:
: `key`
An optional key that is added to the automatic cache key.
```
~/.cargo/registry
~/.cargo/git
target
```
: `sharedKey`
An additional key that is stable over multiple jobs.
It disables incremental compilation and only caches dependencies. The
assumption is that we will likely recompile our own crate(s) anyway.
: `working-directory`
The working directory the action operates in, in case the cargo project is not
located in the repo root.
It also separates the cache into 4 groups, each treated differently:
: `target-dir`
The target directory that should be cleaned and persisted, defaults to `./target`.
- Registry Index: `~/.cargo/registry/index/<registry>`:
: `cache-on-failure`
Cache even if the build fails. Defaults to `false`.
This is always restored from its latest snapshot, and persisted based on the
most recent revision.
## Outputs
- Registry Cache: `~/.cargo/registry/cache/<registry>`:
: `cache-hit`
Automatically keyed by the lockfile/toml hash, and pruned to only
persist the dependencies that are being used.
This is a boolean flag that will be set to `true` when there was an exact cache hit.
- target: `./target`
## Cache Effectiveness
Automatically keyed by the lockfile/toml hash, and pruned to only persist the
dependencies that are being used. In particular, this throws away any
intermediate artifacts.
This action only caches the _dependencies_ of a crate, so it is more effective
if the ratio of dependency code to own code is higher.
It is also most effective for repositories with a `Cargo.lock` file. Library
repositories with only a `Cargo.toml` file have limited benefits, as cargo will
_always_ use the most up-to-date dependency versions, which may not be cached.
Usage with Stable Rust is most effective, as a cache is tied to the Rust version.
Using it with Nightly Rust is less effective as it will throw away the cache every day.
## Versioning
The `v1` branch is used like a `master` development branch, so if you want
a more stable experience, please use a fixed revision or tag.
## Cache Details
This action currently caches the following files/directories:
- `~/.cargo/bin`
- `~/.cargo/registry/index`
- `~/.cargo/registry/cache`
- `~/.cargo/git`
- `~/.cargo/.crates.toml`
- `~/.cargo/.crates2.json`
- `./target`
This cache is automatically keyed by:
- the github [`job_id`](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_id),
- the rustc release / host / hash, and
- a hash of all `Cargo.lock` / `Cargo.toml` files found anywhere in the repository (if present).
- a hash of all `rust-toolchain` / `rust-toolchain.toml` files in the root of the repository (if present).
An additional input `key` can be provided if the builtin keys are not sufficient.
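
For instance, `key` extends the automatic key, while `sharedKey` replaces the job-based part so that several jobs can share one cache. A minimal sketch (the values `clippy` and `deps` are illustrative):

```yaml
# per-job cache with an extra discriminator
- uses: Swatinem/rust-cache@v1
  with:
    key: clippy
# one cache shared across multiple jobs
- uses: Swatinem/rust-cache@v1
  with:
    sharedKey: deps
```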
Before being persisted, the cache is cleaned of:
- Any files in `~/.cargo/bin` that were present before the action ran (for example `rustc`).
- Dependencies that are no longer used.
- Anything that is not a dependency.
- Incremental build artifacts.
- Any build artifacts with an `mtime` older than one week.
In particular, the workspace crates themselves are not cached since doing so is
[generally not effective](https://github.com/Swatinem/rust-cache/issues/37#issuecomment-944697938).
For this reason, this action automatically sets `CARGO_INCREMENTAL=0` to disable
incremental compilation, so that the Rust compiler doesn't waste time creating
the additional artifacts required for incremental builds.
The `~/.cargo/registry/src` directory is not cached since it is quicker for Cargo
to recreate it from the compressed crate archives in `~/.cargo/registry/cache`.
The action will try to restore from a previous `Cargo.lock` version as well, so
lockfile updates should only re-build changed dependencies.
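
Concretely, the lockfile hash is only the last component of the primary key, and the prefix before it doubles as the restore key, so a changed lockfile still matches the most recent older snapshot. Key shapes as reconstructed from `src/common.ts` below (placeholders, not literal values):

```
primary key:  v0-rust-<job>-<release>-<host>-<commit-hash[..12]>-<lockfile-hash[..20]>
restore key:  v0-rust-<job>-<release>-<host>-<commit-hash[..12]>
```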
Additionally, the action automatically works around
[cargo#8603](https://github.com/rust-lang/cargo/issues/8603) /
[actions/cache#403](https://github.com/actions/cache/issues/403) which would
otherwise corrupt the cache on macOS builds.
## Known issues
- The cache cleaning process currently only runs against the build artifacts under
`./target/debug/`, so projects using release or cross-compiled builds will experience
larger cache sizes.

action.yml

@@ -1,15 +1,30 @@
name: "Rust Cache"
description: "A GitHub Action that implements smart caching for rust/cargo projects"
description: "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults."
author: "Arpad Borsos <arpad.borsos@googlemail.com>"
inputs:
key:
description: "An explicit key for restoring and saving the target cache"
description: "An additional key for the cache"
required: false
sharedKey:
description: "An additional cache key that is stable over multiple jobs"
required: false
working-directory:
description: "The working directory this action should operate in"
required: false
target-dir:
description: "The target dir that should be cleaned and persisted, defaults to `./target`"
required: false
cache-on-failure:
description: "Cache even if the build fails. Defaults to false"
required: false
outputs:
cache-hit:
description: "A boolean value that indicates an exact match was found"
runs:
using: "node12"
main: "dist/restore/index.js"
post: "dist/save/index.js"
post-if: "success()"
post-if: "success() || env.CACHE_ON_FAILURE == 'true'"
branding:
icon: "archive"
color: "gray-dark"

55287
dist/restore/index.js vendored

File diff suppressed because one or more lines are too long

55528
dist/save/index.js vendored

File diff suppressed because one or more lines are too long

1084
package-lock.json generated

File diff suppressed because it is too large.

package.json

@@ -1,8 +1,8 @@
{
  "private": true,
  "name": "rust-cache",
  "version": "1.0.2",
  "description": "A GitHub Action that implements smart caching for rust/cargo projects",
  "version": "1.4.0",
  "description": "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults.",
  "keywords": [
    "actions",
    "rust",
@@ -17,20 +17,22 @@
  "bugs": {
    "url": "https://github.com/Swatinem/rust-cache/issues"
  },
  "funding": "https://github.com/sponsors/Swatinem",
  "funding": {
    "url": "https://github.com/sponsors/Swatinem"
  },
  "homepage": "https://github.com/Swatinem/rust-cache#readme",
  "dependencies": {
    "@actions/cache": "^1.0.2",
    "@actions/core": "^1.2.6",
    "@actions/exec": "^1.0.4",
    "@actions/glob": "^0.1.0",
    "@actions/io": "^1.0.2"
    "@actions/cache": "^3.0.0",
    "@actions/core": "^1.6.0",
    "@actions/exec": "^1.1.1",
    "@actions/glob": "^0.3.0",
    "@actions/io": "^1.1.2"
  },
  "devDependencies": {
    "@vercel/ncc": "^0.24.1",
    "typescript": "^4.0.3"
    "@vercel/ncc": "^0.34.0",
    "typescript": "4.7.4"
  },
  "scripts": {
    "prepare": "ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts"
    "prepare": "ncc build --target es2020 -o dist/restore src/restore.ts && ncc build --target es2020 -o dist/save src/save.ts"
  }
}

src/common.ts

@@ -1,31 +1,44 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as glob from "@actions/glob";
import * as io from "@actions/io";
import crypto from "crypto";
import fs from "fs";
import os from "os";
import path from "path";
process.on("uncaughtException", (e) => {
core.info(`[warning] ${e.message}`);
if (e.stack) {
core.info(e.stack);
}
});
const cwd = core.getInput("working-directory");
// TODO: this could be read from .cargo config file directly
const targetDir = core.getInput("target-dir") || "./target";
if (cwd) {
process.chdir(cwd);
}
export const stateBins = "RUST_CACHE_BINS";
export const stateKey = "RUST_CACHE_KEY";
const stateHash = "RUST_CACHE_HASH";
const home = os.homedir();
const cargoHome = process.env.CARGO_HOME || path.join(home, ".cargo");
export const paths = {
index: path.join(home, ".cargo/registry/index"),
cache: path.join(home, ".cargo/registry/cache"),
git: path.join(home, ".cargo/git/db"),
target: "target",
cargoHome,
index: path.join(cargoHome, "registry/index"),
cache: path.join(cargoHome, "registry/cache"),
git: path.join(cargoHome, "git"),
target: targetDir,
};
interface CacheConfig {
name: string;
path: string;
paths: Array<string>;
key: string;
restoreKeys?: Array<string>;
}
interface Caches {
index: CacheConfig;
cache: CacheConfig;
// git: CacheConfig;
target: CacheConfig;
restoreKeys: Array<string>;
}
const RefKey = "GITHUB_REF";
@@ -34,51 +47,67 @@ export function isValidEvent(): boolean {
return RefKey in process.env && Boolean(process.env[RefKey]);
}
export async function getCaches(): Promise<Caches> {
const rustKey = await getRustKey();
let lockHash = core.getState("lockHash");
export async function getCacheConfig(): Promise<CacheConfig> {
let lockHash = core.getState(stateHash);
if (!lockHash) {
lockHash = await getLockfileHash();
core.saveState("lockHash", lockHash);
}
let targetKey = core.getInput("key");
if (targetKey) {
targetKey = `${targetKey}-`;
core.saveState(stateHash, lockHash);
}
const registryIndex = `v0-registry-index`;
const registryCache = `v0-registry-cache`;
const target = `v0-target-${targetKey}${rustKey}`;
let key = `v0-rust-`;
const sharedKey = core.getInput("sharedKey");
if (sharedKey) {
key += `${sharedKey}-`;
} else {
const inputKey = core.getInput("key");
if (inputKey) {
key += `${inputKey}-`;
}
const job = process.env.GITHUB_JOB;
if (job) {
key += `${job}-`;
}
}
key += await getRustKey();
return {
index: {
name: "Registry Index",
path: paths.index,
key: `${registryIndex}-`,
restoreKeys: [registryIndex],
},
cache: {
name: "Registry Cache",
path: paths.cache,
key: `${registryCache}-${lockHash}`,
restoreKeys: [registryCache],
},
// git: {
// name: "Git Dependencies",
// path: paths.git,
// key: "git-db",
// },
target: {
name: "Target",
path: paths.target,
key: `${target}-${lockHash}`,
restoreKeys: [target],
},
paths: [
path.join(cargoHome, "bin"),
path.join(cargoHome, ".crates2.json"),
path.join(cargoHome, ".crates.toml"),
paths.git,
paths.cache,
paths.index,
paths.target,
],
key: `${key}-${lockHash}`,
restoreKeys: [key],
};
}
export async function getCargoBins(): Promise<Set<string>> {
try {
const { installs }: { installs: { [key: string]: { bins: Array<string> } } } = JSON.parse(
await fs.promises.readFile(path.join(paths.cargoHome, ".crates2.json"), "utf8"),
);
const bins = new Set<string>();
for (const pkg of Object.values(installs)) {
for (const bin of pkg.bins) {
bins.add(bin);
}
}
return bins;
} catch {
return new Set<string>();
}
}
async function getRustKey(): Promise<string> {
const rustc = await getRustVersion();
return `${rustc.release}-${rustc.host}-${rustc["commit-hash"]}`;
return `${rustc.release}-${rustc.host}-${rustc["commit-hash"].slice(0, 12)}`;
}
interface RustVersion {
@@ -115,22 +144,10 @@ export async function getCmdOutput(
return stdout;
}
export async function getRegistryName() {
const globber = await glob.create(`${paths.index}/**/.last-updated`, { followSymbolicLinks: false });
const files = await globber.glob();
if (files.length > 1) {
core.debug(`got multiple registries: "${files.join('", "')}"`);
}
const first = files.shift();
if (!first) {
return;
}
return path.basename(path.dirname(first));
}
async function getLockfileHash() {
const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock", { followSymbolicLinks: false });
async function getLockfileHash(): Promise<string> {
const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", {
followSymbolicLinks: false,
});
const files = await globber.glob();
files.sort((a, b) => a.localeCompare(b));
@@ -140,5 +157,109 @@ async function getLockfileHash() {
hasher.update(chunk);
}
}
return hasher.digest("hex");
return hasher.digest("hex").slice(0, 20);
}
export interface PackageDefinition {
name: string;
version: string;
path: string;
targets: Array<string>;
}
export type Packages = Array<PackageDefinition>;
interface Meta {
packages: Array<{
name: string;
version: string;
manifest_path: string;
targets: Array<{ kind: Array<string>; name: string }>;
}>;
}
export async function getPackages(): Promise<Packages> {
const cwd = process.cwd();
const meta: Meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"]));
return meta.packages
.filter((p) => !p.manifest_path.startsWith(cwd))
.map((p) => {
const targets = p.targets.filter((t) => t.kind[0] === "lib").map((t) => t.name);
return { name: p.name, version: p.version, targets, path: path.dirname(p.manifest_path) };
});
}
export async function cleanTarget(packages: Packages) {
await fs.promises.unlink(path.join(targetDir, "./.rustc_info.json"));
await cleanProfileTarget(packages, "debug");
await cleanProfileTarget(packages, "release");
}
async function cleanProfileTarget(packages: Packages, profile: string) {
try {
await fs.promises.access(path.join(targetDir, profile));
} catch {
return;
}
await io.rmRF(path.join(targetDir, profile, "./examples"));
await io.rmRF(path.join(targetDir, profile, "./incremental"));
let dir: fs.Dir;
// remove all *files* from the profile directory
dir = await fs.promises.opendir(path.join(targetDir, profile));
for await (const dirent of dir) {
if (dirent.isFile()) {
await rm(dir.path, dirent);
}
}
const keepPkg = new Set(packages.map((p) => p.name));
await rmExcept(path.join(targetDir, profile, "./build"), keepPkg);
await rmExcept(path.join(targetDir, profile, "./.fingerprint"), keepPkg);
const keepDeps = new Set(
packages.flatMap((p) => {
const names = [];
for (const n of [p.name, ...p.targets]) {
const name = n.replace(/-/g, "_");
names.push(name, `lib${name}`);
}
return names;
}),
);
await rmExcept(path.join(targetDir, profile, "./deps"), keepDeps);
}
const oneWeek = 7 * 24 * 3600 * 1000;
export async function rmExcept(dirName: string, keepPrefix: Set<string>) {
const dir = await fs.promises.opendir(dirName);
for await (const dirent of dir) {
let name = dirent.name;
const idx = name.lastIndexOf("-");
if (idx !== -1) {
name = name.slice(0, idx);
}
const fileName = path.join(dir.path, dirent.name);
const { mtime } = await fs.promises.stat(fileName);
// we don't really know whether a file is still needed, so also prune anything older than a week
if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > oneWeek) {
await rm(dir.path, dirent);
}
}
}
export async function rm(parent: string, dirent: fs.Dirent) {
try {
const fileName = path.join(parent, dirent.name);
core.debug(`deleting "${fileName}"`);
if (dirent.isFile()) {
await fs.promises.unlink(fileName);
} else if (dirent.isDirectory()) {
await io.rmRF(fileName);
}
} catch {}
}

3
src/main.rs Normal file

@@ -0,0 +1,3 @@
fn main() {
    println!("Hello, world!");
}

src/restore.ts

@@ -1,41 +1,56 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import { getCaches, isValidEvent } from "./common";
import { cleanTarget, getCacheConfig, getCargoBins, getPackages, stateBins, stateKey } from "./common";
async function run() {
if (!isValidEvent()) {
if (!cache.isFeatureAvailable()) {
setCacheHitOutput(false);
return;
}
try {
var cacheOnFailure = core.getInput("cache-on-failure").toLowerCase();
if (cacheOnFailure !== "true") {
cacheOnFailure = "false";
}
core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure);
core.exportVariable("CARGO_INCREMENTAL", 0);
const caches = await getCaches();
for (const [type, { name, path, key, restoreKeys }] of Object.entries(caches)) {
const start = Date.now();
core.startGroup(`Restoring ${name}`);
core.info(`Restoring to path "${path}".`);
core.info(`Using keys:\n ${[key, ...restoreKeys].join("\n ")}`);
try {
const restoreKey = await cache.restoreCache([path], key, restoreKeys);
if (restoreKey) {
core.info(`Restored from cache key "${restoreKey}".`);
core.saveState(type, restoreKey);
} else {
core.info("No cache found.");
}
} catch (e) {
core.info(`[warning] ${e.message}`);
const { paths, key, restoreKeys } = await getCacheConfig();
const bins = await getCargoBins();
core.saveState(stateBins, JSON.stringify([...bins]));
core.info(`Restoring paths:\n ${paths.join("\n ")}`);
core.info(`In directory:\n ${process.cwd()}`);
core.info(`Using keys:\n ${[key, ...restoreKeys].join("\n ")}`);
const restoreKey = await cache.restoreCache(paths, key, restoreKeys);
if (restoreKey) {
core.info(`Restored from cache key "${restoreKey}".`);
core.saveState(stateKey, restoreKey);
if (restoreKey !== key) {
// pre-clean the target directory on cache mismatch
const packages = await getPackages();
await cleanTarget(packages);
}
const duration = Math.round((Date.now() - start) / 1000);
if (duration) {
core.info(`Took ${duration}s.`);
}
core.endGroup();
setCacheHitOutput(restoreKey === key);
} else {
core.info("No cache found.");
setCacheHitOutput(false);
}
} catch (e) {
core.info(`[warning] ${e.message}`);
setCacheHitOutput(false);
core.info(`[warning] ${(e as any).stack}`);
}
}
function setCacheHitOutput(cacheHit: boolean): void {
core.setOutput("cache-hit", cacheHit.toString());
}
run();

src/save.ts

@@ -1,162 +1,165 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as glob from "@actions/glob";
import * as io from "@actions/io";
import fs from "fs";
import path from "path";
import { getCaches, getCmdOutput, getRegistryName, isValidEvent, paths } from "./common";
import {
cleanTarget,
getCacheConfig,
getCargoBins,
getPackages,
Packages,
paths,
rm,
stateBins,
stateKey,
} from "./common";
async function run() {
if (!isValidEvent()) {
if (!cache.isFeatureAvailable()) {
return;
}
try {
const caches = await getCaches();
const registryName = await getRegistryName();
const packages = await getPackages();
const { paths: savePaths, key } = await getCacheConfig();
if (core.getState(stateKey) === key) {
core.info(`Cache up-to-date.`);
return;
}
// TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
await macOsWorkaround();
await pruneTarget(packages);
const registryName = await getRegistryName();
const packages = await getPackages();
if (registryName) {
// save the index based on its revision
const indexRef = await getIndexRef(registryName);
caches.index.key += indexRef;
await io.rmRF(path.join(paths.index, registryName, ".cache"));
await pruneRegistryCache(registryName, packages);
} else {
delete (caches as any).index;
delete (caches as any).cache;
}
for (const [type, { name, path, key }] of Object.entries(caches)) {
if (core.getState(type) === key) {
core.info(`${name} up-to-date.`);
continue;
}
const start = Date.now();
core.startGroup(`Saving ${name}`);
core.info(`Saving path "${path}".`);
core.info(`Using key "${key}".`);
try {
await cache.saveCache([path], key);
await cleanRegistry(registryName, packages);
} catch (e) {
core.info(`[warning] ${e.message}`);
core.info(`[warning] ${(e as any).stack}`);
}
const duration = Math.round((Date.now() - start) / 1000);
if (duration) {
core.info(`Took ${duration}s.`);
}
core.endGroup();
}
try {
await cleanBin();
} catch (e) {
core.info(`[warning] ${(e as any).stack}`);
}
try {
await cleanGit(packages);
} catch (e) {
core.info(`[warning] ${(e as any).stack}`);
}
try {
await cleanTarget(packages);
} catch (e) {
core.info(`[warning] ${(e as any).stack}`);
}
core.info(`Saving paths:\n ${savePaths.join("\n ")}`);
core.info(`In directory:\n ${process.cwd()}`);
core.info(`Using key:\n ${key}`);
await cache.saveCache(savePaths, key);
} catch (e) {
core.info(`[warning] ${e.message}`);
core.info(`[warning] ${(e as any).stack}`);
}
}
run();
async function getIndexRef(registryName: string) {
const cwd = path.join(paths.index, registryName);
return (await getCmdOutput("git", ["rev-parse", "--short", "origin/master"], { cwd })).trim();
async function getRegistryName(): Promise<string | null> {
const globber = await glob.create(`${paths.index}/**/.last-updated`, { followSymbolicLinks: false });
const files = await globber.glob();
if (files.length > 1) {
core.warning(`got multiple registries: "${files.join('", "')}"`);
}
const first = files.shift()!;
if (!first) {
return null;
}
return path.basename(path.dirname(first));
}
interface PackageDefinition {
name: string;
version: string;
targets: Array<string>;
async function cleanBin() {
const bins = await getCargoBins();
const oldBins = JSON.parse(core.getState(stateBins));
for (const bin of oldBins) {
bins.delete(bin);
}
const dir = await fs.promises.opendir(path.join(paths.cargoHome, "bin"));
for await (const dirent of dir) {
if (dirent.isFile() && !bins.has(dirent.name)) {
await rm(dir.path, dirent);
}
}
}
type Packages = Array<PackageDefinition>;
async function cleanRegistry(registryName: string, packages: Packages) {
await io.rmRF(path.join(paths.index, registryName, ".cache"));
interface Meta {
packages: Array<{
name: string;
version: string;
manifest_path: string;
targets: Array<{ kind: Array<string>; name: string }>;
}>;
}
async function getPackages(): Promise<Packages> {
const cwd = process.cwd();
const meta: Meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"]));
return meta.packages
.filter((p) => !p.manifest_path.startsWith(cwd))
.map((p) => {
const targets = p.targets.filter((t) => t.kind[0] === "lib").map((t) => t.name);
return { name: p.name, version: p.version, targets };
});
}
async function pruneRegistryCache(registryName: string, packages: Packages) {
const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
const dir = await fs.promises.opendir(path.join(paths.cache, registryName));
for await (const dirent of dir) {
if (dirent.isFile() && !pkgSet.has(dirent.name)) {
const fileName = path.join(dir.path, dirent.name);
await fs.promises.unlink(fileName);
core.debug(`deleting "${fileName}"`);
await rm(dir.path, dirent);
}
}
}
async function pruneTarget(packages: Packages) {
await fs.promises.unlink("./target/.rustc_info.json");
await io.rmRF("./target/debug/examples");
await io.rmRF("./target/debug/incremental");
async function cleanGit(packages: Packages) {
const coPath = path.join(paths.git, "checkouts");
const dbPath = path.join(paths.git, "db");
const repos = new Map<string, Set<string>>();
for (const p of packages) {
if (!p.path.startsWith(coPath)) {
continue;
}
const [repo, ref] = p.path.slice(coPath.length + 1).split(path.sep);
const refs = repos.get(repo);
if (refs) {
refs.add(ref);
} else {
repos.set(repo, new Set([ref]));
}
}
// we have to keep both the clone, and the checkout, removing either will
// trigger a rebuild
let dir: fs.Dir;
// remove all *files* from debug
dir = await fs.promises.opendir("./target/debug");
// clean the db
dir = await fs.promises.opendir(dbPath);
for await (const dirent of dir) {
if (dirent.isFile()) {
const fileName = path.join(dir.path, dirent.name);
await fs.promises.unlink(fileName);
if (!repos.has(dirent.name)) {
await rm(dir.path, dirent);
}
}
const keepPkg = new Set(packages.map((p) => p.name));
await rmExcept("./target/debug/build", keepPkg);
await rmExcept("./target/debug/.fingerprint", keepPkg);
const keepDeps = new Set(
packages.flatMap((p) => {
const names = [];
for (const n of [p.name, ...p.targets]) {
const name = n.replace(/-/g, "_");
names.push(name, `lib${name}`);
}
return names;
}),
);
await rmExcept("./target/debug/deps", keepDeps);
}
const twoWeeks = 14 * 24 * 3600 * 1000;
async function rmExcept(dirName: string, keepPrefix: Set<string>) {
const dir = await fs.promises.opendir(dirName);
// clean the checkouts
dir = await fs.promises.opendir(coPath);
for await (const dirent of dir) {
let name = dirent.name;
const idx = name.lastIndexOf("-");
if (idx !== -1) {
name = name.slice(0, idx);
const refs = repos.get(dirent.name);
if (!refs) {
await rm(dir.path, dirent);
continue;
}
const fileName = path.join(dir.path, dirent.name);
const { mtime } = await fs.promises.stat(fileName);
if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > twoWeeks) {
core.debug(`deleting "${fileName}"`);
if (dirent.isFile()) {
await fs.promises.unlink(fileName);
} else if (dirent.isDirectory()) {
await io.rmRF(fileName);
if (!dirent.isDirectory()) {
continue;
}
const refsDir = await fs.promises.opendir(path.join(dir.path, dirent.name));
for await (const dirent of refsDir) {
if (!refs.has(dirent.name)) {
await rm(refsDir.path, dirent);
}
}
}

tsconfig.json

@@ -4,7 +4,7 @@
"diagnostics": true,
"lib": ["esnext"],
"target": "es2017",
"target": "es2020",
"resolveJsonModule": true,
"moduleResolution": "node",