Mirror of https://github.com/Swatinem/rust-cache.git (synced 2025-12-27 01:53:59 -05:00)

Compare commits

4 Commits
| Author | SHA1 | Date |
|---|---|---|
| | b5ec9edd91 | |
| | 3f2513fdf4 | |
| | 19c46583c5 | |
| | b8e72aae83 | |
CHANGELOG.md

```diff
@@ -1,5 +1,9 @@
 # Changelog
 
+## 2.0.2
+
+- Avoid calling `cargo metadata` on pre-cleanup.
+
 ## 2.0.1
 
 - Primarily just updating dependencies to fix GitHub deprecation notices.
```
README.md (24 changed lines)

````diff
@@ -14,32 +14,42 @@ sensible defaults.
 - uses: Swatinem/rust-cache@v2
   with:
-    # An explicit cache key that is used instead of the automatic `job`-based
-    # cache key and is thus stable across jobs.
-    # Default: empty
+    # The prefix cache key, this can be changed to start a new cache manually
+    # default: "v0-rust"
+    prefix-key: ""
+
+    # An additional cache key that is stable over multiple jobs
+    # default: empty
     shared-key: ""
 
     # An additional cache key that is added alongside the automatic `job`-based
     # cache key and can be used to further differentiate jobs.
-    # Default: empty
+    # default: empty
     key: ""
 
     # A whitespace separated list of env-var *prefixes* who's value contributes
     # to the environment cache key.
     # The env-vars are matched by *prefix*, so the default `RUST` var will
     # match all of `RUSTC`, `RUSTUP_*`, `RUSTFLAGS`, `RUSTDOC_*`, etc.
-    # Default: "CARGO CC CFLAGS CXX CMAKE RUST"
+    # default: "CARGO CC CFLAGS CXX CMAKE RUST"
     env-vars: ""
 
     # The cargo workspaces and target directory configuration.
     # These entries are separated by newlines and have the form
     # `$workspace -> $target`. The `$target` part is treated as a directory
     # relative to the `$workspace` and defaults to "target" if not explicitly given.
-    # Default: ". -> target"
+    # default: ". -> target"
     workspaces: ""
 
+    # Additional non workspace directories, separated by newlines
+    cache-directories: ""
+
+    # Determines whether workspace `target` directories are cached.
+    # default: "true"
+    cache-targets: ""
+
     # Determines if the cache should be saved even when the workflow has failed.
-    # Default: "false"
+    # default: "false"
     cache-on-failure: ""
 ```
````
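The `workspaces` input documented above is a newline-separated list of `$workspace -> $target` entries, with `$target` defaulting to `target`. A minimal sketch of how such entries could be parsed follows; the helper name `parseWorkspaces` and the path joining are illustrative assumptions, not the action's actual source:

```ts
// Hypothetical sketch: splitting a `workspaces` input into (root, target) pairs.
// The real action also resolves roots against the repository checkout; this is a
// simplified, self-contained model.
interface WorkspaceEntry {
  root: string;
  target: string;
}

function parseWorkspaces(input: string): WorkspaceEntry[] {
  const entries: WorkspaceEntry[] = [];
  for (const line of input.trim().split("\n")) {
    if (!line.trim()) continue;
    // Each entry has the form `$workspace -> $target`; `$target` defaults to "target".
    const [root, target = "target"] = line.split("->").map((s) => s.trim());
    entries.push({ root, target: `${root}/${target}` });
  }
  return entries;
}

// Example: the default input ". -> target" yields [{ root: ".", target: "./target" }].
```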
action.yml (11 changed lines)

```diff
@@ -2,6 +2,10 @@ name: "Rust Cache"
 description: "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults."
 author: "Arpad Borsos <swatinem@swatinem.de>"
 inputs:
+  prefix-key:
+    description: "The prefix cache key, this can be changed to start a new cache manually"
+    required: false
+    default: "v0-rust"
   shared-key:
     description: "An additional cache key that is stable over multiple jobs"
     required: false
@@ -14,6 +18,13 @@ inputs:
   workspaces:
     description: "Paths to multiple Cargo workspaces and their target directories, separated by newlines"
     required: false
+  cache-directories:
+    description: "Additional non workspace directories, separated by newlines"
+    required: false
+  cache-targets:
+    description: "Determines whether workspace targets are cached"
+    required: false
+    default: "true"
   cache-on-failure:
     description: "Cache even if the build fails. Defaults to false"
    required: false
```
dist/restore/index.js (vendored, 44 changed lines)

```diff
@@ -64476,6 +64476,7 @@ async function getCmdOutput(cmd, args = [], options = {}) {
 ;// CONCATENATED MODULE: ./src/workspace.ts
 
 
+
 const SAVE_TARGETS = new Set(["lib", "proc-macro"]);
 class Workspace {
     constructor(root, target) {
@@ -64485,9 +64486,11 @@ class Workspace {
     async getPackages() {
         let packages = [];
         try {
+            lib_core.debug(`collecting metadata for "${this.root}"`);
            const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], {
                 cwd: this.root,
             }));
+            lib_core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
             for (const pkg of meta.packages) {
                 if (pkg.manifest_path.startsWith(this.root)) {
                     continue;
@@ -64545,7 +64548,7 @@ class CacheConfig {
         // Construct key prefix:
         // This uses either the `shared-key` input,
         // or the `key` input combined with the `job` key.
-        let key = `v0-rust`;
+        let key = lib_core.getInput("prefix-key");
         const sharedKey = lib_core.getInput("shared-key");
         if (sharedKey) {
             key += `-${sharedKey}`;
@@ -64630,7 +64633,15 @@ class CacheConfig {
             workspaces.push(new Workspace(root, target));
         }
         self.workspaces = workspaces;
-        self.cachePaths = [config_CARGO_HOME, ...workspaces.map((ws) => ws.target)];
+        self.cachePaths = [config_CARGO_HOME];
+        const cacheTargets = lib_core.getInput("cache-targets").toLowerCase();
+        if (cacheTargets === "true") {
+            self.cachePaths.push(...workspaces.map((ws) => ws.target));
+        }
+        const cacheDirectories = lib_core.getInput("cache-directories");
+        for (const dir of cacheDirectories.trim().split("\n")) {
+            self.cachePaths.push(dir);
+        }
         return self;
     }
     printInfo() {
@@ -64832,30 +64843,34 @@ async function cleanGit(packages) {
 }
 const ONE_WEEK = 7 * 24 * 3600 * 1000;
 /**
- * Removes all files or directories in `dirName`, except the ones matching
- * any string in the `keepPrefix` set.
+ * Removes all files or directories in `dirName` matching some criteria.
  *
- * The matching strips and trailing `-$hash` suffix.
+ * When the `checkTimestamp` flag is set, this will also remove anything older
+ * than one week.
+ *
+ * Otherwise, it will remove everything that does not match any string in the
+ * `keepPrefix` set.
+ * The matching strips and trailing `-$hash` suffix.
  */
 async function rmExcept(dirName, keepPrefix, checkTimestamp = false) {
     const dir = await external_fs_default().promises.opendir(dirName);
     for await (const dirent of dir) {
+        if (checkTimestamp) {
+            const fileName = external_path_default().join(dir.path, dirent.name);
+            const { mtime } = await external_fs_default().promises.stat(fileName);
+            const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
+            if (isOutdated) {
+                await rm(dir.path, dirent);
+            }
+            return;
+        }
         let name = dirent.name;
         // strip the trailing hash
         const idx = name.lastIndexOf("-");
         if (idx !== -1) {
             name = name.slice(0, idx);
         }
-        let isOutdated = false;
-        if (checkTimestamp) {
-            const fileName = external_path_default().join(dir.path, dirent.name);
-            const { mtime } = await external_fs_default().promises.stat(fileName);
-            isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
-        }
-        if (!keepPrefix.has(name) || isOutdated) {
+        if (!keepPrefix.has(name)) {
             await rm(dir.path, dirent);
         }
     }
@@ -64925,8 +64940,7 @@ async function run() {
         // pre-clean the target directory on cache mismatch
         for (const workspace of config.workspaces) {
             try {
-                const packages = await workspace.getPackages();
-                await cleanTargetDir(workspace.target, packages, true);
+                await cleanTargetDir(workspace.target, [], true);
             }
             catch { }
         }
```
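The `CacheConfig` hunk above changes how the set of cached paths is assembled: `CARGO_HOME` is always cached, workspace `target` directories only when `cache-targets` is `"true"`, and any extra `cache-directories` entries are appended. A simplified, self-contained model of that logic follows; `getInput`, `cargoHome`, and the `Workspace` shape are stand-ins for the bundle's real bindings:

```ts
// Simplified model of the cache-path assembly shown in the hunk above.
interface Workspace {
  root: string;
  target: string;
}

function buildCachePaths(
  cargoHome: string,
  workspaces: Workspace[],
  getInput: (name: string) => string,
): string[] {
  const cachePaths = [cargoHome];
  // Workspace `target` directories are only cached when `cache-targets` is "true".
  if (getInput("cache-targets").toLowerCase() === "true") {
    cachePaths.push(...workspaces.map((ws) => ws.target));
  }
  // Extra non-workspace directories from `cache-directories`, one per line
  // (blank lines are skipped in this sketch).
  for (const dir of getInput("cache-directories").trim().split("\n")) {
    if (dir) cachePaths.push(dir);
  }
  return cachePaths;
}
```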
dist/save/index.js (vendored, 41 changed lines)

```diff
@@ -64476,6 +64476,7 @@ async function getCmdOutput(cmd, args = [], options = {}) {
 ;// CONCATENATED MODULE: ./src/workspace.ts
 
 
+
 const SAVE_TARGETS = new Set(["lib", "proc-macro"]);
 class Workspace {
     constructor(root, target) {
@@ -64485,9 +64486,11 @@ class Workspace {
     async getPackages() {
         let packages = [];
         try {
+            core.debug(`collecting metadata for "${this.root}"`);
             const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], {
                 cwd: this.root,
             }));
+            core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
             for (const pkg of meta.packages) {
                 if (pkg.manifest_path.startsWith(this.root)) {
                     continue;
@@ -64545,7 +64548,7 @@ class CacheConfig {
         // Construct key prefix:
         // This uses either the `shared-key` input,
         // or the `key` input combined with the `job` key.
-        let key = `v0-rust`;
+        let key = core.getInput("prefix-key");
         const sharedKey = core.getInput("shared-key");
         if (sharedKey) {
             key += `-${sharedKey}`;
@@ -64630,7 +64633,15 @@ class CacheConfig {
             workspaces.push(new Workspace(root, target));
         }
         self.workspaces = workspaces;
-        self.cachePaths = [CARGO_HOME, ...workspaces.map((ws) => ws.target)];
+        self.cachePaths = [CARGO_HOME];
+        const cacheTargets = core.getInput("cache-targets").toLowerCase();
+        if (cacheTargets === "true") {
+            self.cachePaths.push(...workspaces.map((ws) => ws.target));
+        }
+        const cacheDirectories = core.getInput("cache-directories");
+        for (const dir of cacheDirectories.trim().split("\n")) {
+            self.cachePaths.push(dir);
+        }
         return self;
     }
     printInfo() {
@@ -64832,30 +64843,34 @@ async function cleanGit(packages) {
 }
 const ONE_WEEK = 7 * 24 * 3600 * 1000;
 /**
- * Removes all files or directories in `dirName`, except the ones matching
- * any string in the `keepPrefix` set.
+ * Removes all files or directories in `dirName` matching some criteria.
  *
- * The matching strips and trailing `-$hash` suffix.
+ * When the `checkTimestamp` flag is set, this will also remove anything older
+ * than one week.
+ *
+ * Otherwise, it will remove everything that does not match any string in the
+ * `keepPrefix` set.
+ * The matching strips and trailing `-$hash` suffix.
  */
 async function rmExcept(dirName, keepPrefix, checkTimestamp = false) {
     const dir = await external_fs_default().promises.opendir(dirName);
     for await (const dirent of dir) {
+        if (checkTimestamp) {
+            const fileName = external_path_default().join(dir.path, dirent.name);
+            const { mtime } = await external_fs_default().promises.stat(fileName);
+            const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
+            if (isOutdated) {
+                await rm(dir.path, dirent);
+            }
+            return;
+        }
         let name = dirent.name;
         // strip the trailing hash
         const idx = name.lastIndexOf("-");
         if (idx !== -1) {
             name = name.slice(0, idx);
         }
-        let isOutdated = false;
-        if (checkTimestamp) {
-            const fileName = external_path_default().join(dir.path, dirent.name);
-            const { mtime } = await external_fs_default().promises.stat(fileName);
-            isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
-        }
-        if (!keepPrefix.has(name) || isOutdated) {
+        if (!keepPrefix.has(name)) {
             await rm(dir.path, dirent);
         }
     }
```
package-lock.json (generated, 20 changed lines)

```diff
@@ -1,15 +1,15 @@
 {
   "name": "rust-cache",
-  "version": "2.0.1",
+  "version": "2.0.2",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "rust-cache",
-      "version": "2.0.1",
+      "version": "2.0.2",
       "license": "LGPL-3.0",
       "dependencies": {
-        "@actions/cache": "^3.0.5",
+        "@actions/cache": "^3.0.6",
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.1.1",
         "@actions/glob": "^0.3.0",
@@ -24,15 +24,16 @@
       }
     },
     "node_modules/@actions/cache": {
-      "version": "3.0.5",
-      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.5.tgz",
-      "integrity": "sha512-0WpPmwnRPkn5k5ASmjoX8bY8NrZEPTwN+64nGYJmR/bHjEVgC8svdf5K956wi67tNJBGJky2+UfvNbUOtHmMHg==",
+      "version": "3.0.6",
+      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.6.tgz",
+      "integrity": "sha512-Tttit+nqmxgb2M5Ufj5p8Lwd+fx329HOTLzxMrY4aaaZqBzqetgWlEfszMyiXfX4cJML+bzLJbyD9rNYt8TJ8g==",
       "dependencies": {
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.0.1",
         "@actions/glob": "^0.1.0",
         "@actions/http-client": "^2.0.1",
         "@actions/io": "^1.0.1",
         "@azure/abort-controller": "^1.1.0",
         "@azure/ms-rest-js": "^2.6.0",
         "@azure/storage-blob": "^12.8.0",
         "semver": "^6.1.0",
@@ -667,15 +668,16 @@
     },
     "dependencies": {
       "@actions/cache": {
-        "version": "3.0.5",
-        "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.5.tgz",
-        "integrity": "sha512-0WpPmwnRPkn5k5ASmjoX8bY8NrZEPTwN+64nGYJmR/bHjEVgC8svdf5K956wi67tNJBGJky2+UfvNbUOtHmMHg==",
+        "version": "3.0.6",
+        "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.6.tgz",
+        "integrity": "sha512-Tttit+nqmxgb2M5Ufj5p8Lwd+fx329HOTLzxMrY4aaaZqBzqetgWlEfszMyiXfX4cJML+bzLJbyD9rNYt8TJ8g==",
        "requires": {
           "@actions/core": "^1.10.0",
           "@actions/exec": "^1.0.1",
           "@actions/glob": "^0.1.0",
           "@actions/http-client": "^2.0.1",
           "@actions/io": "^1.0.1",
           "@azure/abort-controller": "^1.1.0",
           "@azure/ms-rest-js": "^2.6.0",
           "@azure/storage-blob": "^12.8.0",
           "semver": "^6.1.0",
```
package.json

```diff
@@ -1,7 +1,7 @@
 {
   "private": true,
   "name": "rust-cache",
-  "version": "2.0.1",
+  "version": "2.0.2",
   "description": "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults.",
   "keywords": [
     "actions",
@@ -22,7 +22,7 @@
   },
   "homepage": "https://github.com/Swatinem/rust-cache#readme",
   "dependencies": {
-    "@actions/cache": "^3.0.5",
+    "@actions/cache": "^3.0.6",
     "@actions/core": "^1.10.0",
     "@actions/exec": "^1.1.1",
     "@actions/glob": "^0.3.0",
```
src/cleanup.ts

```diff
@@ -180,17 +180,29 @@ export async function cleanGit(packages: Packages) {
 const ONE_WEEK = 7 * 24 * 3600 * 1000;
 
 /**
- * Removes all files or directories in `dirName`, except the ones matching
- * any string in the `keepPrefix` set.
+ * Removes all files or directories in `dirName` matching some criteria.
  *
- * The matching strips and trailing `-$hash` suffix.
+ * When the `checkTimestamp` flag is set, this will also remove anything older
+ * than one week.
+ *
+ * Otherwise, it will remove everything that does not match any string in the
+ * `keepPrefix` set.
+ * The matching strips and trailing `-$hash` suffix.
  */
 async function rmExcept(dirName: string, keepPrefix: Set<string>, checkTimestamp = false) {
   const dir = await fs.promises.opendir(dirName);
   for await (const dirent of dir) {
+    if (checkTimestamp) {
+      const fileName = path.join(dir.path, dirent.name);
+      const { mtime } = await fs.promises.stat(fileName);
+      const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
+
+      if (isOutdated) {
+        await rm(dir.path, dirent);
+      }
+      return;
+    }
+
     let name = dirent.name;
 
     // strip the trailing hash
@@ -199,14 +211,7 @@ async function rmExcept(dirName: string, keepPrefix: Set<string>, checkTimestamp
       name = name.slice(0, idx);
     }
 
-    let isOutdated = false;
-    if (checkTimestamp) {
-      const fileName = path.join(dir.path, dirent.name);
-      const { mtime } = await fs.promises.stat(fileName);
-      isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
-    }
-
-    if (!keepPrefix.has(name) || isOutdated) {
+    if (!keepPrefix.has(name)) {
       await rm(dir.path, dirent);
     }
   }
```
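The `rmExcept` rewrite above splits what used to be one combined check into two separate modes: with `checkTimestamp` set it prunes purely by file age, so the caller no longer needs a package list at all (this is what lets the pre-cleanup in `run()` further below skip `cargo metadata`, matching the 2.0.2 changelog entry); otherwise it keeps entries whose hash-stripped name appears in `keepPrefix`. A rough model of the per-entry decision follows; the names `shouldRemove` and `Entry` are illustrative, not part of the action's source:

```ts
// Hedged sketch of the two pruning modes distinguished by the rewritten rmExcept.
interface Entry {
  name: string; // e.g. "serde-a1b2c3d4"
  mtimeMs: number; // modification time in ms since the epoch
}

const ONE_WEEK = 7 * 24 * 3600 * 1000;

function shouldRemove(entry: Entry, keepPrefix: Set<string>, checkTimestamp: boolean, nowMs: number): boolean {
  if (checkTimestamp) {
    // Timestamp mode (used by the pre-cleanup path): no package list is needed,
    // anything untouched for more than a week is removed.
    return nowMs - entry.mtimeMs > ONE_WEEK;
  }
  // Prefix mode: strip the trailing `-$hash` and keep entries whose base name
  // is in the keep set.
  const idx = entry.name.lastIndexOf("-");
  const base = idx !== -1 ? entry.name.slice(0, idx) : entry.name;
  return !keepPrefix.has(base);
}
```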
src/config.ts

```diff
@@ -50,7 +50,7 @@ export class CacheConfig {
     // This uses either the `shared-key` input,
     // or the `key` input combined with the `job` key.
 
-    let key = `v0-rust`;
+    let key = core.getInput("prefix-key");
 
     const sharedKey = core.getInput("shared-key");
     if (sharedKey) {
@@ -154,7 +154,16 @@
     }
     self.workspaces = workspaces;
 
-    self.cachePaths = [CARGO_HOME, ...workspaces.map((ws) => ws.target)];
+    self.cachePaths = [CARGO_HOME];
+    const cacheTargets = core.getInput("cache-targets").toLowerCase();
+    if (cacheTargets === "true") {
+      self.cachePaths.push(...workspaces.map((ws) => ws.target));
+    }
+
+    const cacheDirectories = core.getInput("cache-directories");
+    for (const dir of cacheDirectories.trim().split("\n")) {
+      self.cachePaths.push(dir);
+    }
 
     return self;
   }
```
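The `CacheConfig` change above seeds the cache key from the new `prefix-key` input instead of the hard-coded `v0-rust` literal; per the surrounding comments and the README, the prefix is then extended with either `shared-key`, or the optional `key` input plus a job-based component. A rough sketch of that combination, with the job-based portion deliberately simplified (the real action derives it from the workflow context and further hashing):

```ts
// Rough sketch of key-prefix assembly, based on the comments in the hunk above.
// `getInput` and `jobKey` are stand-ins; the exact job-based handling is assumed.
function buildKeyPrefix(
  getInput: (name: string) => string,
  jobKey: string, // e.g. derived from the workflow's `job` id in the real action
): string {
  let key = getInput("prefix-key"); // defaults to "v0-rust" via action.yml
  const sharedKey = getInput("shared-key");
  if (sharedKey) {
    // A shared key replaces the job-based part entirely, so it is stable across jobs.
    key += `-${sharedKey}`;
  } else {
    // Otherwise the optional `key` input is added alongside the job-based key.
    const extraKey = getInput("key");
    if (extraKey) {
      key += `-${extraKey}`;
    }
    key += `-${jobKey}`;
  }
  return key;
}
```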
src/restore.ts

```diff
@@ -43,9 +43,7 @@ async function run() {
       // pre-clean the target directory on cache mismatch
       for (const workspace of config.workspaces) {
         try {
-          const packages = await workspace.getPackages();
-
-          await cleanTargetDir(workspace.target, packages, true);
+          await cleanTargetDir(workspace.target, [], true);
         } catch {}
       }
     }
```
src/workspace.ts

```diff
@@ -1,3 +1,4 @@
+import * as core from "@actions/core";
 import path from "path";
 
 import { getCmdOutput } from "./utils";
@@ -10,11 +11,13 @@ export class Workspace {
   public async getPackages(): Promise<Packages> {
     let packages: Packages = [];
     try {
+      core.debug(`collecting metadata for "${this.root}"`);
       const meta: Meta = JSON.parse(
         await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], {
           cwd: this.root,
         }),
       );
+      core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
       for (const pkg of meta.packages) {
         if (pkg.manifest_path.startsWith(this.root)) {
           continue;
```