Mirror of https://github.com/Swatinem/rust-cache.git (synced 2025-12-27 01:53:59 -05:00)
Compare commits

6 commits:

- 2656b87321
- 715970feed
- 3d4000164d
- 988c164c3d
- bb80d0f127
- ad97570a01
CHANGELOG.md
@@ -1,5 +1,10 @@
 # Changelog
 
+## 2.4.0
+
+- Fix cache key stability.
+- Use 8 character hash components to reduce the key length, making it more readable.
+
 ## 2.3.0
 
 - Add `cache-all-crates` option, which enables caching of crates installed by workflows.
README.md
@@ -101,7 +101,6 @@ This cache is automatically keyed by:
 - the value of some compiler-specific environment variables (eg. RUSTFLAGS, etc), and
 - a hash of all `Cargo.lock` / `Cargo.toml` files found anywhere in the repository (if present).
 - a hash of all `rust-toolchain` / `rust-toolchain.toml` files in the root of the repository (if present).
-- a hash of installed packages as generated by `cargo install --list`.
 
 An additional input `key` can be provided if the builtin keys are not sufficient.
 
@@ -137,7 +136,7 @@ otherwise corrupt the cache on macOS builds.
 This specialized cache action is built on top of the upstream cache action
 maintained by GitHub. The same restrictions and limits apply, which are
 documented here:
-https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows
+[Caching dependencies to speed up workflows](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows)
 
 In particular, caches are currently limited to 10 GB in total and exceeding that
 limit will cause eviction of older caches.
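For illustration only, the keying scheme described above could be sketched like this; every name below is hypothetical and not taken from the action's source:

import crypto from "crypto";

// A minimal sketch (not the actual implementation) of how the key
// components listed above might be combined into a single cache key.
function sketchCacheKey(
  rustcVersion: string,          // rustc release / host / hash
  envs: Record<string, string>,  // RUSTFLAGS and friends
  fileHashes: string[],          // hashes of Cargo.lock / Cargo.toml files
): string {
  const hasher = crypto.createHash("sha1");
  hasher.update(rustcVersion);
  for (const [name, value] of Object.entries(envs)) {
    hasher.update(`${name}=${value}`);
  }
  for (const hash of fileHashes) {
    hasher.update(hash);
  }
  // Truncated to 8 hex chars, as the 2.4.0 changelog entry describes.
  return `v0-rust-${hasher.digest("hex").substring(0, 8)}`;
}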
dist/restore/index.js (vendored, 4300 lines): file diff suppressed because it is too large.
dist/save/index.js (vendored, 4301 lines): file diff suppressed because it is too large.
package-lock.json (generated, 17 lines)
@@ -1,19 +1,20 @@
 {
   "name": "rust-cache",
-  "version": "2.3.0",
+  "version": "2.5.0",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "rust-cache",
-      "version": "2.3.0",
+      "version": "2.5.0",
       "license": "LGPL-3.0",
       "dependencies": {
         "@actions/cache": "^3.2.1",
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.1.1",
         "@actions/glob": "^0.4.0",
-        "@actions/io": "^1.1.3"
+        "@actions/io": "^1.1.3",
+        "toml": "^3.0.0"
       },
       "devDependencies": {
         "@vercel/ncc": "^0.36.1",

@@ -507,6 +508,11 @@
         "semver": "bin/semver.js"
       }
     },
+    "node_modules/toml": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/toml/-/toml-3.0.0.tgz",
+      "integrity": "sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w=="
+    },
     "node_modules/tough-cookie": {
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz",

@@ -988,6 +994,11 @@
       "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
       "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw=="
     },
+    "toml": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/toml/-/toml-3.0.0.tgz",
+      "integrity": "sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w=="
+    },
     "tough-cookie": {
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz",
package.json
@@ -1,7 +1,7 @@
 {
   "private": true,
   "name": "rust-cache",
-  "version": "2.3.0",
+  "version": "2.5.0",
   "description": "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults.",
   "keywords": [
     "actions",

@@ -26,7 +26,8 @@
     "@actions/core": "^1.10.0",
     "@actions/exec": "^1.1.1",
     "@actions/glob": "^0.4.0",
-    "@actions/io": "^1.1.3"
+    "@actions/io": "^1.1.3",
+    "toml": "^3.0.0"
   },
   "devDependencies": {
     "@vercel/ncc": "^0.36.1",
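The `toml` package added above is what src/config.ts (below) uses to parse manifests and lockfiles. A minimal usage sketch:

import * as toml from "toml";

// Parse a TOML document into a plain object.
const parsed = toml.parse(`[package]\nname = "demo"\nversion = "1.2.3"`);
console.log(parsed.package.name); // "demo"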
src/cleanup.ts
@@ -3,7 +3,7 @@ import * as io from "@actions/io";
 import fs from "fs";
 import path from "path";
 
-import { CARGO_HOME, STATE_BINS } from "./config";
+import { CARGO_HOME } from "./config";
 import { Packages } from "./workspace";
 
 export async function cleanTargetDir(targetDir: string, packages: Packages, checkTimestamp = false) {

@@ -69,9 +69,14 @@ export async function getCargoBins(): Promise<Set<string>> {
   return bins;
 }
 
-export async function cleanBin() {
+/**
+ * Clean the cargo bin directory, removing the binaries that existed
+ * when the action started, as they were not created by the build.
+ *
+ * @param oldBins The binaries that existed when the action started.
+ */
+export async function cleanBin(oldBins: Array<string>) {
   const bins = await getCargoBins();
-  const oldBins = JSON.parse(core.getState(STATE_BINS));
 
   for (const bin of oldBins) {
     bins.delete(bin);

@@ -186,10 +191,10 @@ const ONE_WEEK = 7 * 24 * 3600 * 1000;
 
 /**
  * Removes all files or directories in `dirName` matching some criteria.
  *
- *
  * When the `checkTimestamp` flag is set, this will also remove anything older
  * than one week.
  *
  * Otherwise, it will remove everything that does not match any string in the
  * `keepPrefix` set.
+ * The matching strips any trailing `-$hash` suffix.
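A usage sketch for the new cleanBin contract (illustrative, not part of the diff): the caller snapshots the binaries before the build and passes that snapshot in, so pre-existing binaries are removed while freshly installed ones stay in the cache.

import { cleanBin, getCargoBins } from "./cleanup";

async function example() {
  // Snapshot the binaries present before the build; CacheConfig.new()
  // does this via getCargoBins() and stores it as config.cargoBins.
  const before = Array.from(await getCargoBins());

  // ... build steps may `cargo install` additional binaries here ...

  // Removes the pre-existing binaries from ~/.cargo/bin so they are not
  // cached; binaries installed during the build are kept.
  await cleanBin(before);
}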
src/config.ts (233 lines)
@@ -1,20 +1,21 @@
 import * as core from "@actions/core";
 import * as glob from "@actions/glob";
+import * as toml from "toml";
 import crypto from "crypto";
 import fs from "fs";
+import fs_promises from "fs/promises";
 import os from "os";
 import path from "path";
 
 import { getCmdOutput } from "./utils";
 import { Workspace } from "./workspace";
+import { getCargoBins } from "./cleanup";
 
 const HOME = os.homedir();
 export const CARGO_HOME = process.env.CARGO_HOME || path.join(HOME, ".cargo");
 
-const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH";
-const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES";
-export const STATE_BINS = "RUST_CACHE_BINS";
-export const STATE_KEY = "RUST_CACHE_KEY";
+const STATE_CONFIG = "RUST_CACHE_CONFIG";
+const HASH_LENGTH = 8;
 
 export class CacheConfig {
   /** All the paths we want to cache */
@@ -27,6 +28,9 @@ export class CacheConfig {
   /** The workspace configurations */
   public workspaces: Array<Workspace> = [];
 
+  /** The cargo binaries present during main step */
+  public cargoBins: Array<string> = [];
+
   /** The prefix portion of the cache key */
   private keyPrefix = "";
   /** The rust version considered for the cache key */
@@ -104,24 +108,13 @@ export class CacheConfig {
 
     self.keyEnvs = keyEnvs;
 
-    // Installed packages and their versions are also considered for the key.
-    const packages = await getPackages();
-    hasher.update(packages);
-
-    key += `-${hasher.digest("hex")}`;
+    key += `-${digest(hasher)}`;
 
     self.restoreKey = key;
 
     // Construct the lockfiles portion of the key:
     // This considers all the files found via globbing for various manifests
     // and lockfiles.
-    // This part is computed in the "pre"/"restore" part of the job and persisted
-    // into the `state`. That state is loaded in the "post"/"save" part of the
-    // job so we have consistent values even though the "main" actions run
-    // might create/overwrite lockfiles.
-
-    let lockHash = core.getState(STATE_LOCKFILE_HASH);
-    let keyFiles: Array<string> = JSON.parse(core.getState(STATE_LOCKFILES) || "[]");
 
     // Constructs the workspace config and paths to restore:
     // The workspaces are given using a `$workspace -> $target` syntax.
@@ -136,32 +129,97 @@ export class CacheConfig {
     }
     self.workspaces = workspaces;
 
-    if (!lockHash) {
-      keyFiles = keyFiles.concat(await globFiles("rust-toolchain\nrust-toolchain.toml"));
-      for (const workspace of workspaces) {
-        const root = workspace.root;
-        keyFiles.push(
-          ...(await globFiles(
-            `${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`,
-          )),
-        );
-      }
-      keyFiles = keyFiles.filter(file => !fs.statSync(file).isDirectory());
-      keyFiles.sort((a, b) => a.localeCompare(b));
-
-      hasher = crypto.createHash("sha1");
-      for (const file of keyFiles) {
-        for await (const chunk of fs.createReadStream(file)) {
-          hasher.update(chunk);
-        }
-      }
-      lockHash = hasher.digest("hex");
-
-      core.saveState(STATE_LOCKFILE_HASH, lockHash);
-      core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles));
-    }
+    let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml");
+    const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed
 
-    self.keyFiles = keyFiles;
+    hasher = crypto.createHash("sha1");
+
+    for (const workspace of workspaces) {
+      const root = workspace.root;
+      keyFiles.push(
+        ...(await globFiles(
+          `${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`,
+        )),
+      );
+
+      const cargo_manifests = sort_and_uniq(await globFiles(`${root}/**/Cargo.toml`));
+
+      for (const cargo_manifest of cargo_manifests) {
+        try {
+          const content = await fs_promises.readFile(cargo_manifest, { encoding: "utf8" });
+          const parsed = toml.parse(content);
+
+          if ("package" in parsed) {
+            const pack = parsed.package;
+            if ("version" in pack) {
+              pack.version = "0.0.0";
+            }
+          }
+
+          for (const prefix of ["", "build-", "dev-"]) {
+            const section_name = `${prefix}dependencies`;
+            if (!(section_name in parsed)) {
+              continue;
+            }
+            const deps = parsed[section_name];
+
+            for (const key of Object.keys(deps)) {
+              const dep = deps[key];
+
+              if ("path" in dep) {
+                dep.version = "0.0.0";
+              }
+            }
+          }
+
+          hasher.update(JSON.stringify(sort_object(parsed)));
+
+          parsedKeyFiles.push(cargo_manifest);
+        } catch (_e) { // Fallback to caching them as regular file
+          keyFiles.push(cargo_manifest);
+        }
+      }
+
+      const cargo_locks = sort_and_uniq(await globFiles(`${root}/**/Cargo.lock`));
+
+      for (const cargo_lock of cargo_locks) {
+        try {
+          const content = await fs_promises.readFile(cargo_lock, { encoding: "utf8" });
+          const parsed = toml.parse(content);
+
+          if (parsed.version !== 3 || !("package" in parsed)) {
+            // Fallback to caching them as regular file since this action
+            // can only handle Cargo.lock format version 3
+            keyFiles.push(cargo_lock);
+            continue;
+          }
+
+          // Packages without `[[package]].source` and `[[package]].checksum`
+          // are the ones with `path = "..."` to crates within the workspace.
+          const packages = parsed.package.filter((p: any) => "source" in p || "checksum" in p);
+
+          hasher.update(JSON.stringify(sort_object(packages)));
+
+          parsedKeyFiles.push(cargo_lock);
+        } catch (_e) { // Fallback to caching them as regular file
+          keyFiles.push(cargo_lock);
+        }
+      }
+    }
+    keyFiles = sort_and_uniq(keyFiles);
+
+    for (const file of keyFiles) {
+      for await (const chunk of fs.createReadStream(file)) {
+        hasher.update(chunk);
+      }
+    }
+
+    let lockHash = digest(hasher);
+
+    keyFiles.push(...parsedKeyFiles);
+    self.keyFiles = sort_and_uniq(keyFiles);
 
     key += `-${lockHash}`;
     self.cacheKey = key;
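The effect of the manifest pre-processing above can be shown in isolation: two manifests that differ only in `package.version` hash identically once the version is zeroed out. This standalone sketch skips the key-sorting (`sort_object`) step for brevity:

import * as toml from "toml";
import crypto from "crypto";

function stableManifestHash(content: string): string {
  const parsed = toml.parse(content);
  if ("package" in parsed && "version" in parsed.package) {
    parsed.package.version = "0.0.0"; // a version bump no longer busts the cache
  }
  return crypto.createHash("sha1").update(JSON.stringify(parsed)).digest("hex").substring(0, 8);
}

const a = stableManifestHash(`[package]\nname = "demo"\nversion = "1.0.0"`);
const b = stableManifestHash(`[package]\nname = "demo"\nversion = "1.0.1"`);
console.log(a === b); // true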
@@ -177,9 +235,37 @@ export class CacheConfig {
       self.cachePaths.push(dir);
     }
 
+    const bins = await getCargoBins();
+    self.cargoBins = Array.from(bins.values());
+
     return self;
   }
 
+  /**
+   * Reads and returns the cache config from the action `state`.
+   *
+   * @throws {Error} if the state is not present.
+   * @returns {CacheConfig} the configuration.
+   * @see {@link CacheConfig#saveState}
+   * @see {@link CacheConfig#new}
+   */
+  static fromState(): CacheConfig {
+    const source = core.getState(STATE_CONFIG);
+    if (!source) {
+      throw new Error("Cache configuration not found in state");
+    }
+
+    const self = new CacheConfig();
+    Object.assign(self, JSON.parse(source));
+    self.workspaces = self.workspaces
+      .map((w: any) => new Workspace(w.root, w.target));
+
+    return self;
+  }
+
+  /**
+   * Prints the configuration to the action log.
+   */
   printInfo() {
     core.startGroup("Cache Configuration");
     core.info(`Workspaces:`);
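Taken together, the new state round-trip looks roughly like this (a sketch; it only behaves meaningfully inside a GitHub Actions runner, where core.saveState/core.getState work):

import { CacheConfig, isCacheUpToDate } from "./config";

async function restoreStep() {
  const config = await CacheConfig.new(); // compute key from manifests, lockfiles, env
  // ... attempt the cache restore ...
  config.saveState(); // persisted only when the save step will have work to do
}

async function saveStep() {
  if (isCacheUpToDate()) return;          // empty state means a full cache hit upstream
  const config = CacheConfig.fromState(); // throws if the pre step never saved it
  console.log(config.cacheKey);           // the same key the pre step computed
}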
@@ -207,6 +293,33 @@ export class CacheConfig {
     }
     core.endGroup();
   }
 
+  /**
+   * Saves the configuration to the state store.
+   * This is used to restore the configuration in the post action.
+   */
+  saveState() {
+    core.saveState(STATE_CONFIG, this);
+  }
 }
 
+/**
+ * Checks if the cache is up to date.
+ *
+ * @returns `true` if the cache is up to date, `false` otherwise.
+ */
+export function isCacheUpToDate(): boolean {
+  return core.getState(STATE_CONFIG) === "";
+}
+
+/**
+ * Returns a hex digest of the given hasher truncated to `HASH_LENGTH`.
+ *
+ * @param hasher The hasher to digest.
+ * @returns The hex digest.
+ */
+function digest(hasher: crypto.Hash): string {
+  return hasher.digest("hex").substring(0, HASH_LENGTH);
+}
 
 interface RustVersion {
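A quick illustration of the truncated digest, which is what the 2.4.0 changelog entry means by "8 character hash components":

import crypto from "crypto";

// The full 40-char sha1 hex digest is cut down to HASH_LENGTH = 8 chars.
const hasher = crypto.createHash("sha1");
hasher.update("some key material");
console.log(hasher.digest("hex").substring(0, 8)); // 8 hex chars instead of 40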
@@ -225,15 +338,49 @@ async function getRustVersion(): Promise<RustVersion> {
   return Object.fromEntries(splits);
 }
 
 async function getPackages(): Promise<string> {
   let stdout = await getCmdOutput("cargo", ["install", "--list"]);
   // Make OS independent.
   return stdout.split(/[\n\r]+/).join("\n");
 }
 
 async function globFiles(pattern: string): Promise<string[]> {
   const globber = await glob.create(pattern, {
     followSymbolicLinks: false,
   });
-  return await globber.glob();
+  // fs.statSync resolves the symbolic link and returns stat for the
+  // file it points to, so isFile makes sure the resolved file is
+  // actually a regular file.
+  return (await globber.glob()).filter(file => fs.statSync(file).isFile());
 }
+
+function sort_and_uniq(a: string[]) {
+  return a
+    .sort((a, b) => a.localeCompare(b))
+    .reduce(
+      (accumulator: string[], currentValue: string) => {
+        const len = accumulator.length;
+        // Since the array is already sorted, elements with the same value
+        // are grouped together, contiguous in space. So if the accumulator
+        // is empty or its last element differs from currentValue, then
+        // currentValue must be unique.
+        if (len == 0 || accumulator[len - 1].localeCompare(currentValue) != 0) {
+          accumulator.push(currentValue);
+        }
+        return accumulator;
+      },
+      []
+    );
+}
+
+function sort_object(o: any): any {
+  if (Array.isArray(o)) {
+    return o.sort().map(sort_object);
+  } else if (typeof o === "object" && o != null) {
+    return Object
+      .keys(o)
+      .sort()
+      .reduce(function (a: any, k) {
+        a[k] = sort_object(o[k]);
+        return a;
+      }, {});
+  } else {
+    return o;
+  }
+}
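A small demonstration of the two helpers above (illustrative):

// Duplicates are dropped after sorting.
console.log(sort_and_uniq(["b", "a", "b", "a"])); // ["a", "b"]

// sort_object makes JSON.stringify insensitive to key insertion order,
// which keeps the hashed representation of parsed TOML deterministic.
const x = JSON.stringify(sort_object({ b: 1, a: { d: 2, c: 3 } }));
const y = JSON.stringify(sort_object({ a: { c: 3, d: 2 }, b: 1 }));
console.log(x === y); // true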
src/restore.ts
@@ -1,13 +1,13 @@
 import * as cache from "@actions/cache";
 import * as core from "@actions/core";
 
-import { cleanTargetDir, getCargoBins } from "./cleanup";
-import { CacheConfig, STATE_BINS, STATE_KEY } from "./config";
+import { cleanTargetDir } from "./cleanup";
+import { CacheConfig } from "./config";
 
 process.on("uncaughtException", (e) => {
-  core.info(`[warning] ${e.message}`);
+  core.error(e.message);
   if (e.stack) {
-    core.info(e.stack);
+    core.error(e.stack);
   }
 });

@@ -29,9 +29,6 @@ async function run() {
     config.printInfo();
     core.info("");
 
-    const bins = await getCargoBins();
-    core.saveState(STATE_BINS, JSON.stringify([...bins]));
-
     core.info(`... Restoring cache ...`);
     const key = config.cacheKey;
     // Pass a copy of cachePaths to avoid mutating the original array as reported by:

@@ -39,28 +36,31 @@ async function run() {
     // TODO: remove this once the underlying bug is fixed.
     const restoreKey = await cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey]);
     if (restoreKey) {
-      core.info(`Restored from cache key "${restoreKey}".`);
-      core.saveState(STATE_KEY, restoreKey);
-
-      if (restoreKey !== key) {
+      const match = restoreKey === key;
+      core.info(`Restored from cache key "${restoreKey}" full match: ${match}.`);
+      if (!match) {
        // pre-clean the target directory on cache mismatch
        for (const workspace of config.workspaces) {
          try {
            await cleanTargetDir(workspace.target, [], true);
          } catch {}
        }
+
+        // We restored the cache but it is not a full match.
+        config.saveState();
       }
 
-      setCacheHitOutput(restoreKey === key);
+      setCacheHitOutput(match);
     } else {
       core.info("No cache found.");
+      config.saveState();
 
       setCacheHitOutput(false);
     }
   } catch (e) {
     setCacheHitOutput(false);
 
-    core.info(`[warning] ${(e as any).stack}`);
+    core.error(`${(e as any).stack}`);
   }
 }
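setCacheHitOutput is referenced in the hunks above but not shown in this diff; a plausible shape, assuming the standard core.setOutput API, would be:

import * as core from "@actions/core";

// Assumed helper, not taken from the diff.
function setCacheHitOutput(cacheHit: boolean): void {
  core.setOutput("cache-hit", cacheHit.toString());
}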
src/save.ts (24 lines)
@@ -3,12 +3,12 @@ import * as core from "@actions/core";
 import * as exec from "@actions/exec";
 
 import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup";
-import { CacheConfig, STATE_KEY } from "./config";
+import { CacheConfig, isCacheUpToDate } from "./config";
 
 process.on("uncaughtException", (e) => {
-  core.info(`[warning] ${e.message}`);
+  core.error(e.message);
   if (e.stack) {
-    core.info(e.stack);
+    core.error(e.stack);
   }
 });

@@ -20,15 +20,15 @@ async function run() {
   }
 
   try {
-    const config = await CacheConfig.new();
-    config.printInfo();
-    core.info("");
-
-    if (core.getState(STATE_KEY) === config.cacheKey) {
+    if (isCacheUpToDate()) {
       core.info(`Cache up-to-date.`);
       return;
     }
 
+    const config = CacheConfig.fromState();
+    config.printInfo();
+    core.info("");
+
     // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
     await macOsWorkaround();

@@ -45,16 +45,16 @@ async function run() {
   }
 
   try {
-    const creates = core.getInput("cache-all-crates").toLowerCase() || "false";
-    core.info(`... Cleaning cargo registry cache-all-crates: ${creates} ...`);
-    await cleanRegistry(allPackages, creates === "true");
+    const crates = core.getInput("cache-all-crates").toLowerCase() || "false";
+    core.info(`... Cleaning cargo registry cache-all-crates: ${crates} ...`);
+    await cleanRegistry(allPackages, crates !== "true");
   } catch (e) {
     core.error(`${(e as any).stack}`);
   }
 
   try {
     core.info(`... Cleaning cargo/bin ...`);
-    await cleanBin();
+    await cleanBin(config.cargoBins);
   } catch (e) {
     core.error(`${(e as any).stack}`);
   }
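Note the inversion in the registry-cleaning call: the input string is parsed the same way as before, but the boolean argument is flipped. A sketch of the parsing (interpreting cleanRegistry's second parameter as "prune unused crates" is an assumption, not stated in the diff):

import * as core from "@actions/core";

// "cache-all-crates" arrives as a string input and defaults to "false".
const cacheAllCrates = core.getInput("cache-all-crates").toLowerCase() || "false";

// Assumption: pruning of unused crates happens exactly when the option is
// NOT enabled, so enabling it keeps every installed crate in the cache.
const pruneUnusedCrates = cacheAllCrates !== "true";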
src/utils.ts
@@ -22,8 +22,8 @@ export async function getCmdOutput(
       ...options,
     });
   } catch (e) {
-    core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`);
-    core.info(`[warning] ${stderr}`);
+    core.error(`Command failed: ${cmd} ${args.join(" ")}`);
+    core.error(stderr);
     throw e;
   }
   return stdout;
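A usage sketch for getCmdOutput (illustrative): it captures stdout, and on failure the command line and stderr are now reported via core.error before the exception is re-thrown.

import { getCmdOutput } from "./utils";

async function example() {
  const version = await getCmdOutput("rustc", ["-V"]); // e.g. "rustc 1.70.0 ..."
  console.log(version.trim());
}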