Mirror of https://github.com/Swatinem/rust-cache.git, synced 2025-12-27 01:53:59 -05:00
Compare commits
60 Commits
Commits in this comparison (SHA1 only):

cb2cf0cc7c, 74e8e24b6d, f8f67b7515, 5b2b053862, 3bb3a9a087, d127014599,
801365cd81, c5ed9ba6b7, 536c94f32c, 842ef286ff, 1b344a0a23, 31c41a926e,
ebd95456c3, 3b8bbcb11d, f82d41bcc2, 063471b9dd, ce325b6065, da42bbe56d,
a9bca6b5a6, b17d52110e, b495963495, 83aad8d470, 958028d559, 27793b3b80,
be44a3e6ff, 2639a56bb8, cbcc887094, ae893481e8, d7bda0e369, 9c05405335,
08d3994b7a, 9e10a44ea3, fb2efae33d, da5df52d2f, 0eea7b85d4, 645c6972a6,
6ccf2463db, 9cc357c650, 9de90d2338, 292ef23e77, 5f6034beb8, b740ae5d3a,
e8e3c57b3b, f77cb1be47, 2bcc375de8, bd4d2a7017, d38127a85b, a4a1d8e7a6,
33677a20f2, 1d1bff80c5, 08ca2ff969, ef89c3a8eb, d45cd2b045, 271ff4b692,
a6b59fa340, e0c07d2a65, 06ff70612d, 1304a2ec8d, cfcc373039, 8902a8fc6c
`.github/workflows/selftest.yml` (vendored, new file, 31 lines)

```yaml
name: CI

on: [push, pull_request]

jobs:
  selftest:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]

    name: Test Action on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}

    steps:
      - uses: actions/checkout@v2

      - uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          profile: minimal
          override: true

      - uses: ./
        with:
          cache-on-failure: true

      - run: |
          cargo install cargo-deny --locked
          cargo check
          cargo test
```
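The `cargo install cargo-deny --locked` step gives the save phase a freshly installed binary in `~/.cargo/bin` to persist; the action snapshots the binaries present before the run and later drops them from the cache (see `cleanBin` in `src/save.ts` further down). A minimal sketch of that set-difference, with invented inputs:

```ts
// Minimal sketch of the bin-cleanup idea from src/save.ts: only binaries that
// appeared during the job survive into the cache. The sets below are made up.
function newlyInstalledBins(before: Set<string>, after: Set<string>): Set<string> {
  const fresh = new Set(after);
  for (const bin of before) {
    fresh.delete(bin); // tools that pre-existed the run (e.g. rustup shims) are dropped
  }
  return fresh;
}

const before = new Set(["cargo", "rustc", "rustup"]);
const after = new Set([...before, "cargo-deny"]);
console.log([...newlyInstalledBins(before, after)]); // ["cargo-deny"]
```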
`.gitignore` (vendored, 1 line added)

```diff
@@ -1 +1,2 @@
 node_modules
+/target
```
`CHANGELOG.md` (new file, 34 lines)

```markdown
# Changelog

## 1.4.0

- Clean both `debug` and `release` target directories.

## 1.3.0

- Use Rust toolchain file as additional cache key.
- Allow for a configurable target-dir.

## 1.2.0

- Cache `~/.cargo/bin`.
- Support for custom `$CARGO_HOME`.
- Add a `cache-hit` output.
- Add a new `sharedKey` option that overrides the automatic job-name based key.

## 1.1.0

- Add a new `working-directory` input.
- Support caching git dependencies.
- Lots of other improvements.

## 1.0.2

- Don’t prune targets that have a different name from the crate, but do prune targets from the workspace.

## 1.0.1

- Improved logging output.
- Make sure to consider `all-features` dependencies when pruning.
- Work around macOS cache corruption.
- Remove git-db cache for now.
```
`Cargo.lock` (generated, new file, 1665 lines): file diff suppressed because it is too large.
`Cargo.toml` (new file, 10 lines)

```toml
[package]
publish = false
name = "rust-cache"
version = "0.1.0"
authors = ["Arpad Borsos <arpad.borsos@googlemail.com>"]
edition = "2018"

[dev-dependencies]
reqwest = "0.11.0"
actix-web = { git = "https://github.com/actix/actix-web.git", rev = "bd26083f333ecf63e3eb444748250364ce124f5e" }
```
`LICENSE` (new file, 165 lines)

```
                   GNU LESSER GENERAL PUBLIC LICENSE
                       Version 3, 29 June 2007

 Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
 Everyone is permitted to copy and distribute verbatim copies
 of this license document, but changing it is not allowed.


  This version of the GNU Lesser General Public License incorporates
the terms and conditions of version 3 of the GNU General Public
License, supplemented by the additional permissions listed below.

  0. Additional Definitions.

  As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the GNU
General Public License.

  "The Library" refers to a covered work governed by this License,
other than an Application or a Combined Work as defined below.

  An "Application" is any work that makes use of an interface provided
by the Library, but which is not otherwise based on the Library.
Defining a subclass of a class defined by the Library is deemed a mode
of using an interface provided by the Library.

  A "Combined Work" is a work produced by combining or linking an
Application with the Library.  The particular version of the Library
with which the Combined Work was made is also called the "Linked
Version".

  The "Minimal Corresponding Source" for a Combined Work means the
Corresponding Source for the Combined Work, excluding any source code
for portions of the Combined Work that, considered in isolation, are
based on the Application, and not on the Linked Version.

  The "Corresponding Application Code" for a Combined Work means the
object code and/or source code for the Application, including any data
and utility programs needed for reproducing the Combined Work from the
Application, but excluding the System Libraries of the Combined Work.

  1. Exception to Section 3 of the GNU GPL.

  You may convey a covered work under sections 3 and 4 of this License
without being bound by section 3 of the GNU GPL.

  2. Conveying Modified Versions.

  If you modify a copy of the Library, and, in your modifications, a
facility refers to a function or data to be supplied by an Application
that uses the facility (other than as an argument passed when the
facility is invoked), then you may convey a copy of the modified
version:

   a) under this License, provided that you make a good faith effort to
   ensure that, in the event an Application does not supply the
   function or data, the facility still operates, and performs
   whatever part of its purpose remains meaningful, or

   b) under the GNU GPL, with none of the additional permissions of
   this License applicable to that copy.

  3. Object Code Incorporating Material from Library Header Files.

  The object code form of an Application may incorporate material from
a header file that is part of the Library.  You may convey such object
code under terms of your choice, provided that, if the incorporated
material is not limited to numerical parameters, data structure
layouts and accessors, or small macros, inline functions and templates
(ten or fewer lines in length), you do both of the following:

   a) Give prominent notice with each copy of the object code that the
   Library is used in it and that the Library and its use are
   covered by this License.

   b) Accompany the object code with a copy of the GNU GPL and this license
   document.

  4. Combined Works.

  You may convey a Combined Work under terms of your choice that,
taken together, effectively do not restrict modification of the
portions of the Library contained in the Combined Work and reverse
engineering for debugging such modifications, if you also do each of
the following:

   a) Give prominent notice with each copy of the Combined Work that
   the Library is used in it and that the Library and its use are
   covered by this License.

   b) Accompany the Combined Work with a copy of the GNU GPL and this license
   document.

   c) For a Combined Work that displays copyright notices during
   execution, include the copyright notice for the Library among
   these notices, as well as a reference directing the user to the
   copies of the GNU GPL and this license document.

   d) Do one of the following:

       0) Convey the Minimal Corresponding Source under the terms of this
       License, and the Corresponding Application Code in a form
       suitable for, and under terms that permit, the user to
       recombine or relink the Application with a modified version of
       the Linked Version to produce a modified Combined Work, in the
       manner specified by section 6 of the GNU GPL for conveying
       Corresponding Source.

       1) Use a suitable shared library mechanism for linking with the
       Library.  A suitable mechanism is one that (a) uses at run time
       a copy of the Library already present on the user's computer
       system, and (b) will operate properly with a modified version
       of the Library that is interface-compatible with the Linked
       Version.

   e) Provide Installation Information, but only if you would otherwise
   be required to provide such information under section 6 of the
   GNU GPL, and only to the extent that such information is
   necessary to install and execute a modified version of the
   Combined Work produced by recombining or relinking the
   Application with a modified version of the Linked Version. (If
   you use option 4d0, the Installation Information must accompany
   the Minimal Corresponding Source and Corresponding Application
   Code. If you use option 4d1, you must provide the Installation
   Information in the manner specified by section 6 of the GNU GPL
   for conveying Corresponding Source.)

  5. Combined Libraries.

  You may place library facilities that are a work based on the
Library side by side in a single library together with other library
facilities that are not Applications and are not covered by this
License, and convey such a combined library under terms of your
choice, if you do both of the following:

   a) Accompany the combined library with a copy of the same work based
   on the Library, uncombined with any other library facilities,
   conveyed under the terms of this License.

   b) Give prominent notice with the combined library that part of it
   is a work based on the Library, and explaining where to find the
   accompanying uncombined form of the same work.

  6. Revised Versions of the GNU Lesser General Public License.

  The Free Software Foundation may publish revised and/or new versions
of the GNU Lesser General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.

  Each version is given a distinguishing version number. If the
Library as you received it specifies that a certain numbered version
of the GNU Lesser General Public License "or any later version"
applies to it, you have the option of following the terms and
conditions either of that published version or of any later version
published by the Free Software Foundation. If the Library as you
received it does not specify a version number of the GNU Lesser
General Public License, you may choose any version of the GNU Lesser
General Public License ever published by the Free Software Foundation.

  If the Library as you received it specifies that a proxy can decide
whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the
Library.
```
`README.md` (120 lines changed)

````diff
@@ -1,51 +1,111 @@
 # Rust Cache Action
 
-A GitHub Action that implements smart caching for rust/cargo projects
-
-## Inputs
-
-- `key` - An optional key for the `target` cache. This is useful in case you
-  have different jobs for test / check / clippy, etc
+A GitHub Action that implements smart caching for rust/cargo projects with
+sensible defaults.
 
 ## Example usage
 
 ```yaml
-- uses: Swatinem/rust-cache@v1
+- uses: actions/checkout@v2
+
+# selecting a toolchain either by action or manual `rustup` calls should happen
+# before the plugin, as it uses the current rustc version as its cache key
+- uses: actions-rs/toolchain@v1
   with:
-    key: test
+    profile: minimal
+    toolchain: stable
+
+- uses: Swatinem/rust-cache@v1
 ```
 
-## Specifics
+## Inputs
 
-This action tries to be better than just caching the following directories:
+: `key`
+An optional key that is added to the automatic cache key.
 
-```
-~/.cargo/registry
-~/.cargo/git
-target
-```
+: `sharedKey`
+An additional key that is stable over multiple jobs.
 
-It disables incremental compilation and only caches dependencies. The
-assumption is that we will likely recompile the own crate(s) anyway.
+: `working-directory`
+The working directory the action operates in, in case the cargo project is not
+located in the repo root.
 
-It also separates the cache into 4 groups, each treated differently:
+: `target-dir`
+The target directory that should be cleaned and persisted, defaults to `./target`.
 
-- Index: `~/.cargo/registry/index/<registry>`:
+: `cache-on-failure`
+Cache even if the build fails. Defaults to false.
 
-  This is always restored from its latest snapshot, and persisted based on the
-  most recent revision.
+## Outputs
 
-- Registry / Cache: `~/.cargo/registry/cache/<registry>`:
+: `cache-hit`
 
-  Automatically keyed by the lockfile/toml hash, and is being pruned to only
-  persist the dependencies that are being used.
+This is a boolean flag that will be set to `true` when there was an exact cache hit.
 
-- Registry / Git: `~/.cargo/registry/git/<registry>`:
+## Cache Effectiveness
 
-  Automatically keyed by the lockfile/toml hash. Pruning is still TODO.
+This action only caches the _dependencies_ of a crate, so it is more effective if
+the dependency / own code ratio is higher.
 
-- target: `./target`
+It is also most effective for repositories with a `Cargo.lock` file. Library
+repositories with only a `Cargo.toml` file have limited benefits, as cargo will
+_always_ use the most up-to-date dependency versions, which may not be cached.
 
-  Automatically keyed by the lockfile/toml hash, and is being pruned to only
-  persist the dependencies that are being used. This is especially throwing
-  away any intermediate artifacts.
+Usage with Stable Rust is most effective, as a cache is tied to the Rust version.
+Using it with Nightly Rust is less effective, as it will throw away the cache every day.
+
+## Versioning
+
+I use the `v1` branch similar to `master` development, so if you want to have
+a more stable experience, please use a fixed revision or tag.
+
+## Cache Details
+
+This action currently caches the following files/directories:
+
+- `~/.cargo/bin`
+- `~/.cargo/registry/index`
+- `~/.cargo/registry/cache`
+- `~/.cargo/git`
+- `~/.cargo/.crates.toml`
+- `~/.cargo/.crates2.json`
+- `./target`
+
+This cache is automatically keyed by:
+
+- the github [`job_id`](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_id),
+- the rustc release / host / hash,
+- a hash of all `Cargo.lock` / `Cargo.toml` files found anywhere in the repository (if present), and
+- a hash of all `rust-toolchain` / `rust-toolchain.toml` files in the root of the repository (if present).
+
+An additional input `key` can be provided if the builtin keys are not sufficient.
+
+Before being persisted, the cache is cleaned of:
+
+- Any files in `~/.cargo/bin` that were present before the action ran (for example `rustc`).
+- Dependencies that are no longer used.
+- Anything that is not a dependency.
+- Incremental build artifacts.
+- Any build artifacts with an `mtime` older than one week.
+
+In particular, the workspace crates themselves are not cached since doing so is
+[generally not effective](https://github.com/Swatinem/rust-cache/issues/37#issuecomment-944697938).
+For this reason, this action automatically sets `CARGO_INCREMENTAL=0` to disable
+incremental compilation, so that the Rust compiler doesn't waste time creating
+the additional artifacts required for incremental builds.
+
+The `~/.cargo/registry/src` directory is not cached since it is quicker for Cargo
+to recreate it from the compressed crate archives in `~/.cargo/registry/cache`.
+
+The action will try to restore from a previous `Cargo.lock` version as well, so
+lockfile updates should only re-build changed dependencies.
+
+Additionally, the action automatically works around
+[cargo#8603](https://github.com/rust-lang/cargo/issues/8603) /
+[actions/cache#403](https://github.com/actions/cache/issues/403), which would
+otherwise corrupt the cache on macOS builds.
+
+## Known issues
+
+- The cache cleaning process currently only runs against the build artifacts under
+  `./target/debug/`, so projects using release or cross-compiled builds will experience
+  larger cache sizes.
````
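As a rough illustration of how the key components listed in the README combine, here is a hedged sketch. The function and parameter names are invented, the real logic lives in `getCacheConfig` / `getRustKey` / `getLockfileHash` in `src/common.ts` below, and the choice of SHA-1 for the lockfile hash is an assumption of this sketch (the diff view only shows the `crypto` hasher being updated and truncated):

```ts
import crypto from "crypto";

// Invented illustration of the automatic key: "v0-rust-" + user key (or job
// name) + rustc identity + a short hash over all lockfiles/manifests.
// The sha1 algorithm here is an assumption for the sketch.
function composeCacheKey(job: string, rustcId: string, lockfileContents: string[], userKey?: string): string {
  let key = "v0-rust-";
  key += `${userKey ?? job}-`;
  key += rustcId; // e.g. "1.60.0-x86_64-unknown-linux-gnu-<short commit hash>"
  const hasher = crypto.createHash("sha1");
  for (const content of lockfileContents) {
    hasher.update(content);
  }
  return `${key}-${hasher.digest("hex").slice(0, 20)}`;
}
```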
`action.yml` (21 lines changed)

```diff
@@ -1,15 +1,30 @@
 name: "Rust Cache"
-description: "A GitHub Action that implements smart caching for rust/cargo projects"
+description: "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults."
 author: "Arpad Borsos <arpad.borsos@googlemail.com>"
 inputs:
   key:
-    description: "An explicit key for restoring and saving the target cache"
+    description: "An additional key for the cache"
     required: false
+  sharedKey:
+    description: "An additional cache key that is stable over multiple jobs"
+    required: false
+  working-directory:
+    description: "The working directory this action should operate in"
+    required: false
+  target-dir:
+    description: "The target dir that should be cleaned and persisted, defaults to `./target`"
+    required: false
+  cache-on-failure:
+    description: "Cache even if the build fails. Defaults to false"
+    required: false
+outputs:
+  cache-hit:
+    description: "A boolean value that indicates an exact match was found"
 runs:
   using: "node12"
   main: "dist/restore/index.js"
   post: "dist/save/index.js"
-  post-if: "success()"
+  post-if: "success() || env.CACHE_ON_FAILURE == 'true'"
 branding:
   icon: "archive"
   color: "gray-dark"
```
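The new `post-if` condition is the glue for `cache-on-failure`: the post (save) step normally runs only on success, but the main (restore) entry point exports an environment variable that the runner consults when evaluating the expression. A minimal sketch of that export, mirroring what `src/restore.ts` does below:

```ts
import * as core from "@actions/core";

// The restore step normalizes the `cache-on-failure` input to "true"/"false"
// and exports it as an env var; the runner then evaluates the action's
// `post-if: "success() || env.CACHE_ON_FAILURE == 'true'"` against it.
function exportCacheOnFailure(rawInput: string): void {
  const value = rawInput.toLowerCase() === "true" ? "true" : "false";
  core.exportVariable("CACHE_ON_FAILURE", value);
}
```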
`dist/restore/index.js` (vendored, 53460 lines): file diff suppressed because one or more lines are too long.

`dist/save/index.js` (vendored, 53669 lines): file diff suppressed because one or more lines are too long.

`package-lock.json` (generated, 1088 lines): file diff suppressed because it is too large.
`package.json` (24 lines changed)

```diff
@@ -1,8 +1,8 @@
 {
   "private": true,
   "name": "rust-cache",
-  "version": "1.0.0",
-  "description": "A GitHub Action that implements smart caching for rust/cargo projects",
+  "version": "1.4.0",
+  "description": "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults.",
   "keywords": [
     "actions",
     "rust",
@@ -17,20 +17,22 @@
   "bugs": {
     "url": "https://github.com/Swatinem/rust-cache/issues"
   },
-  "funding": "https://github.com/sponsors/Swatinem",
+  "funding": {
+    "url": "https://github.com/sponsors/Swatinem"
+  },
   "homepage": "https://github.com/Swatinem/rust-cache#readme",
   "dependencies": {
-    "@actions/cache": "^1.0.2",
-    "@actions/core": "^1.2.6",
-    "@actions/exec": "^1.0.4",
-    "@actions/glob": "^0.1.0",
-    "@actions/io": "^1.0.2"
+    "@actions/cache": "^2.0.2",
+    "@actions/core": "^1.6.0",
+    "@actions/exec": "^1.1.1",
+    "@actions/glob": "^0.2.1",
+    "@actions/io": "^1.1.2"
   },
   "devDependencies": {
-    "@vercel/ncc": "^0.24.1",
-    "typescript": "^4.0.3"
+    "@vercel/ncc": "^0.33.3",
+    "typescript": "4.6.3"
   },
   "scripts": {
-    "prepare": "ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts"
+    "prepare": "ncc build --target es2020 -o dist/restore src/restore.ts && ncc build --target es2020 -o dist/save src/save.ts"
   }
 }
```
`src/common.ts` (243 lines changed)

```diff
@@ -1,30 +1,44 @@
 import * as core from "@actions/core";
 import * as exec from "@actions/exec";
 import * as glob from "@actions/glob";
 import * as io from "@actions/io";
 import crypto from "crypto";
 import fs from "fs";
 import os from "os";
 import path from "path";
 
-const home = os.homedir();
-export const paths = {
-  index: path.join(home, ".cargo/registry/index"),
-  cache: path.join(home, ".cargo/registry/cache"),
-  git: path.join(home, ".cargo/git/db"),
-  target: "target",
-};
+process.on("uncaughtException", (e) => {
+  core.info(`[warning] ${e.message}`);
+  if (e.stack) {
+    core.info(e.stack);
+  }
+});
 
-export interface CacheConfig {
-  path: string;
-  key: string;
-  restoreKeys?: Array<string>;
+const cwd = core.getInput("working-directory");
+// TODO: this could be read from .cargo config file directly
+const targetDir = core.getInput("target-dir") || "./target";
+if (cwd) {
+  process.chdir(cwd);
 }
 
-export interface Caches {
-  index: CacheConfig;
-  cache: CacheConfig;
-  git: CacheConfig;
-  target: CacheConfig;
+export const stateBins = "RUST_CACHE_BINS";
+export const stateKey = "RUST_CACHE_KEY";
+const stateHash = "RUST_CACHE_HASH";
+
+const home = os.homedir();
+const cargoHome = process.env.CARGO_HOME || path.join(home, ".cargo");
+export const paths = {
+  cargoHome,
+  index: path.join(cargoHome, "registry/index"),
+  cache: path.join(cargoHome, "registry/cache"),
+  git: path.join(cargoHome, "git"),
+  target: targetDir,
+};
+
+interface CacheConfig {
+  paths: Array<string>;
+  key: string;
+  restoreKeys: Array<string>;
 }
 
 const RefKey = "GITHUB_REF";
@@ -33,32 +47,67 @@ export function isValidEvent(): boolean {
   return RefKey in process.env && Boolean(process.env[RefKey]);
 }
 
-export async function getCaches(): Promise<Caches> {
-  const rustKey = await getRustKey();
-  let lockHash = core.getState("lockHash");
+export async function getCacheConfig(): Promise<CacheConfig> {
+  let lockHash = core.getState(stateHash);
   if (!lockHash) {
     lockHash = await getLockfileHash();
-    core.saveState("lockHash", lockHash);
+    core.saveState(stateHash, lockHash);
   }
-  let targetKey = core.getInput("key");
-  if (targetKey) {
-    targetKey = `${targetKey}-`;
+
+  let key = `v0-rust-`;
+
+  const sharedKey = core.getInput("sharedKey");
+  if (sharedKey) {
+    key += `${sharedKey}-`;
+  } else {
+    const inputKey = core.getInput("key");
+    if (inputKey) {
+      key += `${inputKey}-`;
+    }
+
+    const job = process.env.GITHUB_JOB;
+    if (job) {
+      key += `${job}-`;
+    }
   }
 
+  key += await getRustKey();
+
   return {
-    index: { path: paths.index, key: "registry-index-XXX", restoreKeys: ["registry-index"] },
-    cache: { path: paths.cache, key: `registry-cache-${lockHash}`, restoreKeys: ["registry-cache"] },
-    git: { path: paths.git, key: "git-db" },
-    target: {
-      path: paths.target,
-      key: `target-${targetKey}${rustKey}-${lockHash}`,
-      restoreKeys: [`target-${targetKey}${rustKey}`],
-    },
+    paths: [
+      path.join(cargoHome, "bin"),
+      path.join(cargoHome, ".crates2.json"),
+      path.join(cargoHome, ".crates.toml"),
+      paths.git,
+      paths.cache,
+      paths.index,
+      paths.target,
+    ],
+    key: `${key}-${lockHash}`,
+    restoreKeys: [key],
   };
 }
 
-export async function getRustKey(): Promise<string> {
+export async function getCargoBins(): Promise<Set<string>> {
+  try {
+    const { installs }: { installs: { [key: string]: { bins: Array<string> } } } = JSON.parse(
+      await fs.promises.readFile(path.join(paths.cargoHome, ".crates2.json"), "utf8"),
+    );
+    const bins = new Set<string>();
+    for (const pkg of Object.values(installs)) {
+      for (const bin of pkg.bins) {
+        bins.add(bin);
+      }
+    }
+    return bins;
+  } catch {
+    return new Set<string>();
+  }
+}
+
+async function getRustKey(): Promise<string> {
   const rustc = await getRustVersion();
-  return `${rustc.release}-${rustc.host}-${rustc["commit-hash"]}`;
+  return `${rustc.release}-${rustc.host}-${rustc["commit-hash"].slice(0, 12)}`;
 }
 
 interface RustVersion {
@@ -67,7 +116,7 @@ interface RustVersion {
   "commit-hash": string;
 }
 
-export async function getRustVersion(): Promise<RustVersion> {
+async function getRustVersion(): Promise<RustVersion> {
   const stdout = await getCmdOutput("rustc", ["-vV"]);
   let splits = stdout
     .split(/[\n\r]+/)
@@ -95,22 +144,10 @@ export async function getCmdOutput(
   return stdout;
 }
 
-export async function getRegistryName() {
-  const globber = await glob.create(`${paths.index}/**/.last-updated`, { followSymbolicLinks: false });
-  const files = await globber.glob();
-  if (files.length > 1) {
-    core.debug(`got multiple registries: "${files.join('", "')}"`);
-  }
-
-  const first = files.shift();
-  if (!first) {
-    return;
-  }
-  return path.basename(path.dirname(first));
-}
-
-export async function getLockfileHash() {
-  const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock", { followSymbolicLinks: false });
+async function getLockfileHash(): Promise<string> {
+  const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", {
+    followSymbolicLinks: false,
+  });
   const files = await globber.glob();
   files.sort((a, b) => a.localeCompare(b));
 
@@ -120,5 +157,109 @@ export async function getLockfileHash() {
       hasher.update(chunk);
     }
   }
-  return hasher.digest("hex");
+  return hasher.digest("hex").slice(0, 20);
 }
+
+export interface PackageDefinition {
+  name: string;
+  version: string;
+  path: string;
+  targets: Array<string>;
+}
+
+export type Packages = Array<PackageDefinition>;
+
+interface Meta {
+  packages: Array<{
+    name: string;
+    version: string;
+    manifest_path: string;
+    targets: Array<{ kind: Array<string>; name: string }>;
+  }>;
+}
+
+export async function getPackages(): Promise<Packages> {
+  const cwd = process.cwd();
+  const meta: Meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"]));
+
+  return meta.packages
+    .filter((p) => !p.manifest_path.startsWith(cwd))
+    .map((p) => {
+      const targets = p.targets.filter((t) => t.kind[0] === "lib").map((t) => t.name);
+      return { name: p.name, version: p.version, targets, path: path.dirname(p.manifest_path) };
+    });
+}
+
+export async function cleanTarget(packages: Packages) {
+  await fs.promises.unlink(path.join(targetDir, "./.rustc_info.json"));
+
+  await cleanProfileTarget(packages, "debug");
+  await cleanProfileTarget(packages, "release");
+}
+
+async function cleanProfileTarget(packages: Packages, profile: string) {
+  try {
+    await fs.promises.access(path.join(targetDir, profile));
+  } catch {
+    return;
+  }
+
+  await io.rmRF(path.join(targetDir, profile, "./examples"));
+  await io.rmRF(path.join(targetDir, profile, "./incremental"));
+
+  let dir: fs.Dir;
+  // remove all *files* from the profile directory
+  dir = await fs.promises.opendir(path.join(targetDir, profile));
+  for await (const dirent of dir) {
+    if (dirent.isFile()) {
+      await rm(dir.path, dirent);
+    }
+  }
+
+  const keepPkg = new Set(packages.map((p) => p.name));
+  await rmExcept(path.join(targetDir, profile, "./build"), keepPkg);
+  await rmExcept(path.join(targetDir, profile, "./.fingerprint"), keepPkg);
+
+  const keepDeps = new Set(
+    packages.flatMap((p) => {
+      const names = [];
+      for (const n of [p.name, ...p.targets]) {
+        const name = n.replace(/-/g, "_");
+        names.push(name, `lib${name}`);
+      }
+      return names;
+    }),
+  );
+  await rmExcept(path.join(targetDir, profile, "./deps"), keepDeps);
+}
+
+const oneWeek = 7 * 24 * 3600 * 1000;
+
+export async function rmExcept(dirName: string, keepPrefix: Set<string>) {
+  const dir = await fs.promises.opendir(dirName);
+  for await (const dirent of dir) {
+    let name = dirent.name;
+    const idx = name.lastIndexOf("-");
+    if (idx !== -1) {
+      name = name.slice(0, idx);
+    }
+    const fileName = path.join(dir.path, dirent.name);
+    const { mtime } = await fs.promises.stat(fileName);
+    // we don’t really know
+    if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > oneWeek) {
+      await rm(dir.path, dirent);
+    }
+  }
+}
+
+export async function rm(parent: string, dirent: fs.Dirent) {
+  try {
+    const fileName = path.join(parent, dirent.name);
+    core.debug(`deleting "${fileName}"`);
+    if (dirent.isFile()) {
+      await fs.promises.unlink(fileName);
+    } else if (dirent.isDirectory()) {
+      await io.rmRF(fileName);
+    }
+  } catch {}
+}
```
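The pruning in `rmExcept` hinges on cargo's artifact naming: build outputs are named `<crate>-<metadata hash>`, so stripping everything from the last dash recovers the crate (or `lib<crate>`) prefix, which is then checked against the keep-set. A small worked example of just that extraction (file names below are invented):

```ts
// Worked example of the prefix extraction used by rmExcept in src/common.ts:
// cargo names artifacts `<crate>-<metadata hash>`, so slicing at the last
// dash recovers the crate prefix that is matched against keepPrefix.
function artifactPrefix(fileName: string): string {
  const idx = fileName.lastIndexOf("-");
  return idx !== -1 ? fileName.slice(0, idx) : fileName;
}

console.log(artifactPrefix("serde_json-3f27ce73f39417f8")); // "serde_json"
console.log(artifactPrefix("libreqwest-9e2b0fabb21ed7d1.rlib")); // "libreqwest"
```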
`src/main.rs` (new file, 3 lines)

```rust
fn main() {
    println!("Hello, world!");
}
```
`src/restore.ts`

```diff
@@ -1,35 +1,56 @@
 import * as cache from "@actions/cache";
 import * as core from "@actions/core";
-import { getCaches, isValidEvent } from "./common";
+import { cleanTarget, getCacheConfig, getCargoBins, getPackages, stateBins, stateKey } from "./common";
 
 async function run() {
-  if (!isValidEvent()) {
+  if (!cache.isFeatureAvailable()) {
+    setCacheHitOutput(false);
     return;
   }
 
   try {
+    var cacheOnFailure = core.getInput("cache-on-failure").toLowerCase();
+    if (cacheOnFailure !== "true") {
+      cacheOnFailure = "false";
+    }
+    core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure);
     core.exportVariable("CARGO_INCREMENTAL", 0);
 
-    const caches = await getCaches();
-    for (const [name, { path, key, restoreKeys }] of Object.entries(caches)) {
-      try {
-        core.startGroup(`Restoring "${path}" from "${key}"…`);
-        const restoreKey = await cache.restoreCache([path], key, restoreKeys);
-        if (restoreKey) {
-          core.info(`Restored "${path}" from cache key "${restoreKey}".`);
-          core.saveState(name, restoreKey);
-        } else {
-          core.info("No cache found.");
-        }
-      } catch (e) {
-        core.info(`[warning] ${e.message}`);
-      } finally {
-        core.endGroup();
+    const { paths, key, restoreKeys } = await getCacheConfig();
+
+    const bins = await getCargoBins();
+    core.saveState(stateBins, JSON.stringify([...bins]));
+
+    core.info(`Restoring paths:\n    ${paths.join("\n    ")}`);
+    core.info(`In directory:\n    ${process.cwd()}`);
+    core.info(`Using keys:\n    ${[key, ...restoreKeys].join("\n    ")}`);
+    const restoreKey = await cache.restoreCache(paths, key, restoreKeys);
+    if (restoreKey) {
+      core.info(`Restored from cache key "${restoreKey}".`);
+      core.saveState(stateKey, restoreKey);
+
+      if (restoreKey !== key) {
+        // pre-clean the target directory on cache mismatch
+        const packages = await getPackages();
+
+        await cleanTarget(packages);
       }
+
+      setCacheHitOutput(restoreKey === key);
+    } else {
+      core.info("No cache found.");
+
+      setCacheHitOutput(false);
     }
   } catch (e) {
-    core.info(`[warning] ${e.message}`);
+    setCacheHitOutput(false);
+
+    core.info(`[warning] ${(e as any).message}`);
   }
 }
 
+function setCacheHitOutput(cacheHit: boolean): void {
+  core.setOutput("cache-hit", cacheHit.toString());
+}
+
 run();
```
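Note the two-tier semantics in the restore step: `cache.restoreCache(paths, key, restoreKeys)` returns whichever key actually matched, the `cache-hit` output is `true` only for an exact match, and a prefix (restore-key) match triggers a pre-clean of stale target artifacts. A compact sketch of that decision:

```ts
// Sketch of the hit classification in src/restore.ts: an exact key match sets
// cache-hit=true; a restore-key (prefix) match is treated as stale, so the
// target directory is cleaned before the build reuses it.
function classifyRestore(requestedKey: string, matchedKey?: string): { cacheHit: boolean; preClean: boolean } {
  if (matchedKey === undefined) {
    return { cacheHit: false, preClean: false }; // cold cache
  }
  const exact = matchedKey === requestedKey;
  return { cacheHit: exact, preClean: !exact };
}
```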
`src/save.ts` (202 lines changed)

```diff
@@ -1,133 +1,161 @@
 import * as cache from "@actions/cache";
 import * as core from "@actions/core";
 import * as exec from "@actions/exec";
 import * as glob from "@actions/glob";
 import * as io from "@actions/io";
 import fs from "fs";
 import path from "path";
-import { getCaches, getCmdOutput, getRegistryName, isValidEvent, paths } from "./common";
+import {
+  cleanTarget,
+  getCacheConfig,
+  getCargoBins,
+  getPackages,
+  Packages,
+  paths,
+  rm,
+  stateBins,
+  stateKey,
+} from "./common";
 
 async function run() {
-  if (!isValidEvent()) {
-    //return;
+  if (!cache.isFeatureAvailable()) {
+    return;
   }
 
   try {
-    const caches = await getCaches();
+    const { paths: savePaths, key } = await getCacheConfig();
+
+    if (core.getState(stateKey) === key) {
+      core.info(`Cache up-to-date.`);
+      return;
+    }
+
     // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
     await macOsWorkaround();
 
     const registryName = await getRegistryName();
     const packages = await getPackages();
 
-    await pruneTarget(packages);
-    if (registryName) {
-      // save the index based on its revision
-      const indexRef = await getIndexRef(registryName);
-      caches.index.key = `registry-index-${indexRef}`;
-      await io.rmRF(path.join(paths.index, registryName, ".cache"));
+    try {
+      await cleanRegistry(registryName, packages);
+    } catch {}
 
-      await pruneRegistryCache(registryName, packages);
-    } else {
-      delete (caches as any).index;
-      delete (caches as any).cache;
-    }
+    try {
+      await cleanBin();
+    } catch {}
 
-    for (const [name, { path, key }] of Object.entries(caches)) {
-      if (core.getState(name) === key) {
-        core.info(`Cache for "${path}" up-to-date.`);
-        continue;
-      }
-      try {
-        core.startGroup(`Saving "${path}" to cache key "${key}"…`);
-        if (await cache.saveCache([path], key)) {
-          core.info(`Saved "${path}" to cache key "${key}".`);
-        }
-      } catch (e) {
-        core.info(`[warning] ${e.message}`);
-      } finally {
-        core.endGroup();
-      }
-    }
+    try {
+      await cleanGit(packages);
+    } catch {}
+
+    try {
+      await cleanTarget(packages);
+    } catch {}
+
+    core.info(`Saving paths:\n    ${savePaths.join("\n    ")}`);
+    core.info(`In directory:\n    ${process.cwd()}`);
+    core.info(`Using key:\n    ${key}`);
+    await cache.saveCache(savePaths, key);
   } catch (e) {
-    core.info(`[warning] ${e.message}`);
+    core.info(`[warning] ${(e as any).message}`);
   }
 }
 
 run();
 
-async function getIndexRef(registryName: string) {
-  const cwd = path.join(paths.index, registryName);
-  return (await getCmdOutput("git", ["rev-parse", "--short", "origin/master"], { cwd })).trim();
+async function getRegistryName(): Promise<string> {
+  const globber = await glob.create(`${paths.index}/**/.last-updated`, { followSymbolicLinks: false });
+  const files = await globber.glob();
+  if (files.length > 1) {
+    core.warning(`got multiple registries: "${files.join('", "')}"`);
+  }
+
+  const first = files.shift()!;
+  return path.basename(path.dirname(first));
 }
 
-interface PackageDefinition {
-  name: string;
-  version: string;
+async function cleanBin() {
+  const bins = await getCargoBins();
+  const oldBins = JSON.parse(core.getState(stateBins));
+
+  for (const bin of oldBins) {
+    bins.delete(bin);
+  }
+
+  const dir = await fs.promises.opendir(path.join(paths.cargoHome, "bin"));
+  for await (const dirent of dir) {
+    if (dirent.isFile() && !bins.has(dirent.name)) {
+      await rm(dir.path, dirent);
+    }
+  }
 }
 
-type Packages = Array<PackageDefinition>;
+async function cleanRegistry(registryName: string, packages: Packages) {
+  await io.rmRF(path.join(paths.index, registryName, ".cache"));
 
-async function getPackages(): Promise<Packages> {
-  const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--format-version", "1"]));
-  return meta.packages.map(({ name, version }: any) => ({ name, version }));
-}
-
-async function pruneRegistryCache(registryName: string, packages: Packages) {
   const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
 
   const dir = await fs.promises.opendir(path.join(paths.cache, registryName));
   for await (const dirent of dir) {
     if (dirent.isFile() && !pkgSet.has(dirent.name)) {
-      const fileName = path.join(dir.path, dirent.name);
-      await fs.promises.unlink(fileName);
-      core.debug(`deleting "${fileName}"`);
+      await rm(dir.path, dirent);
     }
   }
 }
 
-async function pruneTarget(packages: Packages) {
-  await fs.promises.unlink("./target/.rustc_info.json");
-  await io.rmRF("./target/debug/examples");
-  await io.rmRF("./target/debug/incremental");
+async function cleanGit(packages: Packages) {
+  const coPath = path.join(paths.git, "checkouts");
+  const dbPath = path.join(paths.git, "db");
+  const repos = new Map<string, Set<string>>();
+  for (const p of packages) {
+    if (!p.path.startsWith(coPath)) {
+      continue;
+    }
+    const [repo, ref] = p.path.slice(coPath.length + 1).split(path.sep);
+    const refs = repos.get(repo);
+    if (refs) {
+      refs.add(ref);
+    } else {
+      repos.set(repo, new Set([ref]));
+    }
+  }
+
+  // we have to keep both the clone, and the checkout, removing either will
+  // trigger a rebuild
 
   let dir: fs.Dir;
 
-  // remove all *files* from debug
-  dir = await fs.promises.opendir("./target/debug");
+  // clean the db
+  dir = await fs.promises.opendir(dbPath);
   for await (const dirent of dir) {
-    if (dirent.isFile()) {
-      const fileName = path.join(dir.path, dirent.name);
-      await fs.promises.unlink(fileName);
+    if (!repos.has(dirent.name)) {
+      await rm(dir.path, dirent);
     }
   }
 
-  const keepPkg = new Set(packages.map((p) => p.name));
-  await rmExcept("./target/debug/build", keepPkg);
-  await rmExcept("./target/debug/.fingerprint", keepPkg);
-
-  const keepDeps = new Set(
-    packages.flatMap((p) => {
-      const name = p.name.replace(/-/g, "_");
-      return [name, `lib${name}`];
-    }),
-  );
-  await rmExcept("./target/debug/deps", keepDeps);
-}
-
-const twoWeeks = 14 * 24 * 3600 * 1000;
-
-async function rmExcept(dirName: string, keepPrefix: Set<string>) {
-  const dir = await fs.promises.opendir(dirName);
+  // clean the checkouts
+  dir = await fs.promises.opendir(coPath);
   for await (const dirent of dir) {
-    let name = dirent.name;
-    const idx = name.lastIndexOf("-");
-    if (idx !== -1) {
-      name = name.slice(0, idx);
+    const refs = repos.get(dirent.name);
+    if (!refs) {
+      await rm(dir.path, dirent);
+      continue;
     }
-    const fileName = path.join(dir.path, dirent.name);
-    const { mtime } = await fs.promises.stat(fileName);
-    if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > twoWeeks) {
-      core.debug(`deleting "${fileName}"`);
-      if (dirent.isFile()) {
-        await fs.promises.unlink(fileName);
-      } else if (dirent.isDirectory()) {
-        await io.rmRF(fileName);
+    if (!dirent.isDirectory()) {
+      continue;
+    }
+    const refsDir = await fs.promises.opendir(path.join(dir.path, dirent.name));
+    for await (const dirent of refsDir) {
+      if (!refs.has(dirent.name)) {
+        await rm(refsDir.path, dirent);
       }
     }
   }
 }
 
 async function macOsWorkaround() {
   try {
     // Workaround for https://github.com/actions/cache/issues/403
     // Also see https://github.com/rust-lang/cargo/issues/8603
     await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true });
   } catch {}
 }
```
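`cleanGit` decides what to keep by mapping each git-dependency manifest path under `~/.cargo/git/checkouts/<repo>-<hash>/<short rev>/…` back to its repo directory and checked-out rev. For the pinned `actix-web` rev from `Cargo.toml` above, the parse looks roughly like this (the hash suffix in the directory name is invented):

```ts
import path from "path";

// Illustrative parse mirroring the slice/split in cleanGit (src/save.ts).
// The `-1e60d0a33547aa45` suffix is a made-up cargo directory hash.
const coPath = path.join("/home/runner/.cargo/git", "checkouts");
const manifestDir = path.join(coPath, "actix-web-1e60d0a33547aa45", "bd26083", "actix-web");

const [repo, ref] = manifestDir.slice(coPath.length + 1).split(path.sep);
console.log(repo); // "actix-web-1e60d0a33547aa45"
console.log(ref);  // "bd26083"
```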
`tsconfig.json`

```diff
@@ -4,7 +4,7 @@
     "diagnostics": true,
     "lib": ["esnext"],
 
-    "target": "es2017",
+    "target": "es2020",
 
     "resolveJsonModule": true,
     "moduleResolution": "node",
```