Mirror of https://github.com/Swatinem/rust-cache.git (synced 2025-12-27 01:53:59 -05:00)

Compare commits (21 commits)
Commits compared (SHA1):

- 81d053bdb0
- 5040f39404
- 2055a01dcd
- cb2cf0cc7c
- 74e8e24b6d
- f8f67b7515
- 5b2b053862
- 3bb3a9a087
- d127014599
- 801365cd81
- c5ed9ba6b7
- 536c94f32c
- 842ef286ff
- 1b344a0a23
- 31c41a926e
- ebd95456c3
- 3b8bbcb11d
- f82d41bcc2
- 063471b9dd
- ce325b6065
- da42bbe56d
.github/workflows/selftest.yml (vendored, 4 changes)

@@ -22,8 +22,10 @@ jobs:
           override: true

       - uses: ./
+        with:
+          cache-on-failure: true

       - run: |
+          cargo install cargo-deny --locked
           cargo check
           cargo test
-          cargo test
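Read back as YAML, the updated self-test step presumably ends up looking like this (a sketch only; indentation and the surrounding job definition are not part of the hunk):

```yaml
- uses: ./
  with:
    cache-on-failure: true

- run: |
    cargo install cargo-deny --locked
    cargo check
    cargo test
```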
CHANGELOG.md

@@ -1,5 +1,14 @@
 # Changelog

+## 1.4.0
+
+- Clean both `debug` and `release` target directories.
+
+## 1.3.0
+
+- Use Rust toolchain file as additional cache key.
+- Allow for a configurable target-dir.
+
 ## 1.2.0

 - Cache `~/.cargo/bin`.
LICENSE (new file, 165 lines)

@@ -0,0 +1,165 @@
+GNU LESSER GENERAL PUBLIC LICENSE
+Version 3, 29 June 2007
+
+Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.

Remainder of diff suppressed: the standard, unmodified text of the GNU LGPL version 3, added verbatim.
README.md (44 changes)

@@ -6,6 +6,8 @@ sensible defaults.
 ## Example usage

 ```yaml
 - uses: actions/checkout@v2

+# selecting a toolchain either by action or manual `rustup` calls should happen
+# before the plugin, as it uses the current rustc version as its cache key
 - uses: actions-rs/toolchain@v1
@@ -28,6 +30,12 @@ An additional key that is stable over multiple jobs.
 The working directory the action operates in, in case the cargo project is not
 located in the repo root.

+: `target-dir`
+The target directory that should be cleaned and persisted, defaults to `./target`.
+
+: `cache-on-failure`
+Cache even if the build fails, defaults to false.
+
 ## Outputs

 : `cache-hit`
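For reference, a sketch of how the new inputs could be set in a consuming workflow; the input names come from this diff, while the step layout, the extra `key`, and the paths are illustrative assumptions:

```yaml
- uses: Swatinem/rust-cache@v1
  with:
    key: extra-key                    # optional, appended to the automatic keys (hypothetical value)
    working-directory: ./my-crate     # hypothetical path
    target-dir: ./my-crate/target     # hypothetical path
    cache-on-failure: true
```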
@@ -53,26 +61,40 @@ a more stable experience, please use a fixed revision or tag.

 ## Cache Details

-The cache currently caches the following directories:
+This action currently caches the following files/directories:

 - `~/.cargo/bin`
 - `~/.cargo/registry/index`
 - `~/.cargo/registry/cache`
 - `~/.cargo/git`
 - `~/.cargo/.crates.toml`
 - `~/.cargo/.crates2.json`
 - `./target`

 This cache is automatically keyed by:

-- the github `job`,
+- the github [`job_id`](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_id),
 - the rustc release / host / hash, and
-- a hash of the `Cargo.lock` / `Cargo.toml` files.
+- a hash of all `Cargo.lock` / `Cargo.toml` files found anywhere in the repository (if present).
+- a hash of all `rust-toolchain` / `rust-toolchain.toml` files in the root of the repository (if present).

 An additional input `key` can be provided if the builtin keys are not sufficient.
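As an illustration of how these pieces can combine into a single cache key (the prefix, separator, and ordering below are assumptions, not taken from this diff; the rustc and lockfile components correspond to the `getRustKey` and `getLockfileHash` helpers in the `src/common.ts` hunks further down):

```typescript
// Sketch only: assemble a cache key from the automatic components plus the
// optional user-supplied `key` input.
function composeCacheKey(jobId: string, rustKey: string, lockHash: string, extraKey?: string): string {
  const parts = ["v0-rust", jobId, extraKey, rustKey, lockHash];
  return parts.filter((p): p is string => Boolean(p)).join("-");
}

// e.g. composeCacheKey("build", "1.62.0-x86_64-unknown-linux-gnu-abcdef12", "0123abcd")
//   => "v0-rust-build-1.62.0-x86_64-unknown-linux-gnu-abcdef12-0123abcd"
```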
-Before persisting, the cache is cleaned of intermediate artifacts and
-anything that is not a workspace dependency.
-In particular, no caching of workspace crates will be done. For
-this reason, this action will automatically set `CARGO_INCREMENTAL=0` to
-disable incremental compilation.
+Before being persisted, the cache is cleaned of:
+
+- Any files in `~/.cargo/bin` that were present before the action ran (for example `rustc`).
+- Dependencies that are no longer used.
+- Anything that is not a dependency.
+- Incremental build artifacts.
+- Any build artifacts with an `mtime` older than one week.
+
+In particular, the workspace crates themselves are not cached since doing so is
+[generally not effective](https://github.com/Swatinem/rust-cache/issues/37#issuecomment-944697938).
+For this reason, this action automatically sets `CARGO_INCREMENTAL=0` to disable
+incremental compilation, so that the Rust compiler doesn't waste time creating
+the additional artifacts required for incremental builds.
+
+The `~/.cargo/registry/src` directory is not cached since it is quicker for Cargo
+to recreate it from the compressed crate archives in `~/.cargo/registry/cache`.

 The action will try to restore from a previous `Cargo.lock` version as well, so
 lockfile updates should only re-build changed dependencies.
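The "restore from a previous `Cargo.lock`" behaviour is what the `restoreKeys` value returned by `getCacheConfig` (visible in the `src/restore.ts` hunk below) is for. A minimal sketch of the idea, with the exact key layout assumed rather than taken from the source:

```typescript
// Sketch: the primary key ends with the lockfile hash; the restore key is the
// same prefix without it, so a cache saved for an older Cargo.lock still matches.
function cacheKeys(base: string, lockHash: string): { key: string; restoreKeys: string[] } {
  return { key: `${base}-${lockHash}`, restoreKeys: [`${base}-`] };
}
// cache.restoreCache(paths, key, restoreKeys) then falls back to the newest
// cache whose key starts with `${base}-` when there is no exact match.
```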
@@ -81,3 +103,9 @@ Additionally, the action automatically works around
 [cargo#8603](https://github.com/rust-lang/cargo/issues/8603) /
 [actions/cache#403](https://github.com/actions/cache/issues/403) which would
 otherwise corrupt the cache on macOS builds.
+
+## Known issues
+
+- The cache cleaning process currently only runs against the build artifacts under
+  `./target/debug/`, so projects using release or cross-compiled builds will experience
+  larger cache sizes.
action.yml (10 changes)

@@ -11,14 +11,20 @@ inputs:
   working-directory:
     description: "The working directory this action should operate in"
     required: false
+  target-dir:
+    description: "The target dir that should be cleaned and persisted, defaults to `./target`"
+    required: false
+  cache-on-failure:
+    description: "Cache even if the build fails. Defaults to false"
+    required: false
 outputs:
   cache-hit:
-    description: 'A boolean value that indicates an exact match was found'
+    description: "A boolean value that indicates an exact match was found"
 runs:
   using: "node12"
   main: "dist/restore/index.js"
   post: "dist/save/index.js"
-  post-if: "success()"
+  post-if: "success() || env.CACHE_ON_FAILURE == 'true'"
 branding:
   icon: "archive"
   color: "gray-dark"
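The `post-if` change is what makes `cache-on-failure` work: the restore entry point exports a `CACHE_ON_FAILURE` environment variable (see the `src/restore.ts` hunk below), and the save step's `post-if` expression then evaluates to true even when the job failed. A condensed sketch of that hand-off, using only names that appear in this diff:

```typescript
import * as core from "@actions/core";

// Restore step: normalize the `cache-on-failure` input to "true"/"false" and
// publish it as an environment variable that the `post-if` expression can read.
const cacheOnFailure = core.getInput("cache-on-failure").toLowerCase() === "true";
core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure ? "true" : "false");

// action.yml: post-if: "success() || env.CACHE_ON_FAILURE == 'true'"
```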
dist/restore/index.js (vendored, 55925 changes): file diff suppressed because one or more lines are too long.

dist/save/index.js (vendored, 55944 changes): file diff suppressed because one or more lines are too long.

package-lock.json (generated, 758 changes): file diff suppressed because it is too large.
package.json (16 changes)

@@ -1,7 +1,7 @@
 {
   "private": true,
   "name": "rust-cache",
-  "version": "1.2.0",
+  "version": "1.4.0",
   "description": "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults.",
   "keywords": [
     "actions",
@@ -22,15 +22,15 @@
   },
   "homepage": "https://github.com/Swatinem/rust-cache#readme",
   "dependencies": {
-    "@actions/cache": "^1.0.6",
-    "@actions/core": "^1.2.6",
-    "@actions/exec": "^1.0.4",
-    "@actions/glob": "^0.1.1",
-    "@actions/io": "^1.0.2"
+    "@actions/cache": "^3.0.0",
+    "@actions/core": "^1.6.0",
+    "@actions/exec": "^1.1.1",
+    "@actions/glob": "^0.3.0",
+    "@actions/io": "^1.1.2"
   },
   "devDependencies": {
-    "@vercel/ncc": "^0.27.0",
-    "typescript": "^4.1.5"
+    "@vercel/ncc": "^0.34.0",
+    "typescript": "4.7.4"
   },
   "scripts": {
     "prepare": "ncc build --target es2020 -o dist/restore src/restore.ts && ncc build --target es2020 -o dist/save src/save.ts"
src/common.ts

@@ -9,9 +9,14 @@ import path from "path";

 process.on("uncaughtException", (e) => {
   core.info(`[warning] ${e.message}`);
+  if (e.stack) {
+    core.info(e.stack);
+  }
 });

 const cwd = core.getInput("working-directory");
+// TODO: this could be read from .cargo config file directly
+const targetDir = core.getInput("target-dir") || "./target";
 if (cwd) {
   process.chdir(cwd);
 }
@@ -27,7 +32,7 @@ export const paths = {
   index: path.join(cargoHome, "registry/index"),
   cache: path.join(cargoHome, "registry/cache"),
   git: path.join(cargoHome, "git"),
-  target: "target",
+  target: targetDir,
 };

 interface CacheConfig {
@@ -84,16 +89,20 @@ export async function getCacheConfig(): Promise<CacheConfig> {
 }

 export async function getCargoBins(): Promise<Set<string>> {
-  const { installs }: { installs: { [key: string]: { bins: Array<string> } } } = JSON.parse(
-    await fs.promises.readFile(path.join(paths.cargoHome, ".crates2.json"), "utf8"),
-  );
-  const bins = new Set<string>();
-  for (const pkg of Object.values(installs)) {
-    for (const bin of pkg.bins) {
-      bins.add(bin);
-    }
-  }
-  return bins;
+  try {
+    const { installs }: { installs: { [key: string]: { bins: Array<string> } } } = JSON.parse(
+      await fs.promises.readFile(path.join(paths.cargoHome, ".crates2.json"), "utf8"),
+    );
+    const bins = new Set<string>();
+    for (const pkg of Object.values(installs)) {
+      for (const bin of pkg.bins) {
+        bins.add(bin);
+      }
+    }
+    return bins;
+  } catch {
+    return new Set<string>();
+  }
 }

 async function getRustKey(): Promise<string> {
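The bin set computed here is how the action knows which binaries were already installed before the job ran: the restore step records it as GitHub Actions state (the `stateBins` constant imported in `src/restore.ts` below), and the save step reads it back so `cleanBin` can drop pre-existing files from `~/.cargo/bin` before caching, as described in the README hunk above. A minimal sketch of that state hand-off; the state name and serialization are assumptions:

```typescript
import * as core from "@actions/core";

// Restore step (sketch): remember which bins exist right now.
function rememberInstalledBins(bins: Set<string>): void {
  core.saveState("bins", JSON.stringify([...bins])); // "bins" is a hypothetical state name
}

// Save step (sketch): read the recorded set back before cleaning ~/.cargo/bin.
function previouslyInstalledBins(): Set<string> {
  return new Set<string>(JSON.parse(core.getState("bins") || "[]"));
}
```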
@@ -136,7 +145,9 @@ export async function getCmdOutput(
 }

 async function getLockfileHash(): Promise<string> {
-  const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock", { followSymbolicLinks: false });
+  const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", {
+    followSymbolicLinks: false,
+  });
   const files = await globber.glob();
   files.sort((a, b) => a.localeCompare(b));
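The hashing half of `getLockfileHash` falls outside this hunk. A plausible continuation, assuming the sorted files are fed into a Node `crypto` digest (algorithm and output format are assumptions, not taken from the source):

```typescript
import * as crypto from "crypto";
import * as fs from "fs";

// Sketch: hash the contents of the sorted lockfile / manifest / toolchain files
// into a short hex string suitable for use in a cache key.
async function hashFiles(files: string[]): Promise<string> {
  const hasher = crypto.createHash("sha1");
  for (const file of files) {
    hasher.update(await fs.promises.readFile(file));
  }
  return hasher.digest("hex");
}
```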
@@ -180,13 +191,25 @@ export async function getPackages(): Promise<Packages> {
 }

 export async function cleanTarget(packages: Packages) {
-  await fs.promises.unlink("./target/.rustc_info.json");
-  await io.rmRF("./target/debug/examples");
-  await io.rmRF("./target/debug/incremental");
+  await fs.promises.unlink(path.join(targetDir, "./.rustc_info.json"));
+
+  await cleanProfileTarget(packages, "debug");
+  await cleanProfileTarget(packages, "release");
+}
+
+async function cleanProfileTarget(packages: Packages, profile: string) {
+  try {
+    await fs.promises.access(path.join(targetDir, profile));
+  } catch {
+    return;
+  }
+
+  await io.rmRF(path.join(targetDir, profile, "./examples"));
+  await io.rmRF(path.join(targetDir, profile, "./incremental"));

   let dir: fs.Dir;
-  // remove all *files* from debug
-  dir = await fs.promises.opendir("./target/debug");
+  // remove all *files* from the profile directory
+  dir = await fs.promises.opendir(path.join(targetDir, profile));
   for await (const dirent of dir) {
     if (dirent.isFile()) {
       await rm(dir.path, dirent);
@@ -194,8 +217,8 @@ export async function cleanTarget(packages: Packages) {
   }

   const keepPkg = new Set(packages.map((p) => p.name));
-  await rmExcept("./target/debug/build", keepPkg);
-  await rmExcept("./target/debug/.fingerprint", keepPkg);
+  await rmExcept(path.join(targetDir, profile, "./build"), keepPkg);
+  await rmExcept(path.join(targetDir, profile, "./.fingerprint"), keepPkg);

   const keepDeps = new Set(
     packages.flatMap((p) => {
@@ -207,7 +230,7 @@ export async function cleanTarget(packages: Packages) {
       return names;
     }),
   );
-  await rmExcept("./target/debug/deps", keepDeps);
+  await rmExcept(path.join(targetDir, profile, "./deps"), keepDeps);
 }

 const oneWeek = 7 * 24 * 3600 * 1000;
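`rmExcept` itself is not part of this diff. Given the `oneWeek` constant above and the README's "older than one week" rule, a sketch of what it presumably does (the name-matching and removal details are assumptions):

```typescript
import fs from "fs";
import path from "path";
import * as io from "@actions/io";

const oneWeek = 7 * 24 * 3600 * 1000;

// Sketch: remove every entry in `dirName` whose base name (with the trailing
// `-<hash>` suffix stripped) is not in `keepPrefix`, or whose mtime is older
// than one week.
async function rmExceptSketch(dirName: string, keepPrefix: Set<string>): Promise<void> {
  const dir = await fs.promises.opendir(dirName);
  for await (const dirent of dir) {
    const fullPath = path.join(dirName, dirent.name);
    const idx = dirent.name.lastIndexOf("-");
    const name = idx === -1 ? dirent.name : dirent.name.slice(0, idx);
    const { mtime } = await fs.promises.stat(fullPath);
    if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > oneWeek) {
      await io.rmRF(fullPath);
    }
  }
}
```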
src/restore.ts

@@ -3,7 +3,17 @@ import * as core from "@actions/core";
 import { cleanTarget, getCacheConfig, getCargoBins, getPackages, stateBins, stateKey } from "./common";

 async function run() {
+  if (!cache.isFeatureAvailable()) {
+    setCacheHitOutput(false);
+    return;
+  }
+
   try {
+    var cacheOnFailure = core.getInput("cache-on-failure").toLowerCase();
+    if (cacheOnFailure !== "true") {
+      cacheOnFailure = "false";
+    }
+    core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure);
     core.exportVariable("CARGO_INCREMENTAL", 0);

     const { paths, key, restoreKeys } = await getCacheConfig();
@@ -35,7 +45,7 @@ async function run() {
   } catch (e) {
     setCacheHitOutput(false);

-    core.info(`[warning] ${e.message}`);
+    core.info(`[warning] ${(e as any).stack}`);
   }
 }
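`setCacheHitOutput` is not shown in any hunk here; given the `cache-hit` output declared in `action.yml`, it is presumably a thin wrapper along these lines (a sketch, not the actual source):

```typescript
import * as core from "@actions/core";

// Sketch: publish the `cache-hit` output declared in action.yml.
function setCacheHitOutput(cacheHit: boolean): void {
  core.setOutput("cache-hit", cacheHit.toString());
}
```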
src/save.ts (33 changes)

@@ -18,6 +18,10 @@ import {
 } from "./common";

 async function run() {
+  if (!cache.isFeatureAvailable()) {
+    return;
+  }
+
   try {
     const { paths: savePaths, key } = await getCacheConfig();

@@ -32,34 +36,44 @@ async function run() {
     const registryName = await getRegistryName();
     const packages = await getPackages();

-    try {
-      await cleanRegistry(registryName, packages);
-    } catch {}
+    if (registryName) {
+      try {
+        await cleanRegistry(registryName, packages);
+      } catch (e) {
+        core.info(`[warning] ${(e as any).stack}`);
+      }
+    }

     try {
       await cleanBin();
-    } catch {}
+    } catch (e) {
+      core.info(`[warning] ${(e as any).stack}`);
+    }

     try {
       await cleanGit(packages);
-    } catch {}
+    } catch (e) {
+      core.info(`[warning] ${(e as any).stack}`);
+    }

     try {
       await cleanTarget(packages);
-    } catch {}
+    } catch (e) {
+      core.info(`[warning] ${(e as any).stack}`);
+    }

     core.info(`Saving paths:\n ${savePaths.join("\n ")}`);
     core.info(`In directory:\n ${process.cwd()}`);
     core.info(`Using key:\n ${key}`);
     await cache.saveCache(savePaths, key);
   } catch (e) {
-    core.info(`[warning] ${e.message}`);
+    core.info(`[warning] ${(e as any).stack}`);
   }
 }

 run();

-async function getRegistryName(): Promise<string> {
+async function getRegistryName(): Promise<string | null> {
   const globber = await glob.create(`${paths.index}/**/.last-updated`, { followSymbolicLinks: false });
   const files = await globber.glob();
   if (files.length > 1) {
@@ -67,6 +81,9 @@ async function getRegistryName(): Promise<string> {
   }

   const first = files.shift()!;
+  if (!first) {
+    return null;
+  }
   return path.basename(path.dirname(first));
 }
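For the default crates.io registry, the index checkout sits in a directory such as `~/.cargo/registry/index/github.com-1ecc6299db9ec823/`, so the `.last-updated` glob resolves to the registry name like this (paths are illustrative):

```typescript
import path from "path";

// e.g. "/home/runner/.cargo/registry/index/github.com-1ecc6299db9ec823/.last-updated"
const first = "/home/runner/.cargo/registry/index/github.com-1ecc6299db9ec823/.last-updated";
const registryName = path.basename(path.dirname(first)); // "github.com-1ecc6299db9ec823"
```

When no `.last-updated` file exists at all (for example, a project with no registry dependencies), `files.shift()` yields `undefined`, the new check returns `null`, and `src/save.ts` then skips `cleanRegistry` instead of failing.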