mirror of https://gitee.com/actions-mirror/cache.git
synced 2024-11-27 13:00:42 +08:00

Merge remote-tracking branch 'origin/700-actionscache-granular-cache-control' into kotewar/readme-updates-for-granular-control

This commit is contained in: commit adecab4b4a
@@ -46,3 +46,6 @@
 ### 3.1.0-beta.2

 - Added support for fallback to gzip to restore old caches on windows.
+
+### 3.1.0-beta.3
+- Bug fixes for bsdtar fallback if gnutar not available and gzip fallback if cache saved using old cache action on windows.
@@ -66,13 +66,13 @@ test("restore without AC available should no-op", async () => {
     );

     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
+    const setCacheHitOutputMock = jest.spyOn(core, "setOutput");

     await run(new StateProvider());

     expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
-    expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
+    expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "false");
 });

 test("restore on GHES without AC available should no-op", async () => {
@@ -82,13 +82,13 @@ test("restore on GHES without AC available should no-op", async () => {
     );

     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
+    const setCacheHitOutputMock = jest.spyOn(core, "setOutput");

     await run(new StateProvider());

     expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
-    expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
+    expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "false");
 });

 test("restore on GHES with AC available ", async () => {
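Both hunks make the same substitution: the `actionUtils.setCacheHitOutput` helper is removed, so the tests spy on `core.setOutput` directly, and the assertion changes from a bare boolean to the string key/value pair that `core.setOutput` actually writes. A minimal jest sketch of the new pattern (a hypothetical test, not the repo's file):

```typescript
import * as core from "@actions/core";

// Hypothetical sketch of the updated pattern: spy on core.setOutput
// itself and assert on the ("cache-hit", "false") key/value pair,
// since step outputs are written as strings, not booleans.
test("writes cache-hit=false when the cache feature is unavailable", () => {
    const setOutputMock = jest
        .spyOn(core, "setOutput")
        .mockImplementation(() => undefined);

    // Stand-in for running the action's restore entry point.
    core.setOutput("cache-hit", "false");

    expect(setOutputMock).toHaveBeenCalledTimes(1);
    expect(setOutputMock).toHaveBeenCalledWith("cache-hit", "false");
});
```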
@@ -30,12 +30,19 @@ test("StateProvider saves states", async () => {
         .mockImplementation(name =>
             jest.requireActual("@actions/core").getState(name)
         );

     const saveStateMock = jest
         .spyOn(core, "saveState")
         .mockImplementation((key, value) => {
             return jest.requireActual("@actions/core").saveState(key, value);
         });

+    const setOutputMock = jest
+        .spyOn(core, "setOutput")
+        .mockImplementation((key, value) => {
+            return jest.requireActual("@actions/core").setOutput(key, value);
+        });
+
     const cacheMatchedKey = "node-cache";

     const stateProvider: IStateProvider = new StateProvider();
@@ -46,6 +53,7 @@ test("StateProvider saves states", async () => {

     expect(getStateMock).toHaveBeenCalledTimes(2);
     expect(saveStateMock).toHaveBeenCalledTimes(2);
+    expect(setOutputMock).toHaveBeenCalledTimes(0);
 });

 test("NullStateProvider saves outputs", async () => {
@@ -54,11 +62,19 @@ test("NullStateProvider saves outputs", async () => {
         .mockImplementation(name =>
             jest.requireActual("@actions/core").getState(name)
         );

     const setOutputMock = jest
         .spyOn(core, "setOutput")
         .mockImplementation((key, value) => {
             return jest.requireActual("@actions/core").setOutput(key, value);
         });

+    const saveStateMock = jest
+        .spyOn(core, "saveState")
+        .mockImplementation((key, value) => {
+            return jest.requireActual("@actions/core").saveState(key, value);
+        });
+
     const cacheMatchedKey = "node-cache";
     const nullStateProvider: IStateProvider = new NullStateProvider();
     nullStateProvider.setState(State.CacheMatchedKey, "outputValue");
@@ -68,4 +84,5 @@ test("NullStateProvider saves outputs", async () => {

     expect(getStateMock).toHaveBeenCalledTimes(0);
     expect(setOutputMock).toHaveBeenCalledTimes(2);
+    expect(saveStateMock).toHaveBeenCalledTimes(0);
 });
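Taken together, these two tests pin down the contract behind the granular-control split: `StateProvider` persists values with `core.saveState`/`core.getState` (the combined restore-plus-save action has a post step that reads them back), while `NullStateProvider` routes the same writes to `core.setOutput` and never touches workflow state. A rough TypeScript sketch of that interface, inferred from the tests rather than copied from the repo's source:

```typescript
import * as core from "@actions/core";

// Inferred sketch, not the repo's actual stateProvider.ts: both
// providers expose the same surface, but route writes differently.
interface IStateProvider {
    setState(key: string, value: string): void;
    getState(key: string): string;
}

class StateProvider implements IStateProvider {
    // Combined restore/save action: values survive until the post step.
    setState = (key: string, value: string): void => core.saveState(key, value);
    getState = (key: string): string => core.getState(key);
}

class NullStateProvider implements IStateProvider {
    // Standalone restore-only/save-only entry points have no post step,
    // so values are surfaced as step outputs instead of state.
    setState = (key: string, value: string): void => core.setOutput(key, value);
    getState = (_key: string): string => "";
}
```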
dist/restore-only/index.js (vendored, 103 lines changed)

@@ -3432,6 +3432,7 @@ function getCacheEntry(keys, paths, options) {
         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
         if (response.statusCode === 204) {
+            // Cache not found
             return null;
         }
         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) {
@@ -3440,6 +3441,7 @@ function getCacheEntry(keys, paths, options) {
         const cacheResult = response.result;
         const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
         if (!cacheDownloadUrl) {
+            // Cache achiveLocation not found. This should never happen, and hence bail out.
             throw new Error('Cache not found.');
         }
         core.setSecret(cacheDownloadUrl);
@@ -10045,7 +10047,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.setCacheHitOutput = exports.isExactKeyMatch = exports.isGhes = void 0;
+exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
 const cache = __importStar(__webpack_require__(692));
 const core = __importStar(__webpack_require__(470));
 const constants_1 = __webpack_require__(196);
@@ -10061,10 +10063,6 @@ function isExactKeyMatch(key, cacheKey) {
     }) === 0);
 }
 exports.isExactKeyMatch = isExactKeyMatch;
-function setCacheHitOutput(isCacheHit) {
-    core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString());
-}
-exports.setCacheHitOutput = setCacheHitOutput;
 function logWarning(message) {
     const warningPrefix = "[warning]";
     core.info(`${warningPrefix}${message}`);
@@ -38205,7 +38203,7 @@ const path = __importStar(__webpack_require__(622));
 const utils = __importStar(__webpack_require__(15));
 const constants_1 = __webpack_require__(931);
 const IS_WINDOWS = process.platform === 'win32';
-// Function also mutates the args array. For non-mutation call with passing an empty array.
+// Returns tar path and type: BSD or GNU
 function getTarPath() {
     return __awaiter(this, void 0, void 0, function* () {
         switch (process.platform) {
@@ -38237,6 +38235,7 @@ function getTarPath() {
             default:
                 break;
         }
+        // Default assumption is GNU tar is present in path
        return {
             path: yield io.which('tar', true),
             type: constants_1.ArchiveToolType.GNU
@@ -38250,6 +38249,7 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
         const cacheFileName = utils.getCacheFileName(compressionMethod);
         const tarFile = 'cache.tar';
         const workingDirectory = getWorkingDirectory();
+        // Speficic args for BSD tar on windows for workaround
         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
             compressionMethod !== constants_1.CompressionMethod.Gzip &&
             IS_WINDOWS;
@@ -38287,8 +38287,10 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
         return args;
     });
 }
-function getArgs(compressionMethod, type, archivePath = '') {
+// Returns commands to run tar and compression program
+function getCommands(compressionMethod, type, archivePath = '') {
     return __awaiter(this, void 0, void 0, function* () {
+        let args;
         const tarPath = yield getTarPath();
         const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
         const compressionArgs = type !== 'create'
@@ -38298,11 +38300,15 @@ function getArgs(compressionMethod, type, archivePath = '') {
             compressionMethod !== constants_1.CompressionMethod.Gzip &&
             IS_WINDOWS;
         if (BSD_TAR_ZSTD && type !== 'create') {
-            return [...compressionArgs, ...tarArgs].join(' ');
+            args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
         }
         else {
-            return [...tarArgs, ...compressionArgs].join(' ');
+            args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
         }
+        if (BSD_TAR_ZSTD) {
+            return args;
+        }
+        return [args.join(' ')];
     });
 }
 function getWorkingDirectory() {
@@ -38325,8 +38331,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
                 ? [
                     'zstd -d --long=30 -o',
                     constants_1.TarFilename,
-                    archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                    '&&'
+                    archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
                 ]
                 : [
                     '--use-compress-program',
@@ -38337,8 +38342,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
                 ? [
                     'zstd -d -o',
                     constants_1.TarFilename,
-                    archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                    '&&'
+                    archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
                 ]
                 : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
             default:
@@ -38346,6 +38350,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
         }
     });
 }
+// Used for creating the archive
 // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
 // zstdmt is equivalent to 'zstd -T0'
 // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
@@ -38361,7 +38366,6 @@ function getCompressionProgram(tarPath, compressionMethod) {
             case constants_1.CompressionMethod.Zstd:
                 return BSD_TAR_ZSTD
                     ? [
-                        '&&',
                         'zstd -T0 --long=30 -o',
                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
                         constants_1.TarFilename
@@ -38373,7 +38377,6 @@ function getCompressionProgram(tarPath, compressionMethod) {
             case constants_1.CompressionMethod.ZstdWithoutLong:
                 return BSD_TAR_ZSTD
                     ? [
-                        '&&',
                         'zstd -T0 -o',
                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
                         constants_1.TarFilename
@@ -38384,44 +38387,45 @@ function getCompressionProgram(tarPath, compressionMethod) {
         }
     });
 }
-function listTar(archivePath, compressionMethod) {
+// Executes all commands as separate processes
+function execCommands(commands, cwd) {
     return __awaiter(this, void 0, void 0, function* () {
-        const args = yield getArgs(compressionMethod, 'list', archivePath);
-        try {
-            yield exec_1.exec(args);
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+        for (const command of commands) {
+            try {
+                yield exec_1.exec(command, undefined, { cwd });
+            }
+            catch (error) {
+                throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+            }
         }
     });
 }
+// List the contents of a tar
+function listTar(archivePath, compressionMethod) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const commands = yield getCommands(compressionMethod, 'list', archivePath);
+        yield execCommands(commands);
+    });
+}
 exports.listTar = listTar;
+// Extract a tar
 function extractTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
         const workingDirectory = getWorkingDirectory();
         yield io.mkdirP(workingDirectory);
-        const args = yield getArgs(compressionMethod, 'extract', archivePath);
-        try {
-            yield exec_1.exec(args);
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-        }
+        const commands = yield getCommands(compressionMethod, 'extract', archivePath);
+        yield execCommands(commands);
     });
 }
 exports.extractTar = extractTar;
+// Create a tar
 function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Write source directories to manifest.txt to avoid command length limits
         fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
-        const args = yield getArgs(compressionMethod, 'create');
-        try {
-            yield exec_1.exec(args, undefined, { cwd: archiveFolder });
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-        }
+        const commands = yield getCommands(compressionMethod, 'create');
+        yield execCommands(commands, archiveFolder);
     });
 }
 exports.createTar = createTar;
@@ -47237,15 +47241,12 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         let compressionMethod = yield utils.getCompressionMethod();
         let archivePath = '';
         try {
-            try {
-                // path are needed to compute version
-                cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
-                    compressionMethod
-                });
-            }
-            catch (error) {
-                // This is to support the old cache entry created
-                // by the old version of the cache action on windows.
+            // path are needed to compute version
+            cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
+                compressionMethod
+            });
+            if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
+                // This is to support the old cache entry created by gzip on windows.
                 if (process.platform === 'win32' &&
                     compressionMethod !== constants_1.CompressionMethod.Gzip) {
                     compressionMethod = constants_1.CompressionMethod.Gzip;
@@ -47253,17 +47254,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
                     compressionMethod
                 });
                 if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-                    throw error;
+                    return undefined;
                 }
+                core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression.");
             }
             else {
-                throw error;
+                // Cache not found
+                return undefined;
             }
         }
-        if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-            // Cache not found
-            return undefined;
-        }
         archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
         core.debug(`Archive Path: ${archivePath}`);
         // Download the cache from the cache entry
@@ -50459,7 +50458,7 @@ function restoreImpl(stateProvider) {
     return __awaiter(this, void 0, void 0, function* () {
         try {
             if (!utils.isCacheFeatureAvailable()) {
-                utils.setCacheHitOutput(false);
+                core.setOutput(constants_1.Outputs.CacheHit, "false");
                 return;
             }
             // Validate inputs, this can cause task failure
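The tar rework in this bundle (repeated verbatim in the other `dist` bundles below) replaces `getArgs`, which joined tar and the compression program into one `&&`-chained command line, with `getCommands` plus `execCommands`, which run each command as its own process. That sidesteps the `&&` chaining that the previous single `exec` call relied on, and is what makes the BSD-tar-with-zstd path workable on Windows. A simplified TypeScript sketch of the pattern (not the vendored source; the command strings are hypothetical, for illustration only):

```typescript
import { exec } from "@actions/exec";

// Simplified sketch of the execCommands pattern above: run each
// command in order as a separate process instead of joining them
// with '&&' into a single command line.
async function execCommands(commands: string[], cwd?: string): Promise<void> {
    for (const command of commands) {
        try {
            await exec(command, undefined, { cwd });
        } catch (error) {
            // Name the failing program ("tar", "zstd", ...) in the error.
            throw new Error(
                `${command.split(" ")[0]} failed with error: ${(error as Error)?.message}`
            );
        }
    }
}

// Hypothetical usage: extracting a zstd cache with BSD tar on Windows
// becomes two processes, decompress first, then untar.
async function example(): Promise<void> {
    await execCommands([
        'zstd -d --long=30 -o cache.tar "D:/a/_temp/cache.tzst"',
        'tar -xf cache.tar -P -C "D:/a/my-repo"'
    ]);
}
```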
dist/restore/index.js (vendored, 103 lines changed)

@@ -3432,6 +3432,7 @@ function getCacheEntry(keys, paths, options) {
         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
         if (response.statusCode === 204) {
+            // Cache not found
             return null;
         }
         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) {
@@ -3440,6 +3441,7 @@ function getCacheEntry(keys, paths, options) {
         const cacheResult = response.result;
         const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
         if (!cacheDownloadUrl) {
+            // Cache achiveLocation not found. This should never happen, and hence bail out.
             throw new Error('Cache not found.');
         }
         core.setSecret(cacheDownloadUrl);
@@ -38114,7 +38116,7 @@ const path = __importStar(__webpack_require__(622));
 const utils = __importStar(__webpack_require__(15));
 const constants_1 = __webpack_require__(931);
 const IS_WINDOWS = process.platform === 'win32';
-// Function also mutates the args array. For non-mutation call with passing an empty array.
+// Returns tar path and type: BSD or GNU
 function getTarPath() {
     return __awaiter(this, void 0, void 0, function* () {
         switch (process.platform) {
@@ -38146,6 +38148,7 @@ function getTarPath() {
             default:
                 break;
         }
+        // Default assumption is GNU tar is present in path
         return {
             path: yield io.which('tar', true),
             type: constants_1.ArchiveToolType.GNU
@@ -38159,6 +38162,7 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
         const cacheFileName = utils.getCacheFileName(compressionMethod);
         const tarFile = 'cache.tar';
         const workingDirectory = getWorkingDirectory();
+        // Speficic args for BSD tar on windows for workaround
         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
             compressionMethod !== constants_1.CompressionMethod.Gzip &&
             IS_WINDOWS;
@@ -38196,8 +38200,10 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
         return args;
     });
 }
-function getArgs(compressionMethod, type, archivePath = '') {
+// Returns commands to run tar and compression program
+function getCommands(compressionMethod, type, archivePath = '') {
     return __awaiter(this, void 0, void 0, function* () {
+        let args;
         const tarPath = yield getTarPath();
         const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
         const compressionArgs = type !== 'create'
@@ -38207,11 +38213,15 @@ function getArgs(compressionMethod, type, archivePath = '') {
             compressionMethod !== constants_1.CompressionMethod.Gzip &&
             IS_WINDOWS;
         if (BSD_TAR_ZSTD && type !== 'create') {
-            return [...compressionArgs, ...tarArgs].join(' ');
+            args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
         }
         else {
-            return [...tarArgs, ...compressionArgs].join(' ');
+            args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
         }
+        if (BSD_TAR_ZSTD) {
+            return args;
+        }
+        return [args.join(' ')];
     });
 }
 function getWorkingDirectory() {
@@ -38234,8 +38244,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
                 ? [
                     'zstd -d --long=30 -o',
                     constants_1.TarFilename,
-                    archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                    '&&'
+                    archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
                 ]
                 : [
                     '--use-compress-program',
@@ -38246,8 +38255,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
                 ? [
                     'zstd -d -o',
                     constants_1.TarFilename,
-                    archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                    '&&'
+                    archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
                 ]
                 : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
             default:
@@ -38255,6 +38263,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
         }
     });
 }
+// Used for creating the archive
 // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
 // zstdmt is equivalent to 'zstd -T0'
 // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
@@ -38270,7 +38279,6 @@ function getCompressionProgram(tarPath, compressionMethod) {
             case constants_1.CompressionMethod.Zstd:
                 return BSD_TAR_ZSTD
                     ? [
-                        '&&',
                         'zstd -T0 --long=30 -o',
                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
                         constants_1.TarFilename
@@ -38282,7 +38290,6 @@ function getCompressionProgram(tarPath, compressionMethod) {
             case constants_1.CompressionMethod.ZstdWithoutLong:
                 return BSD_TAR_ZSTD
                     ? [
-                        '&&',
                         'zstd -T0 -o',
                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
                         constants_1.TarFilename
@@ -38293,44 +38300,45 @@ function getCompressionProgram(tarPath, compressionMethod) {
         }
     });
 }
-function listTar(archivePath, compressionMethod) {
+// Executes all commands as separate processes
+function execCommands(commands, cwd) {
     return __awaiter(this, void 0, void 0, function* () {
-        const args = yield getArgs(compressionMethod, 'list', archivePath);
-        try {
-            yield exec_1.exec(args);
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+        for (const command of commands) {
+            try {
+                yield exec_1.exec(command, undefined, { cwd });
+            }
+            catch (error) {
+                throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+            }
         }
     });
 }
+// List the contents of a tar
+function listTar(archivePath, compressionMethod) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const commands = yield getCommands(compressionMethod, 'list', archivePath);
+        yield execCommands(commands);
+    });
+}
 exports.listTar = listTar;
+// Extract a tar
 function extractTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
         const workingDirectory = getWorkingDirectory();
         yield io.mkdirP(workingDirectory);
-        const args = yield getArgs(compressionMethod, 'extract', archivePath);
-        try {
-            yield exec_1.exec(args);
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-        }
+        const commands = yield getCommands(compressionMethod, 'extract', archivePath);
+        yield execCommands(commands);
     });
 }
 exports.extractTar = extractTar;
+// Create a tar
 function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Write source directories to manifest.txt to avoid command length limits
         fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
-        const args = yield getArgs(compressionMethod, 'create');
-        try {
-            yield exec_1.exec(args, undefined, { cwd: archiveFolder });
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-        }
+        const commands = yield getCommands(compressionMethod, 'create');
+        yield execCommands(commands, archiveFolder);
     });
 }
 exports.createTar = createTar;
@@ -38565,7 +38573,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.setCacheHitOutput = exports.isExactKeyMatch = exports.isGhes = void 0;
+exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
 const cache = __importStar(__webpack_require__(692));
 const core = __importStar(__webpack_require__(470));
 const constants_1 = __webpack_require__(196);
@@ -38581,10 +38589,6 @@ function isExactKeyMatch(key, cacheKey) {
     }) === 0);
 }
 exports.isExactKeyMatch = isExactKeyMatch;
-function setCacheHitOutput(isCacheHit) {
-    core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString());
-}
-exports.setCacheHitOutput = setCacheHitOutput;
 function logWarning(message) {
     const warningPrefix = "[warning]";
     core.info(`${warningPrefix}${message}`);
@@ -47208,15 +47212,12 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         let compressionMethod = yield utils.getCompressionMethod();
         let archivePath = '';
         try {
-            try {
-                // path are needed to compute version
-                cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
-                    compressionMethod
-                });
-            }
-            catch (error) {
-                // This is to support the old cache entry created
-                // by the old version of the cache action on windows.
+            // path are needed to compute version
+            cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
+                compressionMethod
+            });
+            if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
+                // This is to support the old cache entry created by gzip on windows.
                 if (process.platform === 'win32' &&
                     compressionMethod !== constants_1.CompressionMethod.Gzip) {
                     compressionMethod = constants_1.CompressionMethod.Gzip;
@@ -47224,17 +47225,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
                     compressionMethod
                 });
                 if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-                    throw error;
+                    return undefined;
                 }
+                core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression.");
             }
             else {
-                throw error;
+                // Cache not found
+                return undefined;
             }
         }
-        if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-            // Cache not found
-            return undefined;
-        }
         archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
         core.debug(`Archive Path: ${archivePath}`);
         // Download the cache from the cache entry
@@ -50459,7 +50458,7 @@ function restoreImpl(stateProvider) {
     return __awaiter(this, void 0, void 0, function* () {
         try {
             if (!utils.isCacheFeatureAvailable()) {
-                utils.setCacheHitOutput(false);
+                core.setOutput(constants_1.Outputs.CacheHit, "false");
                 return;
             }
             // Validate inputs, this can cause task failure
dist/save-only/index.js (vendored, 101 lines changed)

@@ -3461,6 +3461,7 @@ function getCacheEntry(keys, paths, options) {
         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
         if (response.statusCode === 204) {
+            // Cache not found
             return null;
         }
         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) {
@@ -3469,6 +3470,7 @@ function getCacheEntry(keys, paths, options) {
         const cacheResult = response.result;
         const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
         if (!cacheDownloadUrl) {
+            // Cache achiveLocation not found. This should never happen, and hence bail out.
             throw new Error('Cache not found.');
         }
         core.setSecret(cacheDownloadUrl);
@@ -38138,7 +38140,7 @@ const path = __importStar(__webpack_require__(622));
 const utils = __importStar(__webpack_require__(15));
 const constants_1 = __webpack_require__(931);
 const IS_WINDOWS = process.platform === 'win32';
-// Function also mutates the args array. For non-mutation call with passing an empty array.
+// Returns tar path and type: BSD or GNU
 function getTarPath() {
     return __awaiter(this, void 0, void 0, function* () {
         switch (process.platform) {
@@ -38170,6 +38172,7 @@ function getTarPath() {
             default:
                 break;
         }
+        // Default assumption is GNU tar is present in path
         return {
             path: yield io.which('tar', true),
             type: constants_1.ArchiveToolType.GNU
@@ -38183,6 +38186,7 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
         const cacheFileName = utils.getCacheFileName(compressionMethod);
         const tarFile = 'cache.tar';
         const workingDirectory = getWorkingDirectory();
+        // Speficic args for BSD tar on windows for workaround
         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
             compressionMethod !== constants_1.CompressionMethod.Gzip &&
             IS_WINDOWS;
@@ -38220,8 +38224,10 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
         return args;
     });
 }
-function getArgs(compressionMethod, type, archivePath = '') {
+// Returns commands to run tar and compression program
+function getCommands(compressionMethod, type, archivePath = '') {
     return __awaiter(this, void 0, void 0, function* () {
+        let args;
         const tarPath = yield getTarPath();
         const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
         const compressionArgs = type !== 'create'
@@ -38231,11 +38237,15 @@ function getArgs(compressionMethod, type, archivePath = '') {
             compressionMethod !== constants_1.CompressionMethod.Gzip &&
             IS_WINDOWS;
         if (BSD_TAR_ZSTD && type !== 'create') {
-            return [...compressionArgs, ...tarArgs].join(' ');
+            args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
         }
         else {
-            return [...tarArgs, ...compressionArgs].join(' ');
+            args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
         }
+        if (BSD_TAR_ZSTD) {
+            return args;
+        }
+        return [args.join(' ')];
     });
 }
 function getWorkingDirectory() {
@@ -38258,8 +38268,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
                 ? [
                     'zstd -d --long=30 -o',
                     constants_1.TarFilename,
-                    archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                    '&&'
+                    archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
                 ]
                 : [
                     '--use-compress-program',
@@ -38270,8 +38279,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
                 ? [
                     'zstd -d -o',
                     constants_1.TarFilename,
-                    archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                    '&&'
+                    archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
                 ]
                 : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
             default:
@@ -38279,6 +38287,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
         }
     });
 }
+// Used for creating the archive
 // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
 // zstdmt is equivalent to 'zstd -T0'
 // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
@@ -38294,7 +38303,6 @@ function getCompressionProgram(tarPath, compressionMethod) {
             case constants_1.CompressionMethod.Zstd:
                 return BSD_TAR_ZSTD
                     ? [
-                        '&&',
                         'zstd -T0 --long=30 -o',
                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
                         constants_1.TarFilename
@@ -38306,7 +38314,6 @@ function getCompressionProgram(tarPath, compressionMethod) {
             case constants_1.CompressionMethod.ZstdWithoutLong:
                 return BSD_TAR_ZSTD
                     ? [
-                        '&&',
                         'zstd -T0 -o',
                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
                         constants_1.TarFilename
@@ -38317,44 +38324,45 @@ function getCompressionProgram(tarPath, compressionMethod) {
         }
     });
 }
-function listTar(archivePath, compressionMethod) {
+// Executes all commands as separate processes
+function execCommands(commands, cwd) {
     return __awaiter(this, void 0, void 0, function* () {
-        const args = yield getArgs(compressionMethod, 'list', archivePath);
-        try {
-            yield exec_1.exec(args);
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+        for (const command of commands) {
+            try {
+                yield exec_1.exec(command, undefined, { cwd });
+            }
+            catch (error) {
+                throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+            }
         }
     });
 }
+// List the contents of a tar
+function listTar(archivePath, compressionMethod) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const commands = yield getCommands(compressionMethod, 'list', archivePath);
+        yield execCommands(commands);
+    });
+}
 exports.listTar = listTar;
+// Extract a tar
 function extractTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
         const workingDirectory = getWorkingDirectory();
         yield io.mkdirP(workingDirectory);
-        const args = yield getArgs(compressionMethod, 'extract', archivePath);
-        try {
-            yield exec_1.exec(args);
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-        }
+        const commands = yield getCommands(compressionMethod, 'extract', archivePath);
+        yield execCommands(commands);
     });
 }
 exports.extractTar = extractTar;
+// Create a tar
 function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Write source directories to manifest.txt to avoid command length limits
         fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
-        const args = yield getArgs(compressionMethod, 'create');
-        try {
-            yield exec_1.exec(args, undefined, { cwd: archiveFolder });
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-        }
+        const commands = yield getCommands(compressionMethod, 'create');
+        yield execCommands(commands, archiveFolder);
     });
 }
 exports.createTar = createTar;
@@ -38589,7 +38597,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.setCacheHitOutput = exports.isExactKeyMatch = exports.isGhes = void 0;
+exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
 const cache = __importStar(__webpack_require__(692));
 const core = __importStar(__webpack_require__(470));
 const constants_1 = __webpack_require__(196);
@@ -38605,10 +38613,6 @@ function isExactKeyMatch(key, cacheKey) {
     }) === 0);
 }
 exports.isExactKeyMatch = isExactKeyMatch;
-function setCacheHitOutput(isCacheHit) {
-    core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString());
-}
-exports.setCacheHitOutput = setCacheHitOutput;
 function logWarning(message) {
     const warningPrefix = "[warning]";
     core.info(`${warningPrefix}${message}`);
@@ -47321,15 +47325,12 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         let compressionMethod = yield utils.getCompressionMethod();
         let archivePath = '';
         try {
-            try {
-                // path are needed to compute version
-                cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
-                    compressionMethod
-                });
-            }
-            catch (error) {
-                // This is to support the old cache entry created
-                // by the old version of the cache action on windows.
+            // path are needed to compute version
+            cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
+                compressionMethod
+            });
+            if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
+                // This is to support the old cache entry created by gzip on windows.
                 if (process.platform === 'win32' &&
                     compressionMethod !== constants_1.CompressionMethod.Gzip) {
                     compressionMethod = constants_1.CompressionMethod.Gzip;
@@ -47337,17 +47338,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
                     compressionMethod
                 });
                 if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-                    throw error;
+                    return undefined;
                 }
+                core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression.");
             }
             else {
-                throw error;
+                // Cache not found
+                return undefined;
             }
         }
-        if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-            // Cache not found
-            return undefined;
-        }
         archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
         core.debug(`Archive Path: ${archivePath}`);
         // Download the cache from the cache entry
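The `restoreCache` hunks, identical in each bundle, also change how the Windows gzip fallback is triggered: a missing entry is no longer surfaced as a thrown error to be caught, it is an ordinary empty lookup result, and only that case retries with gzip before returning `undefined`. A condensed TypeScript sketch of the new control flow (simplified from the vendored code; the `getCacheEntry` declaration here is a hypothetical stand-in for the internal HTTP client):

```typescript
import * as core from "@actions/core";

// Hypothetical stand-in for the internal cacheHttpClient.getCacheEntry.
declare function getCacheEntry(
    keys: string[],
    paths: string[],
    options: { compressionMethod: string }
): Promise<{ archiveLocation?: string } | null>;

// Condensed sketch of the new lookup flow, not the vendored code.
async function lookupWithGzipFallback(
    keys: string[],
    paths: string[],
    compressionMethod: string
): Promise<{ archiveLocation?: string } | undefined> {
    let entry = await getCacheEntry(keys, paths, { compressionMethod });
    if (!entry?.archiveLocation) {
        if (process.platform === "win32" && compressionMethod !== "gzip") {
            // Old caches on Windows were saved with gzip; retry once.
            entry = await getCacheEntry(keys, paths, { compressionMethod: "gzip" });
            if (!entry?.archiveLocation) {
                return undefined; // no entry under either compression
            }
            core.debug(
                "Couldn't find cache entry with zstd compression, falling back to gzip compression."
            );
        } else {
            // Cache not found
            return undefined;
        }
    }
    return entry ?? undefined;
}
```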
dist/save/index.js (vendored, 4531 lines changed): file diff suppressed because it is too large.
@ -317,7 +317,7 @@ After [deprecation](https://github.blog/changelog/2022-10-11-github-actions-depr
|
|||||||
### Bash shell
|
### Bash shell
|
||||||
```yaml
|
```yaml
|
||||||
- name: Get npm cache directory
|
- name: Get npm cache directory
|
||||||
id: npm-cache
|
id: npm-cache-dir
|
||||||
shell: bash
|
shell: bash
|
||||||
run: echo "dir=$(npm config get cache)" >> ${GITHUB_OUTPUT}
|
run: echo "dir=$(npm config get cache)" >> ${GITHUB_OUTPUT}
|
||||||
```
|
```
|
||||||
@@ -325,7 +325,7 @@ After [deprecation](https://github.blog/changelog/2022-10-11-github-actions-depr
 ### PWSH shell
 ```yaml
 - name: Get npm cache directory
-  id: npm-cache
+  id: npm-cache-dir
   shell: pwsh
   run: echo "dir=$(npm config get cache)" >> ${env:GITHUB_OUTPUT}
 ```
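Renaming the step id from `npm-cache` to `npm-cache-dir` matters for any later step that reads this step's output. A minimal consuming step, sketched under the assumption that a subsequent `actions/cache` step uses the captured directory (the key and restore-keys values are illustrative, not from this diff):

```yaml
- uses: actions/cache@v3
  id: npm-cache
  with:
    # read the directory captured above via the renamed step id
    path: ${{ steps.npm-cache-dir.outputs.dir }}
    key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
    restore-keys: |
      ${{ runner.os }}-node-
```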
package-lock.json (generated): 18 lines changed.
@@ -1,15 +1,15 @@
 {
   "name": "cache",
-  "version": "3.1.0-beta.2",
+  "version": "3.1.0-beta.3",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "cache",
-      "version": "3.1.0-beta.2",
+      "version": "3.1.0-beta.3",
       "license": "MIT",
       "dependencies": {
-        "@actions/cache": "3.1.0-beta.2",
+        "@actions/cache": "3.1.0-beta.3",
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.1.1",
         "@actions/io": "^1.1.2"
@@ -36,9 +36,9 @@
       }
     },
     "node_modules/@actions/cache": {
-      "version": "3.1.0-beta.2",
-      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.2.tgz",
-      "integrity": "sha512-xt9NLWPCh5WU9Z5ITeGpT5Nza/57wMXeLsGuNVcRCIVpPuNTf3Puj82vjZZQw4rGqiCCs+n4+hnkTcE9BKw2sw==",
+      "version": "3.1.0-beta.3",
+      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.3.tgz",
+      "integrity": "sha512-71S1vd0WKLbC2lAe04pCYqTLBjSa8gURtiqnVBCYAt8QVBjOfwa2D3ESf2m8K2xjUxman/Yimdp7CPJDyFnxZg==",
       "dependencies": {
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.0.1",
@@ -9722,9 +9722,9 @@
     },
     "dependencies": {
       "@actions/cache": {
-        "version": "3.1.0-beta.2",
-        "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.2.tgz",
-        "integrity": "sha512-xt9NLWPCh5WU9Z5ITeGpT5Nza/57wMXeLsGuNVcRCIVpPuNTf3Puj82vjZZQw4rGqiCCs+n4+hnkTcE9BKw2sw==",
+        "version": "3.1.0-beta.3",
+        "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.3.tgz",
+        "integrity": "sha512-71S1vd0WKLbC2lAe04pCYqTLBjSa8gURtiqnVBCYAt8QVBjOfwa2D3ESf2m8K2xjUxman/Yimdp7CPJDyFnxZg==",
         "requires": {
           "@actions/core": "^1.10.0",
           "@actions/exec": "^1.0.1",
@@ -1,6 +1,6 @@
 {
   "name": "cache",
-  "version": "3.1.0-beta.2",
+  "version": "3.1.0-beta.3",
   "private": true,
   "description": "Cache dependencies and build outputs",
   "main": "dist/restore/index.js",
@@ -23,7 +23,7 @@
   "author": "GitHub",
   "license": "MIT",
   "dependencies": {
-    "@actions/cache": "3.1.0-beta.2",
+    "@actions/cache": "3.1.0-beta.3",
     "@actions/core": "^1.10.0",
     "@actions/exec": "^1.1.1",
     "@actions/io": "^1.1.2"
@@ -1,9 +1,9 @@
-name: 'Restore Only Cache'
+name: 'Restore Cache'
 description: 'Restore Cache artifacts like dependencies and build outputs to improve workflow execution time'
 author: 'GitHub'
 inputs:
   path:
-    description: 'The same list of files, directories, and wildcard patterns to restore cache that were used while saving it'
+    description: 'A list of files, directories, and wildcard patterns to restore'
     required: true
   key:
     description: 'An explicit key for restoring the cache'
@@ -15,9 +15,9 @@ outputs:
   cache-hit:
     description: 'A boolean value to indicate an exact match was found for the primary key'
   cache-primary-key:
-    description: 'Cache primary key passed in the input to use in subsequent steps of the workflow'
+    description: 'A resolved cache key for which cache match was attempted'
   cache-restore-key:
-    description: 'Cache key restored'
+    description: 'Restore key which was used to restore the cache. It will not be set in case there was an exact match with primary key itself'
 runs:
   using: 'node16'
   main: '../dist/restore-only/index.js'
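These reworded outputs are what make the restore-only flavour composable: `cache-hit` drives conditional work, while `cache-primary-key` and `cache-restore-key` can be passed to a later save step. A usage sketch, assuming the sub-action is consumed by its repository path once released; the path, key, and follow-up step are illustrative:

```yaml
- name: Restore dependencies
  id: deps-restore
  uses: actions/cache/restore@v3 # assumed ref; use a release that ships the restore-only action
  with:
    path: node_modules
    key: deps-${{ runner.os }}-${{ hashFiles('**/package-lock.json') }}

- name: Install on cache miss
  # cache-hit is only 'true' on an exact primary-key match
  if: steps.deps-restore.outputs.cache-hit != 'true'
  run: npm ci
```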
@@ -1,4 +1,4 @@
-name: 'Save Only Cache'
+name: 'Save a cache'
 description: 'Save Cache artifacts like dependencies and build outputs to improve workflow execution time'
 author: 'GitHub'
 inputs:
@@ -10,7 +10,7 @@ async function restoreImpl(
 ): Promise<string | undefined> {
     try {
         if (!utils.isCacheFeatureAvailable()) {
-            utils.setCacheHitOutput(false);
+            core.setOutput(Outputs.CacheHit, "false");
             return;
         }
 
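Note that the output is now written as the string `"false"` directly via `core.setOutput` rather than through the removed helper, so workflow expressions should compare against strings, not booleans. An illustrative guard, with the step id assumed:

```yaml
- name: Rebuild when the cache was unavailable or missed
  if: steps.cache.outputs.cache-hit != 'true' # action outputs are strings, never booleans
  run: make build
```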
@@ -1,7 +1,7 @@
 import * as cache from "@actions/cache";
 import * as core from "@actions/core";
 
-import { Outputs, RefKey } from "../constants";
+import { RefKey } from "../constants";
 
 export function isGhes(): boolean {
     const ghUrl = new URL(
@@ -19,10 +19,6 @@ export function isExactKeyMatch(key: string, cacheKey?: string): boolean {
     );
 }
 
-export function setCacheHitOutput(isCacheHit: boolean): void {
-    core.setOutput(Outputs.CacheHit, isCacheHit.toString());
-}
-
 export function logWarning(message: string): void {
     const warningPrefix = "[warning]";
     core.info(`${warningPrefix}${message}`);
@@ -14,7 +14,7 @@ A cache today is immutable and cannot be updated. But some use cases require the
     restore-keys: |
       primes-${{ runner.os }}
 ```
-Please note that this will create a new cache on every run and hence will consume the cache [quota](#cache-limits).
+Please note that this will create a new cache on every run and hence will consume the cache [quota](./README.md#cache-limits).
 
 ## Use cache across feature branches
 Reusing cache across feature branches is not allowed today to provide cache [isolation](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache). However if both feature branches are from the default branch, a good way to achieve this is to ensure that the default branch has a cache. This cache will then be consumable by both feature branches.
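For context, the snippet in this hunk belongs to the workaround that forces a fresh cache on every run: a unique component in `key` guarantees a save, while `restore-keys` still pulls the newest previous cache. A sketch of the full step, assuming `${{ github.run_id }}` as the per-run uniquifier; the path and key prefix follow the snippet:

```yaml
- uses: actions/cache@v3
  with:
    path: prime-numbers
    # github.run_id is unique per run, so a new cache is saved each time
    key: primes-${{ runner.os }}-${{ github.run_id }}
    # fall back to the most recent cache saved by a previous run
    restore-keys: |
      primes-${{ runner.os }}
```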