From dc097e3bb908e8cf3bbab71f4f9c2c07c4faec68 Mon Sep 17 00:00:00 2001 From: "A. Pascual" Date: Sun, 11 Dec 2022 08:19:22 +0100 Subject: [PATCH 01/14] Update examples.md (#1026) According to the behavior description, the id of "Get npm cache directory" must be "npm-cache-dir". I checked it in my own project. --- examples.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples.md b/examples.md index fbaf32d..13741fb 100644 --- a/examples.md +++ b/examples.md @@ -317,7 +317,7 @@ After [deprecation](https://github.blog/changelog/2022-10-11-github-actions-depr ### Bash shell ```yaml - name: Get npm cache directory - id: npm-cache + id: npm-cache-dir shell: bash run: echo "dir=$(npm config get cache)" >> ${GITHUB_OUTPUT} ``` @@ -325,7 +325,7 @@ After [deprecation](https://github.blog/changelog/2022-10-11-github-actions-depr ### PWSH shell ```yaml - name: Get npm cache directory - id: npm-cache + id: npm-cache-dir shell: pwsh run: echo "dir=$(npm config get cache)" >> ${env:GITHUB_OUTPUT} ``` From ac25611caef967612169ab7e95533cf932c32270 Mon Sep 17 00:00:00 2001 From: teatimeguest Date: Sun, 11 Dec 2022 18:04:57 +0900 Subject: [PATCH 02/14] docs: fix an invalid link in workarounds.md (#929) --- tips-and-workarounds.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tips-and-workarounds.md b/tips-and-workarounds.md index ba0d27b..0777bed 100644 --- a/tips-and-workarounds.md +++ b/tips-and-workarounds.md @@ -14,7 +14,7 @@ A cache today is immutable and cannot be updated. But some use cases require the restore-keys: | primes-${{ runner.os }} ``` - Please note that this will create a new cache on every run and hence will consume the cache [quota](#cache-limits). + Please note that this will create a new cache on every run and hence will consume the cache [quota](./README.md#cache-limits). ## Use cache across feature branches Reusing cache across feature branches is not allowed today to provide cache [isolation](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache). However if both feature branches are from the default branch, a good way to achieve this is to ensure that the default branch has a cache. This cache will then be consumable by both feature branches. From 84ea3e177d6fd1d6d6ee3c633465f7b08f178f15 Mon Sep 17 00:00:00 2001 From: Sampark Sharma Date: Mon, 5 Dec 2022 10:33:45 +0000 Subject: [PATCH 03/14] Changes for beta release --- RELEASES.md | 3 +++ package-lock.json | 38 ++++++++++++++++++++------------------ package.json | 4 ++-- 3 files changed, 25 insertions(+), 20 deletions(-) diff --git a/RELEASES.md b/RELEASES.md index 507c9bd..f26c241 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -40,3 +40,6 @@ ### 3.0.11 - Update toolkit version to 3.0.5 to include `@actions/core@^1.10.0` - Update `@actions/cache` to use updated `saveState` and `setOutput` functions from `@actions/core@^1.10.0` + +### 3.1.0-beta.1 +- Update `@actions/cache` on Windows to use GNU tar and zstd by default, and fall back to BSD tar and zstd if GNU tar is not available.
([issue](https://github.com/actions/cache/issues/984)) diff --git a/package-lock.json b/package-lock.json index 95ea0a3..19758f1 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,15 +1,15 @@ { "name": "cache", - "version": "3.0.11", + "version": "3.1.0-beta.1", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "cache", - "version": "3.0.11", + "version": "3.1.0-beta.1", "license": "MIT", "dependencies": { - "@actions/cache": "^3.0.5", + "@actions/cache": "3.1.0-beta.1", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", "@actions/io": "^1.1.2" @@ -36,15 +36,16 @@ } }, "node_modules/@actions/cache": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.5.tgz", - "integrity": "sha512-0WpPmwnRPkn5k5ASmjoX8bY8NrZEPTwN+64nGYJmR/bHjEVgC8svdf5K956wi67tNJBGJky2+UfvNbUOtHmMHg==", + "version": "3.1.0-beta.1", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.1.tgz", + "integrity": "sha512-E+lNTJ4x1baOVHbhkkGK7JebxChMM/ogDSWIuDJsiPlpi7bzzL8RnKTk4zlZ3OYmWK8tF2/5QZMerg3rY4c/9A==", "dependencies": { "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", "@actions/glob": "^0.1.0", "@actions/http-client": "^2.0.1", "@actions/io": "^1.0.1", + "@azure/abort-controller": "^1.1.0", "@azure/ms-rest-js": "^2.6.0", "@azure/storage-blob": "^12.8.0", "semver": "^6.1.0", @@ -111,14 +112,14 @@ } }, "node_modules/@azure/abort-controller": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.0.4.tgz", - "integrity": "sha512-lNUmDRVGpanCsiUN3NWxFTdwmdFI53xwhkTFfHDGTYk46ca7Ind3nanJc+U6Zj9Tv+9nTCWRBscWEW1DyKOpTw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz", + "integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==", "dependencies": { - "tslib": "^2.0.0" + "tslib": "^2.2.0" }, "engines": { - "node": ">=8.0.0" + "node": ">=12.0.0" } }, "node_modules/@azure/abort-controller/node_modules/tslib": { @@ -9721,15 +9722,16 @@ }, "dependencies": { "@actions/cache": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.5.tgz", - "integrity": "sha512-0WpPmwnRPkn5k5ASmjoX8bY8NrZEPTwN+64nGYJmR/bHjEVgC8svdf5K956wi67tNJBGJky2+UfvNbUOtHmMHg==", + "version": "3.1.0-beta.1", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.1.tgz", + "integrity": "sha512-E+lNTJ4x1baOVHbhkkGK7JebxChMM/ogDSWIuDJsiPlpi7bzzL8RnKTk4zlZ3OYmWK8tF2/5QZMerg3rY4c/9A==", "requires": { "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", "@actions/glob": "^0.1.0", "@actions/http-client": "^2.0.1", "@actions/io": "^1.0.1", + "@azure/abort-controller": "^1.1.0", "@azure/ms-rest-js": "^2.6.0", "@azure/storage-blob": "^12.8.0", "semver": "^6.1.0", @@ -9792,11 +9794,11 @@ } }, "@azure/abort-controller": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.0.4.tgz", - "integrity": "sha512-lNUmDRVGpanCsiUN3NWxFTdwmdFI53xwhkTFfHDGTYk46ca7Ind3nanJc+U6Zj9Tv+9nTCWRBscWEW1DyKOpTw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz", + "integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==", "requires": { - "tslib": "^2.0.0" + "tslib": "^2.2.0" }, "dependencies": { "tslib": { diff --git a/package.json b/package.json index cd6d0f3..7b1a106 100644 --- 
a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "cache", - "version": "3.0.11", + "version": "3.1.0-beta.1", "private": true, "description": "Cache dependencies and build outputs", "main": "dist/restore/index.js", @@ -23,7 +23,7 @@ "author": "GitHub", "license": "MIT", "dependencies": { - "@actions/cache": "^3.0.5", + "@actions/cache": "3.1.0-beta.1", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", "@actions/io": "^1.1.2" From 9e9a19bf5f01a9938d544994d247e2a69c7dd84a Mon Sep 17 00:00:00 2001 From: Sampark Sharma Date: Mon, 5 Dec 2022 10:47:13 +0000 Subject: [PATCH 04/14] Update dist folder --- dist/restore/index.js | 741 ++++++++++++++---------------------------- dist/save/index.js | 741 ++++++++++++++---------------------------- 2 files changed, 504 insertions(+), 978 deletions(-) diff --git a/dist/restore/index.js b/dist/restore/index.js index 423d8ae..201288a 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -1177,10 +1177,6 @@ function getVersion(app) { // Use zstandard if possible to maximize cache performance function getCompressionMethod() { return __awaiter(this, void 0, void 0, function* () { - if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { - // Disable zstd due to bug https://github.com/actions/cache/issues/301 - return constants_1.CompressionMethod.Gzip; - } const versionOutput = yield getVersion('zstd'); const version = semver.clean(versionOutput); if (!versionOutput.toLowerCase().includes('zstd command line interface')) { @@ -1204,13 +1200,16 @@ function getCacheFileName(compressionMethod) { : constants_1.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; -function isGnuTarInstalled() { +function getGnuTarPathOnWindows() { return __awaiter(this, void 0, void 0, function* () { + if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { + return constants_1.GnuTarPathOnWindows; + } const versionOutput = yield getVersion('tar'); - return versionOutput.toLowerCase().includes('gnu tar'); + return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; }); } -exports.isGnuTarInstalled = isGnuTarInstalled; +exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; function assertDefined(name, value) { if (value === undefined) { throw Error(`Expected ${name} but value was undefiend`); @@ -3046,19 +3045,18 @@ exports.default = _default; /***/ }), /* 105 */, /* 106 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ (function(__unusedmodule, exports) { "use strict"; Object.defineProperty(exports, '__esModule', { value: true }); -var tslib = __webpack_require__(640); - // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -var listenersMap = new WeakMap(); -var abortedMap = new WeakMap(); +/// +const listenersMap = new WeakMap(); +const abortedMap = new WeakMap(); /** * An aborter instance implements AbortSignal interface, can abort HTTP requests. * @@ -3072,8 +3070,8 @@ var abortedMap = new WeakMap(); * await doAsyncWork(AbortSignal.none); * ``` */ -var AbortSignal = /** @class */ (function () { - function AbortSignal() { +class AbortSignal { + constructor() { /** * onabort event listener. */ @@ -3081,74 +3079,65 @@ var AbortSignal = /** @class */ (function () { listenersMap.set(this, []); abortedMap.set(this, false); } - Object.defineProperty(AbortSignal.prototype, "aborted", { - /** - * Status of whether aborted or not. 
- * - * @readonly - */ - get: function () { - if (!abortedMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); - } - return abortedMap.get(this); - }, - enumerable: false, - configurable: true - }); - Object.defineProperty(AbortSignal, "none", { - /** - * Creates a new AbortSignal instance that will never be aborted. - * - * @readonly - */ - get: function () { - return new AbortSignal(); - }, - enumerable: false, - configurable: true - }); + /** + * Status of whether aborted or not. + * + * @readonly + */ + get aborted() { + if (!abortedMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + return abortedMap.get(this); + } + /** + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + static get none() { + return new AbortSignal(); + } /** * Added new "abort" event listener, only support "abort" event. * * @param _type - Only support "abort" event * @param listener - The listener to be added */ - AbortSignal.prototype.addEventListener = function ( + addEventListener( // tslint:disable-next-line:variable-name _type, listener) { if (!listenersMap.has(this)) { throw new TypeError("Expected `this` to be an instance of AbortSignal."); } - var listeners = listenersMap.get(this); + const listeners = listenersMap.get(this); listeners.push(listener); - }; + } /** * Remove "abort" event listener, only support "abort" event. * * @param _type - Only support "abort" event * @param listener - The listener to be removed */ - AbortSignal.prototype.removeEventListener = function ( + removeEventListener( // tslint:disable-next-line:variable-name _type, listener) { if (!listenersMap.has(this)) { throw new TypeError("Expected `this` to be an instance of AbortSignal."); } - var listeners = listenersMap.get(this); - var index = listeners.indexOf(listener); + const listeners = listenersMap.get(this); + const index = listeners.indexOf(listener); if (index > -1) { listeners.splice(index, 1); } - }; + } /** * Dispatches a synthetic event to the AbortSignal. */ - AbortSignal.prototype.dispatchEvent = function (_event) { + dispatchEvent(_event) { throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); - }; - return AbortSignal; -}()); + } +} /** * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. * Will try to trigger abort event for all linked AbortSignal nodes. @@ -3166,12 +3155,12 @@ function abortSignal(signal) { if (signal.onabort) { signal.onabort.call(signal); } - var listeners = listenersMap.get(signal); + const listeners = listenersMap.get(signal); if (listeners) { // Create a copy of listeners so mutations to the array // (e.g. via removeListener calls) don't affect the listeners // we invoke. 
- listeners.slice().forEach(function (listener) { + listeners.slice().forEach((listener) => { listener.call(signal, { type: "abort" }); }); } @@ -3197,15 +3186,12 @@ function abortSignal(signal) { * } * ``` */ -var AbortError = /** @class */ (function (_super) { - tslib.__extends(AbortError, _super); - function AbortError(message) { - var _this = _super.call(this, message) || this; - _this.name = "AbortError"; - return _this; +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; } - return AbortError; -}(Error)); +} /** * An AbortController provides an AbortSignal and the associated controls to signal * that an asynchronous operation should be aborted. @@ -3240,10 +3226,9 @@ var AbortError = /** @class */ (function (_super) { * await doAsyncWork(aborter.withTimeout(25 * 1000)); * ``` */ -var AbortController = /** @class */ (function () { +class AbortController { // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types - function AbortController(parentSignals) { - var _this = this; + constructor(parentSignals) { this._signal = new AbortSignal(); if (!parentSignals) { return; @@ -3253,8 +3238,7 @@ var AbortController = /** @class */ (function () { // eslint-disable-next-line prefer-rest-params parentSignals = arguments; } - for (var _i = 0, parentSignals_1 = parentSignals; _i < parentSignals_1.length; _i++) { - var parentSignal = parentSignals_1[_i]; + for (const parentSignal of parentSignals) { // if the parent signal has already had abort() called, // then call abort on this signal as well. if (parentSignal.aborted) { @@ -3262,47 +3246,42 @@ var AbortController = /** @class */ (function () { } else { // when the parent signal aborts, this signal should as well. - parentSignal.addEventListener("abort", function () { - _this.abort(); + parentSignal.addEventListener("abort", () => { + this.abort(); }); } } } - Object.defineProperty(AbortController.prototype, "signal", { - /** - * The AbortSignal associated with this controller that will signal aborted - * when the abort method is called on this controller. - * - * @readonly - */ - get: function () { - return this._signal; - }, - enumerable: false, - configurable: true - }); + /** + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly + */ + get signal() { + return this._signal; + } /** * Signal that any operations passed this controller's associated abort signal * to cancel any remaining work and throw an `AbortError`. */ - AbortController.prototype.abort = function () { + abort() { abortSignal(this._signal); - }; + } /** * Creates a new AbortSignal instance that will abort after the provided ms. * @param ms - Elapsed time in milliseconds to trigger an abort. */ - AbortController.timeout = function (ms) { - var signal = new AbortSignal(); - var timer = setTimeout(abortSignal, ms, signal); + static timeout(ms) { + const signal = new AbortSignal(); + const timer = setTimeout(abortSignal, ms, signal); // Prevent the active Timer from keeping the Node.js event loop active. 
if (typeof timer.unref === "function") { timer.unref(); } return signal; - }; - return AbortController; -}()); + } +} exports.AbortController = AbortController; exports.AbortError = AbortError; @@ -38055,21 +38034,19 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const constants_1 = __webpack_require__(931); const IS_WINDOWS = process.platform === 'win32'; -function getTarPath(args, compressionMethod) { +// Function also mutates the args array. For non-mutation call with passing an empty array. +function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { - const systemTar = `${process.env['windir']}\\System32\\tar.exe`; - if (compressionMethod !== constants_1.CompressionMethod.Gzip) { - // We only use zstandard compression on windows when gnu tar is installed due to - // a bug with compressing large files with bsdtar + zstd - args.push('--force-local'); + const gnuTar = yield utils.getGnuTarPathOnWindows(); + const systemTar = constants_1.SystemTarPathOnWindows; + if (gnuTar) { + // Use GNUtar as default on windows + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; } else if (fs_1.existsSync(systemTar)) { - return systemTar; - } - else if (yield utils.isGnuTarInstalled()) { - args.push('--force-local'); + return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; } break; } @@ -38077,24 +38054,83 @@ function getTarPath(args, compressionMethod) { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 - args.push('--delay-directory-restore'); - return gnuTar; + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } + else { + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.BSD + }; } - break; } default: break; } - return yield io.which('tar', true); + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.GNU + }; }); } -function execTar(args, compressionMethod, cwd) { +// Return arguments for tar as per tarPath, compressionMethod, method type and os +function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { - try { - yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); + const args = [`"${tarPath.path}"`]; + const cacheFileName = utils.getCacheFileName(compressionMethod); + const tarFile = 'cache.tar'; + const workingDirectory = getWorkingDirectory(); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + // Method specific args + switch (type) { + case 'create': + args.push('--posix', '-cf', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); + break; + case 'extract': + args.push('-xf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); + break; + case 'list': + args.push('-tf', BSD_TAR_ZSTD + ? 
tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); + break; } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + // Platform specific args + if (tarPath.type === constants_1.ArchiveToolType.GNU) { + switch (process.platform) { + case 'win32': + args.push('--force-local'); + break; + case 'darwin': + args.push('--delay-directory-restore'); + break; + } + } + return args; + }); +} +function getArgs(compressionMethod, type, archivePath = '') { + return __awaiter(this, void 0, void 0, function* () { + const tarPath = yield getTarPath(); + const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); + const compressionArgs = type !== 'create' + ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) + : yield getCompressionProgram(tarPath, compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + if (BSD_TAR_ZSTD && type !== 'create') { + return [...compressionArgs, ...tarArgs].join(' '); + } + else { + return [...tarArgs, ...compressionArgs].join(' '); } }); } @@ -38103,32 +38139,89 @@ function getWorkingDirectory() { return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method -function getCompressionProgram(compressionMethod) { - // -d: Decompress. - // unzstd is equivalent to 'zstd -d' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; - default: - return ['-z']; - } +function getDecompressionProgram(tarPath, compressionMethod, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --long=30 -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '&&' + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -d -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '&&' + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; + default: + return ['-z']; + } + }); +} +// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. +// zstdmt is equivalent to 'zstd -T0' +// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. 
+// Using 30 here because we also support 32-bit self-hosted runners. +// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. +function getCompressionProgram(tarPath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const cacheFileName = utils.getCacheFileName(compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + '&&', + 'zstd -T0 --long=30 -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + '&&', + 'zstd -T0 -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; + default: + return ['-z']; + } + }); } function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { - const args = [ - ...getCompressionProgram(compressionMethod), - '-tf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P' - ]; - yield execTar(args, compressionMethod); + const args = yield getArgs(compressionMethod, 'list', archivePath); + try { + yield exec_1.exec(args); + } + catch (error) { + throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + } }); } exports.listTar = listTar; @@ -38137,57 +38230,27 @@ function extractTar(archivePath, compressionMethod) { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const args = [ - ...getCompressionProgram(compressionMethod), - '-xf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ]; - yield execTar(args, compressionMethod); + const args = yield getArgs(compressionMethod, 'extract', archivePath); + try { + yield exec_1.exec(args); + } + catch (error) { + throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + } }); } exports.extractTar = extractTar; function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits - const manifestFilename = 'manifest.txt'; - const cacheFileName = utils.getCacheFileName(compressionMethod); - fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); - const workingDirectory = getWorkingDirectory(); - // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. - // zstdmt is equivalent to 'zstd -T0' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 
'zstd -T0 --long=30' : 'zstdmt --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt']; - default: - return ['-z']; - } - } - const args = [ - '--posix', - ...getCompressionProgram(), - '-cf', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--exclude', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--files-from', - manifestFilename - ]; - yield execTar(args, compressionMethod, archiveFolder); + fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); + const args = yield getArgs(compressionMethod, 'create'); + try { + yield exec_1.exec(args, undefined, { cwd: archiveFolder }); + } + catch (error) { + throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + } }); } exports.createTar = createTar; @@ -44126,318 +44189,7 @@ exports.default = _default; /***/ }), -/* 640 */ -/***/ (function(module) { - -/*! ***************************************************************************** -Copyright (c) Microsoft Corporation. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. -***************************************************************************** */ -/* global global, define, System, Reflect, Promise */ -var __extends; -var __assign; -var __rest; -var __decorate; -var __param; -var __metadata; -var __awaiter; -var __generator; -var __exportStar; -var __values; -var __read; -var __spread; -var __spreadArrays; -var __spreadArray; -var __await; -var __asyncGenerator; -var __asyncDelegator; -var __asyncValues; -var __makeTemplateObject; -var __importStar; -var __importDefault; -var __classPrivateFieldGet; -var __classPrivateFieldSet; -var __createBinding; -(function (factory) { - var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; - if (typeof define === "function" && define.amd) { - define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); - } - else if ( true && typeof module.exports === "object") { - factory(createExporter(root, createExporter(module.exports))); - } - else { - factory(createExporter(root)); - } - function createExporter(exports, previous) { - if (exports !== root) { - if (typeof Object.create === "function") { - Object.defineProperty(exports, "__esModule", { value: true }); - } - else { - exports.__esModule = true; - } - } - return function (id, v) { return exports[id] = previous ? 
previous(id, v) : v; }; - } -}) -(function (exporter) { - var extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; - - __extends = function (d, b) { - if (typeof b !== "function" && b !== null) - throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); - }; - - __assign = Object.assign || function (t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - - __rest = function (s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; - } - return t; - }; - - __decorate = function (decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); - else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - - __param = function (paramIndex, decorator) { - return function (target, key) { decorator(target, key, paramIndex); } - }; - - __metadata = function (metadataKey, metadataValue) { - if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); - }; - - __awaiter = function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - - __generator = function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (_) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } - }; - - __exportStar = function(m, o) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); - }; - - __createBinding = Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); - }) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; - }); - - __values = function (o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function () { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); - }; - - __read = function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; - }; - - /** @deprecated */ - __spread = function () { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; - }; - - /** @deprecated */ - __spreadArrays = function () { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; - }; - - __spreadArray = function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); - }; - - __await = function (v) { - return this instanceof __await ? 
(this.v = v, this) : new __await(v); - }; - - __asyncGenerator = function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } - }; - - __asyncDelegator = function (o) { - var i, p; - return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; - function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } - }; - - __asyncValues = function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } - }; - - __makeTemplateObject = function (cooked, raw) { - if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } - return cooked; - }; - - var __setModuleDefault = Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }; - - __importStar = function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; - }; - - __importDefault = function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; - }; - - __classPrivateFieldGet = function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); - }; - - __classPrivateFieldSet = function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; - }; - - exporter("__extends", __extends); - exporter("__assign", __assign); - exporter("__rest", __rest); - exporter("__decorate", __decorate); - exporter("__param", __param); - exporter("__metadata", __metadata); - exporter("__awaiter", __awaiter); - exporter("__generator", __generator); - exporter("__exportStar", __exportStar); - exporter("__createBinding", __createBinding); - exporter("__values", __values); - exporter("__read", __read); - exporter("__spread", __spread); - exporter("__spreadArrays", __spreadArrays); - exporter("__spreadArray", __spreadArray); - exporter("__await", __await); - exporter("__asyncGenerator", __asyncGenerator); - exporter("__asyncDelegator", __asyncDelegator); - exporter("__asyncValues", __asyncValues); - exporter("__makeTemplateObject", __makeTemplateObject); - exporter("__importStar", __importStar); - exporter("__importDefault", __importDefault); - exporter("__classPrivateFieldGet", __classPrivateFieldGet); - exporter("__classPrivateFieldSet", __classPrivateFieldSet); -}); - - -/***/ }), +/* 640 */, /* 641 */, /* 642 */, /* 643 */, @@ -53485,6 +53237,11 @@ var CompressionMethod; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); +var ArchiveToolType; +(function (ArchiveToolType) { + ArchiveToolType["GNU"] = "gnu"; + ArchiveToolType["BSD"] = "bsd"; +})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. @@ -53493,6 +53250,12 @@ exports.DefaultRetryDelay = 5000; // over the socket during this period, the socket is destroyed and the download // is aborted. 
exports.SocketTimeout = 5000; +// The default path of GNUtar on hosted Windows runners +exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; +// The default path of BSDtar on hosted Windows runners +exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; +exports.TarFilename = 'cache.tar'; +exports.ManifestFilename = 'manifest.txt'; //# sourceMappingURL=constants.js.map /***/ }), diff --git a/dist/save/index.js b/dist/save/index.js index f413dd0..f861a7c 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -1177,10 +1177,6 @@ function getVersion(app) { // Use zstandard if possible to maximize cache performance function getCompressionMethod() { return __awaiter(this, void 0, void 0, function* () { - if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { - // Disable zstd due to bug https://github.com/actions/cache/issues/301 - return constants_1.CompressionMethod.Gzip; - } const versionOutput = yield getVersion('zstd'); const version = semver.clean(versionOutput); if (!versionOutput.toLowerCase().includes('zstd command line interface')) { @@ -1204,13 +1200,16 @@ function getCacheFileName(compressionMethod) { : constants_1.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; -function isGnuTarInstalled() { +function getGnuTarPathOnWindows() { return __awaiter(this, void 0, void 0, function* () { + if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { + return constants_1.GnuTarPathOnWindows; + } const versionOutput = yield getVersion('tar'); - return versionOutput.toLowerCase().includes('gnu tar'); + return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; }); } -exports.isGnuTarInstalled = isGnuTarInstalled; +exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; function assertDefined(name, value) { if (value === undefined) { throw Error(`Expected ${name} but value was undefiend`); @@ -3046,19 +3045,18 @@ exports.default = _default; /***/ }), /* 105 */, /* 106 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ (function(__unusedmodule, exports) { "use strict"; Object.defineProperty(exports, '__esModule', { value: true }); -var tslib = __webpack_require__(640); - // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -var listenersMap = new WeakMap(); -var abortedMap = new WeakMap(); +/// +const listenersMap = new WeakMap(); +const abortedMap = new WeakMap(); /** * An aborter instance implements AbortSignal interface, can abort HTTP requests. * @@ -3072,8 +3070,8 @@ var abortedMap = new WeakMap(); * await doAsyncWork(AbortSignal.none); * ``` */ -var AbortSignal = /** @class */ (function () { - function AbortSignal() { +class AbortSignal { + constructor() { /** * onabort event listener. */ @@ -3081,74 +3079,65 @@ var AbortSignal = /** @class */ (function () { listenersMap.set(this, []); abortedMap.set(this, false); } - Object.defineProperty(AbortSignal.prototype, "aborted", { - /** - * Status of whether aborted or not. - * - * @readonly - */ - get: function () { - if (!abortedMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); - } - return abortedMap.get(this); - }, - enumerable: false, - configurable: true - }); - Object.defineProperty(AbortSignal, "none", { - /** - * Creates a new AbortSignal instance that will never be aborted. 
- * - * @readonly - */ - get: function () { - return new AbortSignal(); - }, - enumerable: false, - configurable: true - }); + /** + * Status of whether aborted or not. + * + * @readonly + */ + get aborted() { + if (!abortedMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + return abortedMap.get(this); + } + /** + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + static get none() { + return new AbortSignal(); + } /** * Added new "abort" event listener, only support "abort" event. * * @param _type - Only support "abort" event * @param listener - The listener to be added */ - AbortSignal.prototype.addEventListener = function ( + addEventListener( // tslint:disable-next-line:variable-name _type, listener) { if (!listenersMap.has(this)) { throw new TypeError("Expected `this` to be an instance of AbortSignal."); } - var listeners = listenersMap.get(this); + const listeners = listenersMap.get(this); listeners.push(listener); - }; + } /** * Remove "abort" event listener, only support "abort" event. * * @param _type - Only support "abort" event * @param listener - The listener to be removed */ - AbortSignal.prototype.removeEventListener = function ( + removeEventListener( // tslint:disable-next-line:variable-name _type, listener) { if (!listenersMap.has(this)) { throw new TypeError("Expected `this` to be an instance of AbortSignal."); } - var listeners = listenersMap.get(this); - var index = listeners.indexOf(listener); + const listeners = listenersMap.get(this); + const index = listeners.indexOf(listener); if (index > -1) { listeners.splice(index, 1); } - }; + } /** * Dispatches a synthetic event to the AbortSignal. */ - AbortSignal.prototype.dispatchEvent = function (_event) { + dispatchEvent(_event) { throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); - }; - return AbortSignal; -}()); + } +} /** * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. * Will try to trigger abort event for all linked AbortSignal nodes. @@ -3166,12 +3155,12 @@ function abortSignal(signal) { if (signal.onabort) { signal.onabort.call(signal); } - var listeners = listenersMap.get(signal); + const listeners = listenersMap.get(signal); if (listeners) { // Create a copy of listeners so mutations to the array // (e.g. via removeListener calls) don't affect the listeners // we invoke. - listeners.slice().forEach(function (listener) { + listeners.slice().forEach((listener) => { listener.call(signal, { type: "abort" }); }); } @@ -3197,15 +3186,12 @@ function abortSignal(signal) { * } * ``` */ -var AbortError = /** @class */ (function (_super) { - tslib.__extends(AbortError, _super); - function AbortError(message) { - var _this = _super.call(this, message) || this; - _this.name = "AbortError"; - return _this; +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; } - return AbortError; -}(Error)); +} /** * An AbortController provides an AbortSignal and the associated controls to signal * that an asynchronous operation should be aborted. 
@@ -3240,10 +3226,9 @@ var AbortError = /** @class */ (function (_super) { * await doAsyncWork(aborter.withTimeout(25 * 1000)); * ``` */ -var AbortController = /** @class */ (function () { +class AbortController { // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types - function AbortController(parentSignals) { - var _this = this; + constructor(parentSignals) { this._signal = new AbortSignal(); if (!parentSignals) { return; @@ -3253,8 +3238,7 @@ var AbortController = /** @class */ (function () { // eslint-disable-next-line prefer-rest-params parentSignals = arguments; } - for (var _i = 0, parentSignals_1 = parentSignals; _i < parentSignals_1.length; _i++) { - var parentSignal = parentSignals_1[_i]; + for (const parentSignal of parentSignals) { // if the parent signal has already had abort() called, // then call abort on this signal as well. if (parentSignal.aborted) { @@ -3262,47 +3246,42 @@ var AbortController = /** @class */ (function () { } else { // when the parent signal aborts, this signal should as well. - parentSignal.addEventListener("abort", function () { - _this.abort(); + parentSignal.addEventListener("abort", () => { + this.abort(); }); } } } - Object.defineProperty(AbortController.prototype, "signal", { - /** - * The AbortSignal associated with this controller that will signal aborted - * when the abort method is called on this controller. - * - * @readonly - */ - get: function () { - return this._signal; - }, - enumerable: false, - configurable: true - }); + /** + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly + */ + get signal() { + return this._signal; + } /** * Signal that any operations passed this controller's associated abort signal * to cancel any remaining work and throw an `AbortError`. */ - AbortController.prototype.abort = function () { + abort() { abortSignal(this._signal); - }; + } /** * Creates a new AbortSignal instance that will abort after the provided ms. * @param ms - Elapsed time in milliseconds to trigger an abort. */ - AbortController.timeout = function (ms) { - var signal = new AbortSignal(); - var timer = setTimeout(abortSignal, ms, signal); + static timeout(ms) { + const signal = new AbortSignal(); + const timer = setTimeout(abortSignal, ms, signal); // Prevent the active Timer from keeping the Node.js event loop active. if (typeof timer.unref === "function") { timer.unref(); } return signal; - }; - return AbortController; -}()); + } +} exports.AbortController = AbortController; exports.AbortError = AbortError; @@ -38055,21 +38034,19 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const constants_1 = __webpack_require__(931); const IS_WINDOWS = process.platform === 'win32'; -function getTarPath(args, compressionMethod) { +// Function also mutates the args array. For non-mutation call with passing an empty array. 
+function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { - const systemTar = `${process.env['windir']}\\System32\\tar.exe`; - if (compressionMethod !== constants_1.CompressionMethod.Gzip) { - // We only use zstandard compression on windows when gnu tar is installed due to - // a bug with compressing large files with bsdtar + zstd - args.push('--force-local'); + const gnuTar = yield utils.getGnuTarPathOnWindows(); + const systemTar = constants_1.SystemTarPathOnWindows; + if (gnuTar) { + // Use GNUtar as default on windows + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; } else if (fs_1.existsSync(systemTar)) { - return systemTar; - } - else if (yield utils.isGnuTarInstalled()) { - args.push('--force-local'); + return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; } break; } @@ -38077,24 +38054,83 @@ function getTarPath(args, compressionMethod) { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 - args.push('--delay-directory-restore'); - return gnuTar; + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } + else { + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.BSD + }; } - break; } default: break; } - return yield io.which('tar', true); + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.GNU + }; }); } -function execTar(args, compressionMethod, cwd) { +// Return arguments for tar as per tarPath, compressionMethod, method type and os +function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { - try { - yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); + const args = [`"${tarPath.path}"`]; + const cacheFileName = utils.getCacheFileName(compressionMethod); + const tarFile = 'cache.tar'; + const workingDirectory = getWorkingDirectory(); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + // Method specific args + switch (type) { + case 'create': + args.push('--posix', '-cf', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); + break; + case 'extract': + args.push('-xf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); + break; + case 'list': + args.push('-tf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); + break; } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); + // Platform specific args + if (tarPath.type === constants_1.ArchiveToolType.GNU) { + switch (process.platform) { + case 'win32': + args.push('--force-local'); + break; + case 'darwin': + args.push('--delay-directory-restore'); + break; + } + } + return args; + }); +} +function getArgs(compressionMethod, type, archivePath = '') { + return __awaiter(this, void 0, void 0, function* () { + const tarPath = yield getTarPath(); + const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); + const compressionArgs = type !== 'create' + ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) + : yield getCompressionProgram(tarPath, compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + if (BSD_TAR_ZSTD && type !== 'create') { + return [...compressionArgs, ...tarArgs].join(' '); + } + else { + return [...tarArgs, ...compressionArgs].join(' '); } }); } @@ -38103,32 +38139,89 @@ function getWorkingDirectory() { return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method -function getCompressionProgram(compressionMethod) { - // -d: Decompress. - // unzstd is equivalent to 'zstd -d' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; - default: - return ['-z']; - } +function getDecompressionProgram(tarPath, compressionMethod, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --long=30 -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '&&' + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -d -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '&&' + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; + default: + return ['-z']; + } + }); +} +// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. +// zstdmt is equivalent to 'zstd -T0' +// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. +// Using 30 here because we also support 32-bit self-hosted runners. +// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. 
+function getCompressionProgram(tarPath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const cacheFileName = utils.getCacheFileName(compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + '&&', + 'zstd -T0 --long=30 -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + '&&', + 'zstd -T0 -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; + default: + return ['-z']; + } + }); } function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { - const args = [ - ...getCompressionProgram(compressionMethod), - '-tf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P' - ]; - yield execTar(args, compressionMethod); + const args = yield getArgs(compressionMethod, 'list', archivePath); + try { + yield exec_1.exec(args); + } + catch (error) { + throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + } }); } exports.listTar = listTar; @@ -38137,57 +38230,27 @@ function extractTar(archivePath, compressionMethod) { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const args = [ - ...getCompressionProgram(compressionMethod), - '-xf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ]; - yield execTar(args, compressionMethod); + const args = yield getArgs(compressionMethod, 'extract', archivePath); + try { + yield exec_1.exec(args); + } + catch (error) { + throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + } }); } exports.extractTar = extractTar; function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits - const manifestFilename = 'manifest.txt'; - const cacheFileName = utils.getCacheFileName(compressionMethod); - fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); - const workingDirectory = getWorkingDirectory(); - // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. - // zstdmt is equivalent to 'zstd -T0' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 
'zstd -T0' : 'zstdmt']; - default: - return ['-z']; - } - } - const args = [ - '--posix', - ...getCompressionProgram(), - '-cf', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--exclude', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--files-from', - manifestFilename - ]; - yield execTar(args, compressionMethod, archiveFolder); + fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); + const args = yield getArgs(compressionMethod, 'create'); + try { + yield exec_1.exec(args, undefined, { cwd: archiveFolder }); + } + catch (error) { + throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + } }); } exports.createTar = createTar; @@ -44126,318 +44189,7 @@ exports.default = _default; /***/ }), -/* 640 */ -/***/ (function(module) { - -/*! ***************************************************************************** -Copyright (c) Microsoft Corporation. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. -***************************************************************************** */ -/* global global, define, System, Reflect, Promise */ -var __extends; -var __assign; -var __rest; -var __decorate; -var __param; -var __metadata; -var __awaiter; -var __generator; -var __exportStar; -var __values; -var __read; -var __spread; -var __spreadArrays; -var __spreadArray; -var __await; -var __asyncGenerator; -var __asyncDelegator; -var __asyncValues; -var __makeTemplateObject; -var __importStar; -var __importDefault; -var __classPrivateFieldGet; -var __classPrivateFieldSet; -var __createBinding; -(function (factory) { - var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; - if (typeof define === "function" && define.amd) { - define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); - } - else if ( true && typeof module.exports === "object") { - factory(createExporter(root, createExporter(module.exports))); - } - else { - factory(createExporter(root)); - } - function createExporter(exports, previous) { - if (exports !== root) { - if (typeof Object.create === "function") { - Object.defineProperty(exports, "__esModule", { value: true }); - } - else { - exports.__esModule = true; - } - } - return function (id, v) { return exports[id] = previous ? 
previous(id, v) : v; }; - } -}) -(function (exporter) { - var extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; - - __extends = function (d, b) { - if (typeof b !== "function" && b !== null) - throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); - }; - - __assign = Object.assign || function (t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - - __rest = function (s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; - } - return t; - }; - - __decorate = function (decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); - else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - - __param = function (paramIndex, decorator) { - return function (target, key) { decorator(target, key, paramIndex); } - }; - - __metadata = function (metadataKey, metadataValue) { - if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); - }; - - __awaiter = function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - - __generator = function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (_) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } - }; - - __exportStar = function(m, o) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); - }; - - __createBinding = Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); - }) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; - }); - - __values = function (o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function () { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); - }; - - __read = function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; - }; - - /** @deprecated */ - __spread = function () { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; - }; - - /** @deprecated */ - __spreadArrays = function () { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; - }; - - __spreadArray = function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); - }; - - __await = function (v) { - return this instanceof __await ? 
(this.v = v, this) : new __await(v); - }; - - __asyncGenerator = function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } - }; - - __asyncDelegator = function (o) { - var i, p; - return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; - function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } - }; - - __asyncValues = function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } - }; - - __makeTemplateObject = function (cooked, raw) { - if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } - return cooked; - }; - - var __setModuleDefault = Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }; - - __importStar = function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; - }; - - __importDefault = function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; - }; - - __classPrivateFieldGet = function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); - }; - - __classPrivateFieldSet = function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; - }; - - exporter("__extends", __extends); - exporter("__assign", __assign); - exporter("__rest", __rest); - exporter("__decorate", __decorate); - exporter("__param", __param); - exporter("__metadata", __metadata); - exporter("__awaiter", __awaiter); - exporter("__generator", __generator); - exporter("__exportStar", __exportStar); - exporter("__createBinding", __createBinding); - exporter("__values", __values); - exporter("__read", __read); - exporter("__spread", __spread); - exporter("__spreadArrays", __spreadArrays); - exporter("__spreadArray", __spreadArray); - exporter("__await", __await); - exporter("__asyncGenerator", __asyncGenerator); - exporter("__asyncDelegator", __asyncDelegator); - exporter("__asyncValues", __asyncValues); - exporter("__makeTemplateObject", __makeTemplateObject); - exporter("__importStar", __importStar); - exporter("__importDefault", __importDefault); - exporter("__classPrivateFieldGet", __classPrivateFieldGet); - exporter("__classPrivateFieldSet", __classPrivateFieldSet); -}); - - -/***/ }), +/* 640 */, /* 641 */, /* 642 */, /* 643 */, @@ -53488,6 +53240,11 @@ var CompressionMethod; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); +var ArchiveToolType; +(function (ArchiveToolType) { + ArchiveToolType["GNU"] = "gnu"; + ArchiveToolType["BSD"] = "bsd"; +})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. @@ -53496,6 +53253,12 @@ exports.DefaultRetryDelay = 5000; // over the socket during this period, the socket is destroyed and the download // is aborted. exports.SocketTimeout = 5000; +// The default path of GNUtar on hosted Windows runners +exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; +// The default path of BSDtar on hosted Windows runners +exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; +exports.TarFilename = 'cache.tar'; +exports.ManifestFilename = 'manifest.txt'; //# sourceMappingURL=constants.js.map /***/ }), From 5a2b5e5714b3b3a54e5734ee8f6d996d0ee9c16a Mon Sep 17 00:00:00 2001 From: Sampark Sharma Date: Thu, 8 Dec 2022 09:54:44 +0000 Subject: [PATCH 05/14] Add support for gzip fallback for restore of old cache on windows --- RELEASES.md | 3 +++ dist/restore/index.js | 31 ++++++++++++++++++++++++++----- dist/save/index.js | 31 ++++++++++++++++++++++++++----- package-lock.json | 18 +++++++++--------- package.json | 4 ++-- 5 files changed, 66 insertions(+), 21 deletions(-) diff --git a/RELEASES.md b/RELEASES.md index f26c241..02d5daa 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -43,3 +43,6 @@ ### 3.1.0-beta.1 - Update `@actions/cache` on windows to use gnu tar and zstd by default and fallback to bsdtar and zstd if gnu tar is not available. ([issue](https://github.com/actions/cache/issues/984)) + +### 3.1.0-beta.2 +- Added support for fallback to gzip to restore old caches on windows. 
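Note on the gzip fallback introduced below: caches written by older versions of this action on Windows were compressed with gzip, while the current code prefers zstd, so the restore path now retries the cache-entry lookup with gzip before reporting a miss. The following is a minimal TypeScript sketch of that control flow only; `getEntry` and the `CacheEntry` shape are hypothetical stand-ins for the toolkit's cache-entry lookup and are not the real `@actions/cache` internals, which live in the bundled `restoreCache` changes in the diff below.

```ts
type CacheEntry = { archiveLocation?: string } | null;

// getEntry stands in for the toolkit's lookup, which folds the compression method
// into the cache version it queries for.
async function findEntryWithGzipFallback(
    getEntry: (compression: "zstd" | "gzip") => Promise<CacheEntry>
): Promise<{ entry: CacheEntry; compression: "zstd" | "gzip" } | undefined> {
    let compression: "zstd" | "gzip" = "zstd";
    let entry = await getEntry(compression);
    if (!entry?.archiveLocation && process.platform === "win32") {
        // Old Windows caches were saved with gzip, so retry the lookup once with gzip.
        compression = "gzip";
        entry = await getEntry(compression);
    }
    if (!entry?.archiveLocation) {
        return undefined; // genuine cache miss
    }
    return { entry, compression };
}
```

The important design point is that the fallback changes the compression method for the rest of the restore as well, so the downloaded archive is then also extracted as a gzip tarball.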
diff --git a/dist/restore/index.js b/dist/restore/index.js index 201288a..02411fb 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -47081,6 +47081,7 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const cacheHttpClient = __importStar(__webpack_require__(114)); const tar_1 = __webpack_require__(434); +const constants_1 = __webpack_require__(931); class ValidationError extends Error { constructor(message) { super(message); @@ -47142,13 +47143,33 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { for (const key of keys) { checkKey(key); } - const compressionMethod = yield utils.getCompressionMethod(); + let cacheEntry; + let compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { - // path are needed to compute version - const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod - }); + try { + // path are needed to compute version + cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod + }); + } + catch (error) { + // This is to support the old cache entry created + // by the old version of the cache action on windows. + if (process.platform === 'win32' && + compressionMethod !== constants_1.CompressionMethod.Gzip) { + compressionMethod = constants_1.CompressionMethod.Gzip; + cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { + throw error; + } + } + else { + throw error; + } + } if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { // Cache not found return undefined; diff --git a/dist/save/index.js b/dist/save/index.js index f861a7c..e20301a 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -47167,6 +47167,7 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const cacheHttpClient = __importStar(__webpack_require__(114)); const tar_1 = __webpack_require__(434); +const constants_1 = __webpack_require__(931); class ValidationError extends Error { constructor(message) { super(message); @@ -47228,13 +47229,33 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { for (const key of keys) { checkKey(key); } - const compressionMethod = yield utils.getCompressionMethod(); + let cacheEntry; + let compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { - // path are needed to compute version - const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod - }); + try { + // path are needed to compute version + cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod + }); + } + catch (error) { + // This is to support the old cache entry created + // by the old version of the cache action on windows. + if (process.platform === 'win32' && + compressionMethod !== constants_1.CompressionMethod.Gzip) { + compressionMethod = constants_1.CompressionMethod.Gzip; + cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { + throw error; + } + } + else { + throw error; + } + } if (!(cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.archiveLocation)) { // Cache not found return undefined; diff --git a/package-lock.json b/package-lock.json index 19758f1..c5810b1 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,15 +1,15 @@ { "name": "cache", - "version": "3.1.0-beta.1", + "version": "3.1.0-beta.2", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "cache", - "version": "3.1.0-beta.1", + "version": "3.1.0-beta.2", "license": "MIT", "dependencies": { - "@actions/cache": "3.1.0-beta.1", + "@actions/cache": "3.1.0-beta.2", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", "@actions/io": "^1.1.2" @@ -36,9 +36,9 @@ } }, "node_modules/@actions/cache": { - "version": "3.1.0-beta.1", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.1.tgz", - "integrity": "sha512-E+lNTJ4x1baOVHbhkkGK7JebxChMM/ogDSWIuDJsiPlpi7bzzL8RnKTk4zlZ3OYmWK8tF2/5QZMerg3rY4c/9A==", + "version": "3.1.0-beta.2", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.2.tgz", + "integrity": "sha512-xt9NLWPCh5WU9Z5ITeGpT5Nza/57wMXeLsGuNVcRCIVpPuNTf3Puj82vjZZQw4rGqiCCs+n4+hnkTcE9BKw2sw==", "dependencies": { "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", @@ -9722,9 +9722,9 @@ }, "dependencies": { "@actions/cache": { - "version": "3.1.0-beta.1", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.1.tgz", - "integrity": "sha512-E+lNTJ4x1baOVHbhkkGK7JebxChMM/ogDSWIuDJsiPlpi7bzzL8RnKTk4zlZ3OYmWK8tF2/5QZMerg3rY4c/9A==", + "version": "3.1.0-beta.2", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.2.tgz", + "integrity": "sha512-xt9NLWPCh5WU9Z5ITeGpT5Nza/57wMXeLsGuNVcRCIVpPuNTf3Puj82vjZZQw4rGqiCCs+n4+hnkTcE9BKw2sw==", "requires": { "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", diff --git a/package.json b/package.json index 7b1a106..bfa8e58 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "cache", - "version": "3.1.0-beta.1", + "version": "3.1.0-beta.2", "private": true, "description": "Cache dependencies and build outputs", "main": "dist/restore/index.js", @@ -23,7 +23,7 @@ "author": "GitHub", "license": "MIT", "dependencies": { - "@actions/cache": "3.1.0-beta.1", + "@actions/cache": "3.1.0-beta.2", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", "@actions/io": "^1.1.2" From a2137c625cf42636129d84ff5e5f0e8beac11a08 Mon Sep 17 00:00:00 2001 From: Sampark Sharma Date: Mon, 12 Dec 2022 13:01:08 +0000 Subject: [PATCH 06/14] update for new beta release --- RELEASES.md | 3 ++ dist/restore/index.js | 95 ++++++++++++++++++++++--------------------- dist/save/index.js | 95 ++++++++++++++++++++++--------------------- package-lock.json | 18 ++++---- package.json | 4 +- 5 files changed, 112 insertions(+), 103 deletions(-) diff --git a/RELEASES.md b/RELEASES.md index 02d5daa..45d5b7c 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -46,3 +46,6 @@ ### 3.1.0-beta.2 - Added support for fallback to gzip to restore old caches on windows. + +### 3.1.0-beta.3 +- Bug fixes for bsdtar fallback if gnutar not available and gzip fallback if cache saved using old cache action on windows. 
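The main mechanical change behind the bsdtar fix in the dist diffs below is that, on Windows with BSD tar and zstd, the zstd step and the tar step are no longer chained into one command line with `&&`; `getCommands` now returns them as separate commands and `execCommands` runs each one as its own process. A rough TypeScript sketch of that shape, with `buildCommands` as a hypothetical stand-in for the real command assembly:

```ts
import { exec } from "@actions/exec";

// Hypothetical builder: with the BSD tar + zstd workaround the compression step and the
// tar step are two separate command strings; otherwise tar drives the compressor itself
// via --use-compress-program and a single command is enough.
function buildCommands(useBsdTarZstdWorkaround: boolean, tarCmd: string, zstdCmd: string): string[] {
    return useBsdTarZstdWorkaround
        ? [zstdCmd, tarCmd]
        : [`${tarCmd} --use-compress-program "${zstdCmd}"`];
}

// Mirrors the execCommands helper added in the diff: run every command in sequence and
// report which program failed.
async function execCommands(commands: string[], cwd?: string): Promise<void> {
    for (const command of commands) {
        try {
            await exec(command, undefined, { cwd });
        } catch (error) {
            throw new Error(
                `${command.split(" ")[0]} failed with error: ${(error as Error)?.message}`
            );
        }
    }
}
```

Running the two steps as separate processes sidesteps the shell-style `&&` chaining used in the previous beta, which is exactly the part the diffs below remove.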
diff --git a/dist/restore/index.js b/dist/restore/index.js index 02411fb..1370ed2 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -3432,6 +3432,7 @@ function getCacheEntry(keys, paths, options) { const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { + // Cache not found return null; } if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { @@ -3440,6 +3441,7 @@ function getCacheEntry(keys, paths, options) { const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { + // Cache achiveLocation not found. This should never happen, and hence bail out. throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); @@ -38034,7 +38036,7 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const constants_1 = __webpack_require__(931); const IS_WINDOWS = process.platform === 'win32'; -// Function also mutates the args array. For non-mutation call with passing an empty array. +// Returns tar path and type: BSD or GNU function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { @@ -38066,6 +38068,7 @@ function getTarPath() { default: break; } + // Default assumption is GNU tar is present in path return { path: yield io.which('tar', true), type: constants_1.ArchiveToolType.GNU @@ -38079,6 +38082,7 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { const cacheFileName = utils.getCacheFileName(compressionMethod); const tarFile = 'cache.tar'; const workingDirectory = getWorkingDirectory(); + // Speficic args for BSD tar on windows for workaround const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; @@ -38116,8 +38120,10 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return args; }); } -function getArgs(compressionMethod, type, archivePath = '') { +// Returns commands to run tar and compression program +function getCommands(compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { + let args; const tarPath = yield getTarPath(); const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); const compressionArgs = type !== 'create' @@ -38127,11 +38133,15 @@ function getArgs(compressionMethod, type, archivePath = '') { compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; if (BSD_TAR_ZSTD && type !== 'create') { - return [...compressionArgs, ...tarArgs].join(' '); + args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; } else { - return [...tarArgs, ...compressionArgs].join(' '); + args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; } + if (BSD_TAR_ZSTD) { + return args; + } + return [args.join(' ')]; }); } function getWorkingDirectory() { @@ -38154,8 +38164,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { ? 
[ 'zstd -d --long=30 -o', constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '&&' + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') ] : [ '--use-compress-program', @@ -38166,8 +38175,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { ? [ 'zstd -d -o', constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '&&' + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') ] : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; default: @@ -38175,6 +38183,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { } }); } +// Used for creating the archive // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. // zstdmt is equivalent to 'zstd -T0' // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. @@ -38190,7 +38199,6 @@ function getCompressionProgram(tarPath, compressionMethod) { case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ - '&&', 'zstd -T0 --long=30 -o', cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), constants_1.TarFilename @@ -38202,7 +38210,6 @@ function getCompressionProgram(tarPath, compressionMethod) { case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ - '&&', 'zstd -T0 -o', cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), constants_1.TarFilename @@ -38213,44 +38220,45 @@ function getCompressionProgram(tarPath, compressionMethod) { } }); } -function listTar(archivePath, compressionMethod) { +// Executes all commands as separate processes +function execCommands(commands, cwd) { return __awaiter(this, void 0, void 0, function* () { - const args = yield getArgs(compressionMethod, 'list', archivePath); - try { - yield exec_1.exec(args); - } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + for (const command of commands) { + try { + yield exec_1.exec(command, undefined, { cwd }); + } + catch (error) { + throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + } } }); } +// List the contents of a tar +function listTar(archivePath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const commands = yield getCommands(compressionMethod, 'list', archivePath); + yield execCommands(commands); + }); +} exports.listTar = listTar; +// Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const args = yield getArgs(compressionMethod, 'extract', archivePath); - try { - yield exec_1.exec(args); - } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); - } + const commands = yield getCommands(compressionMethod, 'extract', archivePath); + yield execCommands(commands); }); } exports.extractTar = extractTar; +// Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); - const args = yield getArgs(compressionMethod, 'create'); - try { - yield exec_1.exec(args, undefined, { cwd: archiveFolder }); - } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); - } + const commands = yield getCommands(compressionMethod, 'create'); + yield execCommands(commands, archiveFolder); }); } exports.createTar = createTar; @@ -47147,15 +47155,12 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { let compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { - try { - // path are needed to compute version - cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod - }); - } - catch (error) { - // This is to support the old cache entry created - // by the old version of the cache action on windows. + // path are needed to compute version + cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { + // This is to support the old cache entry created by gzip on windows. if (process.platform === 'win32' && compressionMethod !== constants_1.CompressionMethod.Gzip) { compressionMethod = constants_1.CompressionMethod.Gzip; @@ -47163,17 +47168,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { compressionMethod }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - throw error; + return undefined; } + core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression."); } else { - throw error; + // Cache not found + return undefined; } } - if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - // Cache not found - return undefined; - } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); // Download the cache from the cache entry diff --git a/dist/save/index.js b/dist/save/index.js index e20301a..bd02644 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -3432,6 +3432,7 @@ function getCacheEntry(keys, paths, options) { const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { + // Cache not found return null; } if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { @@ -3440,6 +3441,7 @@ function getCacheEntry(keys, paths, options) { const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { + // Cache achiveLocation not found. This should never happen, and hence bail out. 
throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); @@ -38034,7 +38036,7 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const constants_1 = __webpack_require__(931); const IS_WINDOWS = process.platform === 'win32'; -// Function also mutates the args array. For non-mutation call with passing an empty array. +// Returns tar path and type: BSD or GNU function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { @@ -38066,6 +38068,7 @@ function getTarPath() { default: break; } + // Default assumption is GNU tar is present in path return { path: yield io.which('tar', true), type: constants_1.ArchiveToolType.GNU @@ -38079,6 +38082,7 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { const cacheFileName = utils.getCacheFileName(compressionMethod); const tarFile = 'cache.tar'; const workingDirectory = getWorkingDirectory(); + // Speficic args for BSD tar on windows for workaround const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; @@ -38116,8 +38120,10 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return args; }); } -function getArgs(compressionMethod, type, archivePath = '') { +// Returns commands to run tar and compression program +function getCommands(compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { + let args; const tarPath = yield getTarPath(); const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); const compressionArgs = type !== 'create' @@ -38127,11 +38133,15 @@ function getArgs(compressionMethod, type, archivePath = '') { compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; if (BSD_TAR_ZSTD && type !== 'create') { - return [...compressionArgs, ...tarArgs].join(' '); + args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; } else { - return [...tarArgs, ...compressionArgs].join(' '); + args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; } + if (BSD_TAR_ZSTD) { + return args; + } + return [args.join(' ')]; }); } function getWorkingDirectory() { @@ -38154,8 +38164,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { ? [ 'zstd -d --long=30 -o', constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '&&' + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') ] : [ '--use-compress-program', @@ -38166,8 +38175,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { ? [ 'zstd -d -o', constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '&&' + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') ] : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; default: @@ -38175,6 +38183,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { } }); } +// Used for creating the archive // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. // zstdmt is equivalent to 'zstd -T0' // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. @@ -38190,7 +38199,6 @@ function getCompressionProgram(tarPath, compressionMethod) { case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? 
[ - '&&', 'zstd -T0 --long=30 -o', cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), constants_1.TarFilename @@ -38202,7 +38210,6 @@ function getCompressionProgram(tarPath, compressionMethod) { case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ - '&&', 'zstd -T0 -o', cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), constants_1.TarFilename @@ -38213,44 +38220,45 @@ function getCompressionProgram(tarPath, compressionMethod) { } }); } -function listTar(archivePath, compressionMethod) { +// Executes all commands as separate processes +function execCommands(commands, cwd) { return __awaiter(this, void 0, void 0, function* () { - const args = yield getArgs(compressionMethod, 'list', archivePath); - try { - yield exec_1.exec(args); - } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + for (const command of commands) { + try { + yield exec_1.exec(command, undefined, { cwd }); + } + catch (error) { + throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + } } }); } +// List the contents of a tar +function listTar(archivePath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const commands = yield getCommands(compressionMethod, 'list', archivePath); + yield execCommands(commands); + }); +} exports.listTar = listTar; +// Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const args = yield getArgs(compressionMethod, 'extract', archivePath); - try { - yield exec_1.exec(args); - } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); - } + const commands = yield getCommands(compressionMethod, 'extract', archivePath); + yield execCommands(commands); }); } exports.extractTar = extractTar; +// Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); - const args = yield getArgs(compressionMethod, 'create'); - try { - yield exec_1.exec(args, undefined, { cwd: archiveFolder }); - } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); - } + const commands = yield getCommands(compressionMethod, 'create'); + yield execCommands(commands, archiveFolder); }); } exports.createTar = createTar; @@ -47233,15 +47241,12 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { let compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { - try { - // path are needed to compute version - cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod - }); - } - catch (error) { - // This is to support the old cache entry created - // by the old version of the cache action on windows. + // path are needed to compute version + cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.archiveLocation)) { + // This is to support the old cache entry created by gzip on windows. if (process.platform === 'win32' && compressionMethod !== constants_1.CompressionMethod.Gzip) { compressionMethod = constants_1.CompressionMethod.Gzip; @@ -47249,17 +47254,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { compressionMethod }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - throw error; + return undefined; } + core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression."); } else { - throw error; + // Cache not found + return undefined; } } - if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - // Cache not found - return undefined; - } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); // Download the cache from the cache entry diff --git a/package-lock.json b/package-lock.json index c5810b1..d329382 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,15 +1,15 @@ { "name": "cache", - "version": "3.1.0-beta.2", + "version": "3.1.0-beta.3", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "cache", - "version": "3.1.0-beta.2", + "version": "3.1.0-beta.3", "license": "MIT", "dependencies": { - "@actions/cache": "3.1.0-beta.2", + "@actions/cache": "3.1.0-beta.3", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", "@actions/io": "^1.1.2" @@ -36,9 +36,9 @@ } }, "node_modules/@actions/cache": { - "version": "3.1.0-beta.2", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.2.tgz", - "integrity": "sha512-xt9NLWPCh5WU9Z5ITeGpT5Nza/57wMXeLsGuNVcRCIVpPuNTf3Puj82vjZZQw4rGqiCCs+n4+hnkTcE9BKw2sw==", + "version": "3.1.0-beta.3", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.3.tgz", + "integrity": "sha512-71S1vd0WKLbC2lAe04pCYqTLBjSa8gURtiqnVBCYAt8QVBjOfwa2D3ESf2m8K2xjUxman/Yimdp7CPJDyFnxZg==", "dependencies": { "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", @@ -9722,9 +9722,9 @@ }, "dependencies": { "@actions/cache": { - "version": "3.1.0-beta.2", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.2.tgz", - "integrity": "sha512-xt9NLWPCh5WU9Z5ITeGpT5Nza/57wMXeLsGuNVcRCIVpPuNTf3Puj82vjZZQw4rGqiCCs+n4+hnkTcE9BKw2sw==", + "version": "3.1.0-beta.3", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.3.tgz", + "integrity": "sha512-71S1vd0WKLbC2lAe04pCYqTLBjSa8gURtiqnVBCYAt8QVBjOfwa2D3ESf2m8K2xjUxman/Yimdp7CPJDyFnxZg==", "requires": { "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", diff --git a/package.json b/package.json index bfa8e58..2c94ed5 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "cache", - "version": "3.1.0-beta.2", + "version": "3.1.0-beta.3", "private": true, "description": "Cache dependencies and build outputs", "main": "dist/restore/index.js", @@ -23,7 +23,7 @@ "author": "GitHub", "license": "MIT", "dependencies": { - "@actions/cache": "3.1.0-beta.2", + "@actions/cache": "3.1.0-beta.3", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", "@actions/io": "^1.1.2" From 0a6e5b052a82f93b4a63f896c969b9dee1306bde Mon Sep 17 00:00:00 2001 From: Sankalp Kotewar <98868223+kotewar@users.noreply.github.com> Date: Mon, 12 Dec 2022 18:49:45 +0530 Subject: [PATCH 07/14] Update save/action.yml Co-authored-by: Bishal Prasad --- save/action.yml | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/save/action.yml b/save/action.yml index 85414eb..44d712b 100644 --- a/save/action.yml +++ b/save/action.yml @@ -1,4 +1,4 @@ -name: 'Save Only Cache' +name: 'Save a cache' description: 'Save Cache artifacts like dependencies and build outputs to improve workflow execution time' author: 'GitHub' inputs: From dd740c87de0625fcebbf3688cb3e981c9a82d4bf Mon Sep 17 00:00:00 2001 From: Sankalp Kotewar <98868223+kotewar@users.noreply.github.com> Date: Mon, 12 Dec 2022 18:49:53 +0530 Subject: [PATCH 08/14] Update restore/action.yml Co-authored-by: Bishal Prasad --- restore/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/restore/action.yml b/restore/action.yml index ef5681e..1f9bba7 100644 --- a/restore/action.yml +++ b/restore/action.yml @@ -17,7 +17,7 @@ outputs: cache-primary-key: description: 'Cache primary key passed in the input to use in subsequent steps of the workflow' cache-restore-key: - description: 'Cache key restored' + description: 'Restore key which was used to restore the cache. It will not be set in case there was an exact match with primary key itself' runs: using: 'node16' main: '../dist/restore-only/index.js' From df53d3c04b5a6f2bda91b4ddc8eae4445718eba5 Mon Sep 17 00:00:00 2001 From: Sankalp Kotewar <98868223+kotewar@users.noreply.github.com> Date: Mon, 12 Dec 2022 18:49:59 +0530 Subject: [PATCH 09/14] Update restore/action.yml Co-authored-by: Bishal Prasad --- restore/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/restore/action.yml b/restore/action.yml index 1f9bba7..b294a22 100644 --- a/restore/action.yml +++ b/restore/action.yml @@ -15,7 +15,7 @@ outputs: cache-hit: description: 'A boolean value to indicate an exact match was found for the primary key' cache-primary-key: - description: 'Cache primary key passed in the input to use in subsequent steps of the workflow' + description: 'A resolved cache key for which cache match was attempted' cache-restore-key: description: 'Restore key which was used to restore the cache. 
It will not be set in case there was an exact match with primary key itself' runs: From 29d6c7aa7fb325a957d659734782f53ab37003b0 Mon Sep 17 00:00:00 2001 From: Sankalp Kotewar <98868223+kotewar@users.noreply.github.com> Date: Mon, 12 Dec 2022 18:50:09 +0530 Subject: [PATCH 10/14] Update restore/action.yml Co-authored-by: Bishal Prasad --- restore/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/restore/action.yml b/restore/action.yml index b294a22..5afb4a6 100644 --- a/restore/action.yml +++ b/restore/action.yml @@ -3,7 +3,7 @@ description: 'Restore Cache artifacts like dependencies and build outputs to imp author: 'GitHub' inputs: path: - description: 'The same list of files, directories, and wildcard patterns to restore cache that were used while saving it' + description: 'A list of files, directories, and wildcard patterns to restore' required: true key: description: 'An explicit key for restoring the cache' From 87a7d01109aff7a5cb02832d6716190c5c3816c1 Mon Sep 17 00:00:00 2001 From: Sankalp Kotewar <98868223+kotewar@users.noreply.github.com> Date: Mon, 12 Dec 2022 18:50:18 +0530 Subject: [PATCH 11/14] Update restore/action.yml Co-authored-by: Bishal Prasad --- restore/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/restore/action.yml b/restore/action.yml index 5afb4a6..4eb3c5a 100644 --- a/restore/action.yml +++ b/restore/action.yml @@ -1,4 +1,4 @@ -name: 'Restore Only Cache' +name: 'Restore Cache' description: 'Restore Cache artifacts like dependencies and build outputs to improve workflow execution time' author: 'GitHub' inputs: From 407044787b04191de805cb3ae2c4697e4f5bb7c2 Mon Sep 17 00:00:00 2001 From: Sankalp Kotewar <98868223+kotewar@users.noreply.github.com> Date: Mon, 12 Dec 2022 13:50:54 +0000 Subject: [PATCH 12/14] Added more assertions as values can't be checked --- __tests__/stateProvider.test.ts | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/__tests__/stateProvider.test.ts b/__tests__/stateProvider.test.ts index bb1b231..315086c 100644 --- a/__tests__/stateProvider.test.ts +++ b/__tests__/stateProvider.test.ts @@ -30,12 +30,19 @@ test("StateProvider saves states", async () => { .mockImplementation(name => jest.requireActual("@actions/core").getState(name) ); + const saveStateMock = jest .spyOn(core, "saveState") .mockImplementation((key, value) => { return jest.requireActual("@actions/core").saveState(key, value); }); + const setOutputMock = jest + .spyOn(core, "setOutput") + .mockImplementation((key, value) => { + return jest.requireActual("@actions/core").setOutput(key, value); + }); + const cacheMatchedKey = "node-cache"; const stateProvider: IStateProvider = new StateProvider(); @@ -46,6 +53,7 @@ test("StateProvider saves states", async () => { expect(getStateMock).toHaveBeenCalledTimes(2); expect(saveStateMock).toHaveBeenCalledTimes(2); + expect(setOutputMock).toHaveBeenCalledTimes(0); }); test("NullStateProvider saves outputs", async () => { @@ -54,11 +62,19 @@ test("NullStateProvider saves outputs", async () => { .mockImplementation(name => jest.requireActual("@actions/core").getState(name) ); + const setOutputMock = jest .spyOn(core, "setOutput") .mockImplementation((key, value) => { return jest.requireActual("@actions/core").setOutput(key, value); }); + + const saveStateMock = jest + .spyOn(core, "saveState") + .mockImplementation((key, value) => { + return jest.requireActual("@actions/core").saveState(key, value); + }); + const cacheMatchedKey = "node-cache"; const nullStateProvider: 
IStateProvider = new NullStateProvider(); nullStateProvider.setState(State.CacheMatchedKey, "outputValue"); @@ -68,4 +84,5 @@ test("NullStateProvider saves outputs", async () => { expect(getStateMock).toHaveBeenCalledTimes(0); expect(setOutputMock).toHaveBeenCalledTimes(2); + expect(saveStateMock).toHaveBeenCalledTimes(0); }); From 1ddc49105de28795736c1e98c4413738eb69debc Mon Sep 17 00:00:00 2001 From: Sankalp Kotewar <98868223+kotewar@users.noreply.github.com> Date: Mon, 12 Dec 2022 14:03:16 +0000 Subject: [PATCH 13/14] Removed unused code --- __tests__/restoreImpl.test.ts | 8 ++++---- dist/restore-only/index.js | 8 ++------ dist/restore/index.js | 8 ++------ dist/save-only/index.js | 6 +----- dist/save/index.js | 6 +----- src/restoreImpl.ts | 2 +- src/utils/actionUtils.ts | 6 +----- 7 files changed, 12 insertions(+), 32 deletions(-) diff --git a/__tests__/restoreImpl.test.ts b/__tests__/restoreImpl.test.ts index 66bab8d..47a9e52 100644 --- a/__tests__/restoreImpl.test.ts +++ b/__tests__/restoreImpl.test.ts @@ -66,13 +66,13 @@ test("restore without AC available should no-op", async () => { ); const restoreCacheMock = jest.spyOn(cache, "restoreCache"); - const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); + const setCacheHitOutputMock = jest.spyOn(core, "setOutput"); await run(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(0); expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); - expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); + expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "false"); }); test("restore on GHES without AC available should no-op", async () => { @@ -82,13 +82,13 @@ test("restore on GHES without AC available should no-op", async () => { ); const restoreCacheMock = jest.spyOn(cache, "restoreCache"); - const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); + const setCacheHitOutputMock = jest.spyOn(core, "setOutput"); await run(new StateProvider()); expect(restoreCacheMock).toHaveBeenCalledTimes(0); expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); - expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); + expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "false"); }); test("restore on GHES with AC available ", async () => { diff --git a/dist/restore-only/index.js b/dist/restore-only/index.js index 0192c06..f9a179e 100644 --- a/dist/restore-only/index.js +++ b/dist/restore-only/index.js @@ -10045,7 +10045,7 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.setCacheHitOutput = exports.isExactKeyMatch = exports.isGhes = void 0; +exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0; const cache = __importStar(__webpack_require__(692)); const core = __importStar(__webpack_require__(470)); const constants_1 = __webpack_require__(196); @@ -10061,10 +10061,6 @@ function isExactKeyMatch(key, cacheKey) { }) === 0); } exports.isExactKeyMatch = isExactKeyMatch; -function setCacheHitOutput(isCacheHit) { - core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString()); -} -exports.setCacheHitOutput = setCacheHitOutput; function logWarning(message) { const warningPrefix = "[warning]"; 
core.info(`${warningPrefix}${message}`); @@ -50459,7 +50455,7 @@ function restoreImpl(stateProvider) { return __awaiter(this, void 0, void 0, function* () { try { if (!utils.isCacheFeatureAvailable()) { - utils.setCacheHitOutput(false); + core.setOutput(constants_1.Outputs.CacheHit, "false"); return; } // Validate inputs, this can cause task failure diff --git a/dist/restore/index.js b/dist/restore/index.js index 8638fbe..50f3443 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -38565,7 +38565,7 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.setCacheHitOutput = exports.isExactKeyMatch = exports.isGhes = void 0; +exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0; const cache = __importStar(__webpack_require__(692)); const core = __importStar(__webpack_require__(470)); const constants_1 = __webpack_require__(196); @@ -38581,10 +38581,6 @@ function isExactKeyMatch(key, cacheKey) { }) === 0); } exports.isExactKeyMatch = isExactKeyMatch; -function setCacheHitOutput(isCacheHit) { - core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString()); -} -exports.setCacheHitOutput = setCacheHitOutput; function logWarning(message) { const warningPrefix = "[warning]"; core.info(`${warningPrefix}${message}`); @@ -50459,7 +50455,7 @@ function restoreImpl(stateProvider) { return __awaiter(this, void 0, void 0, function* () { try { if (!utils.isCacheFeatureAvailable()) { - utils.setCacheHitOutput(false); + core.setOutput(constants_1.Outputs.CacheHit, "false"); return; } // Validate inputs, this can cause task failure diff --git a/dist/save-only/index.js b/dist/save-only/index.js index f417c7a..5b244a2 100644 --- a/dist/save-only/index.js +++ b/dist/save-only/index.js @@ -38589,7 +38589,7 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.setCacheHitOutput = exports.isExactKeyMatch = exports.isGhes = void 0; +exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0; const cache = __importStar(__webpack_require__(692)); const core = __importStar(__webpack_require__(470)); const constants_1 = __webpack_require__(196); @@ -38605,10 +38605,6 @@ function isExactKeyMatch(key, cacheKey) { }) === 0); } exports.isExactKeyMatch = isExactKeyMatch; -function setCacheHitOutput(isCacheHit) { - core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString()); -} -exports.setCacheHitOutput = setCacheHitOutput; function logWarning(message) { const warningPrefix = "[warning]"; core.info(`${warningPrefix}${message}`); diff --git a/dist/save/index.js b/dist/save/index.js index c11e5fb..af26def 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -38560,7 +38560,7 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.isCacheFeatureAvailable = exports.getInputAsInt = 
exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.setCacheHitOutput = exports.isExactKeyMatch = exports.isGhes = void 0; +exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0; const cache = __importStar(__webpack_require__(692)); const core = __importStar(__webpack_require__(470)); const constants_1 = __webpack_require__(196); @@ -38576,10 +38576,6 @@ function isExactKeyMatch(key, cacheKey) { }) === 0); } exports.isExactKeyMatch = isExactKeyMatch; -function setCacheHitOutput(isCacheHit) { - core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString()); -} -exports.setCacheHitOutput = setCacheHitOutput; function logWarning(message) { const warningPrefix = "[warning]"; core.info(`${warningPrefix}${message}`); diff --git a/src/restoreImpl.ts b/src/restoreImpl.ts index dec2437..fb43517 100644 --- a/src/restoreImpl.ts +++ b/src/restoreImpl.ts @@ -10,7 +10,7 @@ async function restoreImpl( ): Promise { try { if (!utils.isCacheFeatureAvailable()) { - utils.setCacheHitOutput(false); + core.setOutput(Outputs.CacheHit, "false"); return; } diff --git a/src/utils/actionUtils.ts b/src/utils/actionUtils.ts index b48d36f..6a640a5 100644 --- a/src/utils/actionUtils.ts +++ b/src/utils/actionUtils.ts @@ -1,7 +1,7 @@ import * as cache from "@actions/cache"; import * as core from "@actions/core"; -import { Outputs, RefKey } from "../constants"; +import { RefKey } from "../constants"; export function isGhes(): boolean { const ghUrl = new URL( @@ -19,10 +19,6 @@ export function isExactKeyMatch(key: string, cacheKey?: string): boolean { ); } -export function setCacheHitOutput(isCacheHit: boolean): void { - core.setOutput(Outputs.CacheHit, isCacheHit.toString()); -} - export function logWarning(message: string): void { const warningPrefix = "[warning]"; core.info(`${warningPrefix}${message}`); From 075ad790b0168885fff05f6a7212a095d08f5906 Mon Sep 17 00:00:00 2001 From: Sankalp Kotewar <98868223+kotewar@users.noreply.github.com> Date: Mon, 12 Dec 2022 14:18:42 +0000 Subject: [PATCH 14/14] Merged beta branch and resolved conflicts --- dist/restore-only/index.js | 95 ++++++++++++++++++++------------------ dist/save-only/index.js | 95 ++++++++++++++++++++------------------ 2 files changed, 98 insertions(+), 92 deletions(-) diff --git a/dist/restore-only/index.js b/dist/restore-only/index.js index f9a179e..ffed496 100644 --- a/dist/restore-only/index.js +++ b/dist/restore-only/index.js @@ -3432,6 +3432,7 @@ function getCacheEntry(keys, paths, options) { const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { + // Cache not found return null; } if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { @@ -3440,6 +3441,7 @@ function getCacheEntry(keys, paths, options) { const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { + // Cache achiveLocation not found. This should never happen, and hence bail out. 
throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); @@ -38201,7 +38203,7 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const constants_1 = __webpack_require__(931); const IS_WINDOWS = process.platform === 'win32'; -// Function also mutates the args array. For non-mutation call with passing an empty array. +// Returns tar path and type: BSD or GNU function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { @@ -38233,6 +38235,7 @@ function getTarPath() { default: break; } + // Default assumption is GNU tar is present in path return { path: yield io.which('tar', true), type: constants_1.ArchiveToolType.GNU @@ -38246,6 +38249,7 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { const cacheFileName = utils.getCacheFileName(compressionMethod); const tarFile = 'cache.tar'; const workingDirectory = getWorkingDirectory(); + // Speficic args for BSD tar on windows for workaround const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; @@ -38283,8 +38287,10 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return args; }); } -function getArgs(compressionMethod, type, archivePath = '') { +// Returns commands to run tar and compression program +function getCommands(compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { + let args; const tarPath = yield getTarPath(); const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); const compressionArgs = type !== 'create' @@ -38294,11 +38300,15 @@ function getArgs(compressionMethod, type, archivePath = '') { compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; if (BSD_TAR_ZSTD && type !== 'create') { - return [...compressionArgs, ...tarArgs].join(' '); + args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; } else { - return [...tarArgs, ...compressionArgs].join(' '); + args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; } + if (BSD_TAR_ZSTD) { + return args; + } + return [args.join(' ')]; }); } function getWorkingDirectory() { @@ -38321,8 +38331,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { ? [ 'zstd -d --long=30 -o', constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '&&' + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') ] : [ '--use-compress-program', @@ -38333,8 +38342,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { ? [ 'zstd -d -o', constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '&&' + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') ] : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; default: @@ -38342,6 +38350,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { } }); } +// Used for creating the archive // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. // zstdmt is equivalent to 'zstd -T0' // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. @@ -38357,7 +38366,6 @@ function getCompressionProgram(tarPath, compressionMethod) { case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? 
[ - '&&', 'zstd -T0 --long=30 -o', cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), constants_1.TarFilename @@ -38369,7 +38377,6 @@ function getCompressionProgram(tarPath, compressionMethod) { case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ - '&&', 'zstd -T0 -o', cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), constants_1.TarFilename @@ -38380,44 +38387,45 @@ function getCompressionProgram(tarPath, compressionMethod) { } }); } -function listTar(archivePath, compressionMethod) { +// Executes all commands as separate processes +function execCommands(commands, cwd) { return __awaiter(this, void 0, void 0, function* () { - const args = yield getArgs(compressionMethod, 'list', archivePath); - try { - yield exec_1.exec(args); - } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + for (const command of commands) { + try { + yield exec_1.exec(command, undefined, { cwd }); + } + catch (error) { + throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + } } }); } +// List the contents of a tar +function listTar(archivePath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const commands = yield getCommands(compressionMethod, 'list', archivePath); + yield execCommands(commands); + }); +} exports.listTar = listTar; +// Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const args = yield getArgs(compressionMethod, 'extract', archivePath); - try { - yield exec_1.exec(args); - } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); - } + const commands = yield getCommands(compressionMethod, 'extract', archivePath); + yield execCommands(commands); }); } exports.extractTar = extractTar; +// Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); - const args = yield getArgs(compressionMethod, 'create'); - try { - yield exec_1.exec(args, undefined, { cwd: archiveFolder }); - } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); - } + const commands = yield getCommands(compressionMethod, 'create'); + yield execCommands(commands, archiveFolder); }); } exports.createTar = createTar; @@ -47233,15 +47241,12 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { let compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { - try { - // path are needed to compute version - cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod - }); - } - catch (error) { - // This is to support the old cache entry created - // by the old version of the cache action on windows. + // path are needed to compute version + cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.archiveLocation)) { + // This is to support the old cache entry created by gzip on windows. if (process.platform === 'win32' && compressionMethod !== constants_1.CompressionMethod.Gzip) { compressionMethod = constants_1.CompressionMethod.Gzip; @@ -47249,17 +47254,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { compressionMethod }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - throw error; + return undefined; } + core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression."); } else { - throw error; + // Cache not found + return undefined; } } - if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - // Cache not found - return undefined; - } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); // Download the cache from the cache entry diff --git a/dist/save-only/index.js b/dist/save-only/index.js index 5b244a2..638bb7d 100644 --- a/dist/save-only/index.js +++ b/dist/save-only/index.js @@ -3461,6 +3461,7 @@ function getCacheEntry(keys, paths, options) { const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { + // Cache not found return null; } if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { @@ -3469,6 +3470,7 @@ function getCacheEntry(keys, paths, options) { const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { + // Cache achiveLocation not found. This should never happen, and hence bail out. throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); @@ -38138,7 +38140,7 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const constants_1 = __webpack_require__(931); const IS_WINDOWS = process.platform === 'win32'; -// Function also mutates the args array. For non-mutation call with passing an empty array. 
+// Returns tar path and type: BSD or GNU function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { @@ -38170,6 +38172,7 @@ function getTarPath() { default: break; } + // Default assumption is GNU tar is present in path return { path: yield io.which('tar', true), type: constants_1.ArchiveToolType.GNU @@ -38183,6 +38186,7 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { const cacheFileName = utils.getCacheFileName(compressionMethod); const tarFile = 'cache.tar'; const workingDirectory = getWorkingDirectory(); + // Speficic args for BSD tar on windows for workaround const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; @@ -38220,8 +38224,10 @@ function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return args; }); } -function getArgs(compressionMethod, type, archivePath = '') { +// Returns commands to run tar and compression program +function getCommands(compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { + let args; const tarPath = yield getTarPath(); const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); const compressionArgs = type !== 'create' @@ -38231,11 +38237,15 @@ function getArgs(compressionMethod, type, archivePath = '') { compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; if (BSD_TAR_ZSTD && type !== 'create') { - return [...compressionArgs, ...tarArgs].join(' '); + args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; } else { - return [...tarArgs, ...compressionArgs].join(' '); + args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; } + if (BSD_TAR_ZSTD) { + return args; + } + return [args.join(' ')]; }); } function getWorkingDirectory() { @@ -38258,8 +38268,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { ? [ 'zstd -d --long=30 -o', constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '&&' + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') ] : [ '--use-compress-program', @@ -38270,8 +38279,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { ? [ 'zstd -d -o', constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '&&' + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') ] : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; default: @@ -38279,6 +38287,7 @@ function getDecompressionProgram(tarPath, compressionMethod, archivePath) { } }); } +// Used for creating the archive // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. // zstdmt is equivalent to 'zstd -T0' // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. @@ -38294,7 +38303,6 @@ function getCompressionProgram(tarPath, compressionMethod) { case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ - '&&', 'zstd -T0 --long=30 -o', cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), constants_1.TarFilename @@ -38306,7 +38314,6 @@ function getCompressionProgram(tarPath, compressionMethod) { case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? 
[ - '&&', 'zstd -T0 -o', cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), constants_1.TarFilename @@ -38317,44 +38324,45 @@ function getCompressionProgram(tarPath, compressionMethod) { } }); } -function listTar(archivePath, compressionMethod) { +// Executes all commands as separate processes +function execCommands(commands, cwd) { return __awaiter(this, void 0, void 0, function* () { - const args = yield getArgs(compressionMethod, 'list', archivePath); - try { - yield exec_1.exec(args); - } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + for (const command of commands) { + try { + yield exec_1.exec(command, undefined, { cwd }); + } + catch (error) { + throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + } } }); } +// List the contents of a tar +function listTar(archivePath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const commands = yield getCommands(compressionMethod, 'list', archivePath); + yield execCommands(commands); + }); +} exports.listTar = listTar; +// Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const args = yield getArgs(compressionMethod, 'extract', archivePath); - try { - yield exec_1.exec(args); - } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); - } + const commands = yield getCommands(compressionMethod, 'extract', archivePath); + yield execCommands(commands); }); } exports.extractTar = extractTar; +// Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); - const args = yield getArgs(compressionMethod, 'create'); - try { - yield exec_1.exec(args, undefined, { cwd: archiveFolder }); - } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); - } + const commands = yield getCommands(compressionMethod, 'create'); + yield execCommands(commands, archiveFolder); }); } exports.createTar = createTar; @@ -47317,15 +47325,12 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { let compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { - try { - // path are needed to compute version - cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod - }); - } - catch (error) { - // This is to support the old cache entry created - // by the old version of the cache action on windows. + // path are needed to compute version + cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { + // This is to support the old cache entry created by gzip on windows. 
if (process.platform === 'win32' && compressionMethod !== constants_1.CompressionMethod.Gzip) { compressionMethod = constants_1.CompressionMethod.Gzip; @@ -47333,17 +47338,15 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { compressionMethod }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - throw error; + return undefined; } + core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression."); } else { - throw error; + // Cache not found + return undefined; } } - if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - // Cache not found - return undefined; - } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); // Download the cache from the cache entry