@@ -1127,17 +1127,20 @@ function getArchiveFileSizeInBytes(filePath) {
 }
 exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
 function resolvePaths(patterns) {
-    var e_1, _a;
-    var _b;
+    var _a, e_1, _b, _c;
+    var _d;
     return __awaiter(this, void 0, void 0, function* () {
         const paths = [];
-        const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
+        const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
         const globber = yield glob.create(patterns.join('\n'), {
             implicitDescendants: false
         });
         try {
-            for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
-                const file = _d.value;
+            for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) {
+                _c = _g.value;
+                _e = false;
+                try {
+                    const file = _c;
                 const relativeFile = path
                     .relative(workspace, file)
                     .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
@@ -1151,11 +1154,15 @@ function resolvePaths(patterns) {
                     paths.push(`${relativeFile}`);
                 }
             }
+                finally {
+                    _e = true;
+                }
+            }
         }
         catch (e_1_1) { e_1 = { error: e_1_1 }; }
         finally {
             try {
-                if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
+                if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
             }
             finally { if (e_1) throw e_1.error; }
         }
@@ -3394,10 +3401,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
     }
     // Add salt to cache version to support breaking changes in cache entry
    components.push(versionSalt);
-    return crypto
-        .createHash('sha256')
-        .update(components.join('|'))
-        .digest('hex');
+    return crypto.createHash('sha256').update(components.join('|')).digest('hex');
 }
 exports.getCacheVersion = getCacheVersion;
 function getCacheEntry(keys, paths, options) {
@@ -3450,15 +3454,23 @@ function downloadCache(archiveLocation, archivePath, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const archiveUrl = new url_1.URL(archiveLocation);
         const downloadOptions = (0, options_1.getDownloadOptions)(options);
-        if (downloadOptions.useAzureSdk &&
-            archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
+        if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
+            if (downloadOptions.useAzureSdk) {
             // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
             yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
         }
+            else if (downloadOptions.concurrentBlobDownloads) {
+                // Use concurrent implementation with HttpClient to work around blob SDK issue
+                yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
+            }
         else {
             // Otherwise, download using the Actions http-client.
             yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
         }
+        }
+        else {
+            yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
+        }
     });
 }
 exports.downloadCache = downloadCache;
@@ -3489,9 +3501,7 @@ function getContentRange(start, end) {
 }
 function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
-        core.debug(`Uploading chunk of size ${end -
-            start +
-            1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+        core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
         const additionalHeaders = {
             'Content-Type': 'application/octet-stream',
             'Content-Range': getContentRange(start, end)
@@ -4866,8 +4876,14 @@ function getProxyUrl(reqUrl) {
         }
     })();
     if (proxyVar) {
+        try {
         return new URL(proxyVar);
+        }
+        catch (_a) {
+            if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
+                return new URL(`http://${proxyVar}`);
+        }
     }
     else {
         return undefined;
     }
@@ -4877,6 +4893,10 @@ function checkBypass(reqUrl) {
     if (!reqUrl.hostname) {
         return false;
     }
+    const reqHost = reqUrl.hostname;
+    if (isLoopbackAddress(reqHost)) {
+        return true;
+    }
     const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
     if (!noProxy) {
         return false;
@@ -4902,13 +4922,24 @@ function checkBypass(reqUrl) {
         .split(',')
         .map(x => x.trim().toUpperCase())
         .filter(x => x)) {
-        if (upperReqHosts.some(x => x === upperNoProxyItem)) {
+        if (upperNoProxyItem === '*' ||
+            upperReqHosts.some(x => x === upperNoProxyItem ||
+                x.endsWith(`.${upperNoProxyItem}`) ||
+                (upperNoProxyItem.startsWith('.') &&
+                    x.endsWith(`${upperNoProxyItem}`)))) {
             return true;
         }
     }
     return false;
 }
 exports.checkBypass = checkBypass;
+function isLoopbackAddress(host) {
+    const hostLower = host.toLowerCase();
+    return (hostLower === 'localhost' ||
+        hostLower.startsWith('127.') ||
+        hostLower.startsWith('[::1]') ||
+        hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
+}
 //# sourceMappingURL=proxy.js.map
 
 /***/ }),
@@ -5557,7 +5588,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
+exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
 const core = __importStar(__webpack_require__(470));
 const http_client_1 = __webpack_require__(425);
 const storage_blob_1 = __webpack_require__(373);
@@ -5714,6 +5745,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
     });
 }
 exports.downloadCacheHttpClient = downloadCacheHttpClient;
+/**
+ * Download the cache using the Actions toolkit http-client concurrently
+ *
+ * @param archiveLocation the URL for the cache
+ * @param archivePath the local path where the cache is saved
+ */
+function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
+    var _a;
+    return __awaiter(this, void 0, void 0, function* () {
+        const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
+        const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
+            socketTimeout: options.timeoutInMs,
+            keepAlive: true
+        });
+        try {
+            const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
+            const lengthHeader = res.message.headers['content-length'];
+            if (lengthHeader === undefined || lengthHeader === null) {
+                throw new Error('Content-Length not found on blob response');
+            }
+            const length = parseInt(lengthHeader);
+            if (Number.isNaN(length)) {
+                throw new Error(`Could not interpret Content-Length: ${length}`);
+            }
+            const downloads = [];
+            const blockSize = 4 * 1024 * 1024;
+            for (let offset = 0; offset < length; offset += blockSize) {
+                const count = Math.min(blockSize, length - offset);
+                downloads.push({
+                    offset,
+                    promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
+                        return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
+                    })
+                });
+            }
+            // reverse to use .pop instead of .shift
+            downloads.reverse();
+            let actives = 0;
+            let bytesDownloaded = 0;
+            const progress = new DownloadProgress(length);
+            progress.startDisplayTimer();
+            const progressFn = progress.onProgress();
+            const activeDownloads = [];
+            let nextDownload;
+            const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
+                const segment = yield Promise.race(Object.values(activeDownloads));
+                yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
+                actives--;
+                delete activeDownloads[segment.offset];
+                bytesDownloaded += segment.count;
+                progressFn({ loadedBytes: bytesDownloaded });
+            });
+            while ((nextDownload = downloads.pop())) {
+                activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
+                actives++;
+                if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
+                    yield waitAndWrite();
+                }
+            }
+            while (actives > 0) {
+                yield waitAndWrite();
+            }
+        }
+        finally {
+            httpClient.dispose();
+            yield archiveDescriptor.close();
+        }
+    });
+}
+exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
+function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const retries = 5;
+        let failures = 0;
+        while (true) {
+            try {
+                const timeout = 30000;
+                const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
+                if (typeof result === 'string') {
+                    throw new Error('downloadSegmentRetry failed due to timeout');
+                }
+                return result;
+            }
+            catch (err) {
+                if (failures >= retries) {
+                    throw err;
+                }
+                failures++;
+            }
+        }
+    });
+}
+function downloadSegment(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
+            return yield httpClient.get(archiveLocation, {
+                Range: `bytes=${offset}-${offset + count - 1}`
+            });
+        }));
+        if (!partRes.readBodyBuffer) {
+            throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
+        }
+        return {
+            offset,
+            count,
+            buffer: yield partRes.readBodyBuffer()
+        };
+    });
+}
 /**
  * Download the cache using the Azure Storage SDK. Only call this method if the
  * URL points to an Azure Storage endpoint.
@@ -35745,6 +35885,19 @@ class HttpClientResponse {
             }));
         });
     }
+    readBodyBuffer() {
+        return __awaiter(this, void 0, void 0, function* () {
+            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+                const chunks = [];
+                this.message.on('data', (chunk) => {
+                    chunks.push(chunk);
+                });
+                this.message.on('end', () => {
+                    resolve(Buffer.concat(chunks));
+                });
+            }));
+        });
+    }
 }
 exports.HttpClientResponse = HttpClientResponse;
 function isHttps(requestUrl) {
@@ -40281,7 +40434,8 @@ exports.getUploadOptions = getUploadOptions;
  */
 function getDownloadOptions(copy) {
     const result = {
-        useAzureSdk: true,
+        useAzureSdk: false,
+        concurrentBlobDownloads: true,
         downloadConcurrency: 8,
         timeoutInMs: 30000,
         segmentTimeoutInMs: 600000,
@@ -40291,6 +40445,9 @@ function getDownloadOptions(copy) {
     if (typeof copy.useAzureSdk === 'boolean') {
         result.useAzureSdk = copy.useAzureSdk;
     }
+    if (typeof copy.concurrentBlobDownloads === 'boolean') {
+        result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
+    }
     if (typeof copy.downloadConcurrency === 'number') {
         result.downloadConcurrency = copy.downloadConcurrency;
     }
@@ -47453,28 +47610,9 @@ module.exports = function(dst, src) {
 
 "use strict";
 
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-    });
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
-const restoreImpl_1 = __importDefault(__webpack_require__(835));
-const stateProvider_1 = __webpack_require__(309);
-function run() {
-    return __awaiter(this, void 0, void 0, function* () {
-        yield (0, restoreImpl_1.default)(new stateProvider_1.StateProvider());
-    });
-}
-run();
-exports.default = run;
+const restoreImpl_1 = __webpack_require__(835);
+(0, restoreImpl_1.restoreRun)(true);
 
 
 /***/ }),
@@ -49096,9 +49234,11 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.restoreRun = exports.restoreOnlyRun = exports.restoreImpl = void 0;
 const cache = __importStar(__webpack_require__(692));
 const core = __importStar(__webpack_require__(470));
 const constants_1 = __webpack_require__(694);
+const stateProvider_1 = __webpack_require__(309);
 const utils = __importStar(__webpack_require__(443));
 function restoreImpl(stateProvider) {
     return __awaiter(this, void 0, void 0, function* () {
@@ -49149,7 +49289,40 @@ function restoreImpl(stateProvider) {
         }
     });
 }
-exports.default = restoreImpl;
+exports.restoreImpl = restoreImpl;
+function run(stateProvider, earlyExit) {
+    return __awaiter(this, void 0, void 0, function* () {
+        try {
+            yield restoreImpl(stateProvider);
+        }
+        catch (err) {
+            console.error(err);
+            if (earlyExit) {
+                process.exit(1);
+            }
+        }
+        // node will stay alive if any promises are not resolved,
+        // which is a possibility if HTTP requests are dangling
+        // due to retries or timeouts. We know that if we got here
+        // that all promises that we care about have successfully
+        // resolved, so simply exit with success.
+        if (earlyExit) {
+            process.exit(0);
+        }
+    });
+}
+function restoreOnlyRun(earlyExit) {
+    return __awaiter(this, void 0, void 0, function* () {
+        yield run(new stateProvider_1.NullStateProvider(), earlyExit);
+    });
+}
+exports.restoreOnlyRun = restoreOnlyRun;
+function restoreRun(earlyExit) {
+    return __awaiter(this, void 0, void 0, function* () {
+        yield run(new stateProvider_1.StateProvider(), earlyExit);
+    });
+}
+exports.restoreRun = restoreRun;
 
 
 /***/ }),