diff --git a/__tests__/save.test.ts b/__tests__/save.test.ts
index 4381ee3..b355076 100644
--- a/__tests__/save.test.ts
+++ b/__tests__/save.test.ts
@@ -208,7 +208,7 @@ test("save with large cache outputs warning", async () => {
 
     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith(
-        "Cache size of ~4 GB (4294967296 B) is over the 2GB limit, not saving cache."
+        "Cache size of ~4096 MB (4294967296 B) is over the 2GB limit, not saving cache."
     );
 
     expect(failedMock).toHaveBeenCalledTimes(0);
diff --git a/package-lock.json b/package-lock.json
index 986b08b..37e50d2 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,6 +1,6 @@
 {
   "name": "cache",
-  "version": "1.0.3",
+  "version": "1.1.0",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {
diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts
index 626d051..7d3e348 100644
--- a/src/cacheHttpClient.ts
+++ b/src/cacheHttpClient.ts
@@ -150,8 +150,8 @@ async function uploadChunk(
 ): Promise<IRestResponse<void>> {
     core.debug(
         `Uploading chunk of size ${end -
-    start +
-    1} bytes at offset ${start} with content range: ${getContentRange(
+            start +
+            1} bytes at offset ${start} with content range: ${getContentRange(
             start,
             end
         )}`
@@ -177,6 +177,7 @@ async function uploadChunk(
     }
 
     if (isRetryableStatusCode(response.statusCode)) {
+        core.debug(`Received ${response.statusCode}, retrying chunk at offset ${start}.`);
         const retryResponse = await uploadChunkRequest();
         if (isSuccessStatusCode(retryResponse.statusCode)) {
             return retryResponse;
@@ -199,51 +200,43 @@ async function uploadFile(
     const responses: IRestResponse<void>[] = [];
     const fd = fs.openSync(archivePath, "r");
 
-    const concurrency = 4; // # of HTTP requests in parallel
-    const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks
+    // Use || (not ??): Number() of an unset env var is NaN, which is not
+    // nullish, so ?? would never fall back to the default.
+    const concurrency = Number(process.env["CACHE_UPLOAD_CONCURRENCY"]) || 4; // # of HTTP requests in parallel
+    const MAX_CHUNK_SIZE =
+        Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) || 32000000; // 32 MB Chunks
     core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
 
     const parallelUploads = [...new Array(concurrency).keys()];
     core.debug("Awaiting all uploads");
     let offset = 0;
-    await Promise.all(
-        parallelUploads.map(async () => {
-            while (offset < fileSize) {
-                const chunkSize =
-                    offset + MAX_CHUNK_SIZE > fileSize
-                        ? fileSize - offset
-                        : MAX_CHUNK_SIZE;
-                const start = offset;
-                const end = offset + chunkSize - 1;
-                offset += MAX_CHUNK_SIZE;
-                const chunk = fs.createReadStream(archivePath, {
-                    fd,
-                    start,
-                    end,
-                    autoClose: false
-                });
-                responses.push(
-                    await uploadChunk(
-                        restClient,
-                        resourceUrl,
-                        chunk,
-                        start,
-                        end
-                    )
-                );
-            }
-        })
-    );
-    fs.closeSync(fd);
-
-    const failedResponse = responses.find(
-        x => !isSuccessStatusCode(x.statusCode)
-    );
-    if (failedResponse) {
-        throw new Error(
-            `Cache service responded with ${failedResponse.statusCode} during chunk upload.`
+    try {
+        await Promise.all(
+            parallelUploads.map(async () => {
+                while (offset < fileSize) {
+                    const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
+                    const start = offset;
+                    const end = offset + chunkSize - 1;
+                    offset += MAX_CHUNK_SIZE;
+                    const chunk = fs.createReadStream(archivePath, {
+                        fd,
+                        start,
+                        end,
+                        autoClose: false
+                    });
+                    responses.push(
+                        await uploadChunk(
+                            restClient,
+                            resourceUrl,
+                            chunk,
+                            start,
+                            end
+                        )
+                    );
+                }
+            })
         );
+    } finally {
+        fs.closeSync(fd);
     }
-
-    return;
 }
diff --git a/src/restore.ts b/src/restore.ts
index 32b5b5e..4911e7e 100644
--- a/src/restore.ts
+++ b/src/restore.ts
@@ -60,7 +60,7 @@ async function run(): Promise<void> {
 
     try {
         const cacheEntry = await cacheHttpClient.getCacheEntry(keys);
-        if (!cacheEntry || !cacheEntry?.archiveLocation) {
+        if (!cacheEntry?.archiveLocation) {
             core.info(
                 `Cache not found for input keys: ${keys.join(", ")}.`
             );
@@ -78,7 +78,7 @@ async function run(): Promise<void> {
 
         // Download the cache from the cache entry
         await cacheHttpClient.downloadCache(
-            cacheEntry?.archiveLocation,
+            cacheEntry.archiveLocation,
             archivePath
         );
 
diff --git a/src/save.ts b/src/save.ts
index 9233f08..ee64e42 100644
--- a/src/save.ts
+++ b/src/save.ts
@@ -36,7 +36,7 @@ async function run(): Promise<void> {
 
     core.debug("Reserving Cache");
     const cacheId = await cacheHttpClient.reserveCache(primaryKey);
-    if (cacheId < 0) {
+    if (cacheId === -1) {
         core.info(
             `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
         );
@@ -62,13 +62,13 @@ async function run(): Promise<void> {
     if (archiveFileSize > fileSizeLimit) {
         utils.logWarning(
             `Cache size of ~${Math.round(
-                archiveFileSize / (1024 * 1024 * 1024)
-            )} GB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`
+                archiveFileSize / (1024 * 1024)
+            )} MB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`
         );
         return;
     }
 
-    core.debug("Saving Cache");
+    core.debug(`Saving Cache (ID: ${cacheId})`);
     await cacheHttpClient.saveCache(cacheId, archivePath);
     } catch (error) {
         utils.logWarning(error.message);