@ -1127,35 +1127,42 @@ function getArchiveFileSizeInBytes(filePath) {
}
exports . getArchiveFileSizeInBytes = getArchiveFileSizeInBytes ;
function resolvePaths ( patterns ) {
var e_1 , _a ;
var _ b ;
var _a, e _1 , _b , _c ;
var _ d ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const paths = [ ] ;
const workspace = ( _ b = process . env [ 'GITHUB_WORKSPACE' ] ) !== null && _ b !== void 0 ? _b : process . cwd ( ) ;
const workspace = ( _ d = process . env [ 'GITHUB_WORKSPACE' ] ) !== null && _ d !== void 0 ? _d : process . cwd ( ) ;
const globber = yield glob . create ( patterns . join ( '\n' ) , {
implicitDescendants : false
} ) ;
try {
for ( var _c = _ _asyncValues ( globber . globGenerator ( ) ) , _d ; _d = yield _c . next ( ) , ! _d . done ; ) {
const file = _d . value ;
const relativeFile = path
. relative ( workspace , file )
. replace ( new RegExp ( ` \\ ${ path . sep } ` , 'g' ) , '/' ) ;
core . debug ( ` Matched: ${ relativeFile } ` ) ;
// Paths are made relative so the tar entries are all relative to the root of the workspace.
if ( relativeFile === '' ) {
// path.relative returns empty string if workspace and file are equal
paths . push ( '.' ) ;
for ( var _e = true , _f = _ _asyncValues ( globber . globGenerator ( ) ) , _g ; _g = yield _f . next ( ) , _a = _g . done , ! _a ; ) {
_c = _g . value ;
_e = false ;
try {
const file = _c ;
const relativeFile = path
. relative ( workspace , file )
. replace ( new RegExp ( ` \\ ${ path . sep } ` , 'g' ) , '/' ) ;
core . debug ( ` Matched: ${ relativeFile } ` ) ;
// Paths are made relative so the tar entries are all relative to the root of the workspace.
if ( relativeFile === '' ) {
// path.relative returns empty string if workspace and file are equal
paths . push ( '.' ) ;
}
else {
paths . push ( ` ${ relativeFile } ` ) ;
}
}
else {
paths . push ( ` ${ relativeFile } ` ) ;
finally {
_e = true ;
}
}
}
catch ( e _1 _1 ) { e _1 = { error : e _1 _1 } ; }
finally {
try {
if ( _d && ! _d . done && ( _a = _c . return ) ) yield _a . call ( _c ) ;
if ( ! _e && ! _a && ( _b = _f . return ) ) yield _b . call ( _f ) ;
}
finally { if ( e _1 ) throw e _1 . error ; }
}
@ -3394,10 +3401,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
}
// Add salt to cache version to support breaking changes in cache entry
components . push ( versionSalt ) ;
return crypto
. createHash ( 'sha256' )
. update ( components . join ( '|' ) )
. digest ( 'hex' ) ;
return crypto . createHash ( 'sha256' ) . update ( components . join ( '|' ) ) . digest ( 'hex' ) ;
}
exports . getCacheVersion = getCacheVersion ;
function getCacheEntry ( keys , paths , options ) {
@ -3450,13 +3454,21 @@ function downloadCache(archiveLocation, archivePath, options) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const archiveUrl = new url _1 . URL ( archiveLocation ) ;
const downloadOptions = ( 0 , options _1 . getDownloadOptions ) ( options ) ;
if ( downloadOptions . useAzureSdk &&
archiveUrl . hostname . endsWith ( '.blob.core.windows.net' ) ) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield ( 0 , downloadUtils _1 . downloadCacheStorageSDK ) ( archiveLocation , archivePath , downloadOptions ) ;
if ( archiveUrl . hostname . endsWith ( '.blob.core.windows.net' ) ) {
if ( downloadOptions . useAzureSdk ) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield ( 0 , downloadUtils _1 . downloadCacheStorageSDK ) ( archiveLocation , archivePath , downloadOptions ) ;
}
else if ( downloadOptions . concurrentBlobDownloads ) {
// Use concurrent implementation with HttpClient to work around blob SDK issue
yield ( 0 , downloadUtils _1 . downloadCacheHttpClientConcurrent ) ( archiveLocation , archivePath , downloadOptions ) ;
}
else {
// Otherwise, download using the Actions http-client.
yield ( 0 , downloadUtils _1 . downloadCacheHttpClient ) ( archiveLocation , archivePath ) ;
}
}
else {
// Otherwise, download using the Actions http-client.
yield ( 0 , downloadUtils _1 . downloadCacheHttpClient ) ( archiveLocation , archivePath ) ;
}
} ) ;
@ -3489,9 +3501,7 @@ function getContentRange(start, end) {
}
function uploadChunk ( httpClient , resourceUrl , openStream , start , end ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
core . debug ( ` Uploading chunk of size ${ end -
start +
1 } bytes at offset $ { start } with content range : $ { getContentRange ( start , end ) } ` );
core . debug ( ` Uploading chunk of size ${ end - start + 1 } bytes at offset ${ start } with content range: ${ getContentRange ( start , end ) } ` ) ;
const additionalHeaders = {
'Content-Type' : 'application/octet-stream' ,
'Content-Range' : getContentRange ( start , end )
@ -4866,7 +4876,13 @@ function getProxyUrl(reqUrl) {
}
} ) ( ) ;
if ( proxyVar ) {
return new URL ( proxyVar ) ;
try {
return new URL ( proxyVar ) ;
}
catch ( _a ) {
if ( ! proxyVar . startsWith ( 'http://' ) && ! proxyVar . startsWith ( 'https://' ) )
return new URL ( ` http:// ${ proxyVar } ` ) ;
}
}
else {
return undefined ;
@ -4877,6 +4893,10 @@ function checkBypass(reqUrl) {
if ( ! reqUrl . hostname ) {
return false ;
}
const reqHost = reqUrl . hostname ;
if ( isLoopbackAddress ( reqHost ) ) {
return true ;
}
const noProxy = process . env [ 'no_proxy' ] || process . env [ 'NO_PROXY' ] || '' ;
if ( ! noProxy ) {
return false ;
@ -4902,13 +4922,24 @@ function checkBypass(reqUrl) {
. split ( ',' )
. map ( x => x . trim ( ) . toUpperCase ( ) )
. filter ( x => x ) ) {
if ( upperReqHosts . some ( x => x === upperNoProxyItem ) ) {
if ( upperNoProxyItem === '*' ||
upperReqHosts . some ( x => x === upperNoProxyItem ||
x . endsWith ( ` . ${ upperNoProxyItem } ` ) ||
( upperNoProxyItem . startsWith ( '.' ) &&
x . endsWith ( ` ${ upperNoProxyItem } ` ) ) ) ) {
return true ;
}
}
return false ;
}
exports . checkBypass = checkBypass ;
/**
 * Returns true when the given host names the local machine: the literal
 * "localhost", any 127.x.x.x IPv4 address, or a bracketed IPv6 loopback
 * literal (possibly followed by a port).
 */
function isLoopbackAddress(host) {
    const candidate = host.toLowerCase();
    if (candidate === 'localhost') {
        return true;
    }
    return (candidate.startsWith('127.') ||
        candidate.startsWith('[::1]') ||
        candidate.startsWith('[0:0:0:0:0:0:0:1]'));
}
//# sourceMappingURL=proxy.js.map
/***/ } ) ,
@ -5557,7 +5588,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . downloadCacheStorageSDK = exports . downloadCacheHttpClient = exports . DownloadProgress = void 0 ;
exports . downloadCacheStorageSDK = exports . downloadCacheHttpClient Concurrent = exports . downloadCacheHttpClient = exports . DownloadProgress = void 0 ;
const core = _ _importStar ( _ _webpack _require _ _ ( 470 ) ) ;
const http _client _1 = _ _webpack _require _ _ ( 425 ) ;
const storage _blob _1 = _ _webpack _require _ _ ( 373 ) ;
@ -5714,6 +5745,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
} ) ;
}
exports . downloadCacheHttpClient = downloadCacheHttpClient ;
/**
 * Download the cache using the Actions toolkit http-client, fetching the
 * archive in concurrent ranged segments and writing each segment to its
 * offset in the destination file.
 *
 * @param archiveLocation the URL for the cache
 * @param archivePath the local path where the cache is saved
 * @param options download options (timeoutInMs, downloadConcurrency)
 */
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
    var _a;
    return __awaiter(this, void 0, void 0, function* () {
        // Open the destination up front so completed segments can be written
        // at arbitrary offsets, in whatever order they finish.
        const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
        const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
            socketTimeout: options.timeoutInMs,
            keepAlive: true
        });
        try {
            // HEAD request to learn the total size before splitting into ranged GETs.
            const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
            const lengthHeader = res.message.headers['content-length'];
            if (lengthHeader === undefined || lengthHeader === null) {
                throw new Error('Content-Length not found on blob response');
            }
            const length = parseInt(lengthHeader);
            if (Number.isNaN(length)) {
                throw new Error(`Could not interpret Content-Length: ${length}`);
            }
            // Build one lazy download task per fixed-size segment.
            const downloads = [];
            const blockSize = 4 * 1024 * 1024; // 4 MiB per segment
            for (let offset = 0; offset < length; offset += blockSize) {
                const count = Math.min(blockSize, length - offset);
                downloads.push({
                    offset,
                    promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
                        return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
                    })
                });
            }
            // reverse to use .pop instead of .shift
            downloads.reverse();
            let actives = 0;
            let bytesDownloaded = 0;
            const progress = new DownloadProgress(length);
            progress.startDisplayTimer();
            const progressFn = progress.onProgress();
            // Sparse array used as a map: segment offset -> in-flight promise.
            const activeDownloads = [];
            let nextDownload;
            // Wait for whichever in-flight segment finishes first, write it to
            // its offset in the file, and release its concurrency slot.
            const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
                const segment = yield Promise.race(Object.values(activeDownloads));
                yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
                actives--;
                delete activeDownloads[segment.offset];
                bytesDownloaded += segment.count;
                progressFn({ loadedBytes: bytesDownloaded });
            });
            // Keep at most `downloadConcurrency` (default 10) segments in flight.
            while ((nextDownload = downloads.pop())) {
                activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
                actives++;
                if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
                    yield waitAndWrite();
                }
            }
            // Drain the remaining in-flight segments.
            while (actives > 0) {
                yield waitAndWrite();
            }
        }
        finally {
            // Always release the socket pool and the file handle, even on error.
            httpClient.dispose();
            yield archiveDescriptor.close();
        }
    });
}
exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
/**
 * Downloads one segment of the archive, retrying on failure.
 *
 * Makes up to 6 attempts total (1 initial + 5 retries); each attempt is
 * capped at 30 seconds via promiseWithTimeout, and a timeout is treated
 * as a failed attempt. The last failure is rethrown to the caller.
 */
function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
    return __awaiter(this, void 0, void 0, function* () {
        const maxRetries = 5;
        const attemptTimeoutMs = 30000;
        for (let failures = 0; ; failures++) {
            try {
                // promiseWithTimeout resolves to a sentinel string on timeout.
                const result = yield promiseWithTimeout(attemptTimeoutMs, downloadSegment(httpClient, archiveLocation, offset, count));
                if (typeof result === 'string') {
                    throw new Error('downloadSegmentRetry failed due to timeout');
                }
                return result;
            }
            catch (err) {
                if (failures >= maxRetries) {
                    throw err;
                }
            }
        }
    });
}
/**
 * Downloads a single byte range of the archive with an HTTP Range request.
 *
 * @returns an object carrying the segment's offset, byte count, and data buffer
 * @throws if the HTTP client response does not implement readBodyBuffer
 */
function downloadSegment(httpClient, archiveLocation, offset, count) {
    return __awaiter(this, void 0, void 0, function* () {
        // Request exactly `count` bytes starting at `offset` (Range end is inclusive).
        const rangeHeader = { Range: `bytes=${offset}-${offset + count - 1}` };
        const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.get(archiveLocation, rangeHeader); }));
        if (!partRes.readBodyBuffer) {
            throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
        }
        const buffer = yield partRes.readBodyBuffer();
        return { offset, count, buffer };
    });
}
/ * *
* Download the cache using the Azure Storage SDK . Only call this method if the
* URL points to an Azure Storage endpoint .
@ -35745,6 +35885,19 @@ class HttpClientResponse {
} ) ) ;
} ) ;
}
readBodyBuffer ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return new Promise ( ( resolve ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const chunks = [ ] ;
this . message . on ( 'data' , ( chunk ) => {
chunks . push ( chunk ) ;
} ) ;
this . message . on ( 'end' , ( ) => {
resolve ( Buffer . concat ( chunks ) ) ;
} ) ;
} ) ) ;
} ) ;
}
}
exports . HttpClientResponse = HttpClientResponse ;
function isHttps ( requestUrl ) {
@ -40281,7 +40434,8 @@ exports.getUploadOptions = getUploadOptions;
* /
function getDownloadOptions ( copy ) {
const result = {
useAzureSdk : true ,
useAzureSdk : false ,
concurrentBlobDownloads : true ,
downloadConcurrency : 8 ,
timeoutInMs : 30000 ,
segmentTimeoutInMs : 600000 ,
@ -40291,6 +40445,9 @@ function getDownloadOptions(copy) {
if ( typeof copy . useAzureSdk === 'boolean' ) {
result . useAzureSdk = copy . useAzureSdk ;
}
if ( typeof copy . concurrentBlobDownloads === 'boolean' ) {
result . concurrentBlobDownloads = copy . concurrentBlobDownloads ;
}
if ( typeof copy . downloadConcurrency === 'number' ) {
result . downloadConcurrency = copy . downloadConcurrency ;
}
@ -47453,28 +47610,9 @@ module.exports = function(dst, src) {
"use strict" ;
// Standard TypeScript async/await down-level helper (tslib __awaiter):
// drives a generator whose `yield`s are awaited values, producing a promise
// of the generator's return value. Reuses an already-installed helper if one
// exists on `this` (the module wrapper's exports object).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Promote a plain value to an instance of the target promise type P.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // Advance the generator one step; settle when it reports completion,
        // otherwise await the yielded value and feed the result back in.
        function step(result) {
            if (result.done) {
                resolve(result.value);
            }
            else {
                adopt(result.value).then(fulfilled, rejected);
            }
        }
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript interop helper: ES-module namespaces pass through unchanged,
// while CommonJS exports are wrapped so they appear as a `default` export.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const restoreImpl _1 = _ _importDefault ( _ _webpack _require _ _ ( 835 ) ) ;
const stateProvider _1 = _ _webpack _require _ _ ( 309 ) ;
function run ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
yield ( 0 , restoreImpl _1 . default ) ( new stateProvider _1 . StateProvider ( ) ) ;
} ) ;
}
run ( ) ;
exports . default = run ;
const restoreImpl _1 = _ _webpack _require _ _ ( 835 ) ;
( 0 , restoreImpl _1 . restoreRun ) ( true ) ;
/***/ } ) ,
@ -49096,9 +49234,11 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . restoreRun = exports . restoreOnlyRun = exports . restoreImpl = void 0 ;
const cache = _ _importStar ( _ _webpack _require _ _ ( 692 ) ) ;
const core = _ _importStar ( _ _webpack _require _ _ ( 470 ) ) ;
const constants _1 = _ _webpack _require _ _ ( 694 ) ;
const stateProvider _1 = _ _webpack _require _ _ ( 309 ) ;
const utils = _ _importStar ( _ _webpack _require _ _ ( 443 ) ) ;
function restoreImpl ( stateProvider ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
@ -49149,7 +49289,40 @@ function restoreImpl(stateProvider) {
}
} ) ;
}
exports . default = restoreImpl ;
exports . restoreImpl = restoreImpl ;
/**
 * Runs the restore step with the given state provider, logging (rather than
 * rethrowing) any failure. When `earlyExit` is set, the process is terminated
 * explicitly: exit code 1 on failure, 0 on success.
 */
function run(stateProvider, earlyExit) {
    return __awaiter(this, void 0, void 0, function* () {
        let failed = false;
        try {
            yield restoreImpl(stateProvider);
        }
        catch (err) {
            failed = true;
            console.error(err);
        }
        // node will stay alive if any promises are not resolved, which is a
        // possibility if HTTP requests are dangling due to retries or timeouts.
        // Everything we care about has settled by this point, so when asked to,
        // exit explicitly instead of waiting for the event loop to drain.
        if (earlyExit) {
            process.exit(failed ? 1 : 0);
        }
    });
}
/**
 * Entry point for the restore-only action: runs restore without persisting
 * any state for a later save step (NullStateProvider).
 */
function restoreOnlyRun(earlyExit) {
    return __awaiter(this, void 0, void 0, function* () {
        const provider = new stateProvider_1.NullStateProvider();
        yield run(provider, earlyExit);
    });
}
exports.restoreOnlyRun = restoreOnlyRun;
/**
 * Entry point for the combined restore+save action: runs restore and records
 * state (via StateProvider) so the post-job save step can reuse it.
 */
function restoreRun(earlyExit) {
    return __awaiter(this, void 0, void 0, function* () {
        const provider = new stateProvider_1.StateProvider();
        yield run(provider, earlyExit);
    });
}
exports.restoreRun = restoreRun;
/***/ } ) ,