Add GCS cache backend with GitHub fallback

Port GCS support from danySam/gcs-cache. When gcs-bucket input is set,
cache to Google Cloud Storage via Application Default Credentials; else
fall back to GitHub's cache service transparently.

- New inputs: gcs-bucket, gcs-path-prefix (default: github-cache)
- New src/utils/gcsCache.ts wraps @actions/cache restore/save
- restoreImpl/saveImpl swap @actions/cache import for local wrapper
- Adds @google-cloud/storage ^7.16.0, bumps typescript to ^5.8.3
- Regenerated dist/ via npm run build

Skipped: upstream workflow removals, README/examples/RELEASES churn,
licensed/codeql config, GCS integration-test workflow.
pull/1751/head
Jasperhino 4 days ago
parent 27d5ce7f10
commit d751c19fb0

@ -1,6 +1,6 @@
name: 'Cache'
description: 'Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub'
name: 'Cache with GCS Support'
description: 'Cache artifacts to Google Cloud Storage or GitHub with automatic fallback'
author: 'danySam'
inputs:
path:
description: 'A list of files, directories, and wildcard patterns to cache and restore'
@ -26,6 +26,13 @@ inputs:
description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache'
default: 'false'
required: false
gcs-bucket:
description: 'Google Cloud Storage bucket name to use for caching. When provided, GCS will be used as the cache backend.'
required: false
gcs-path-prefix:
description: 'Optional prefix path within the GCS bucket for cache files'
required: false
default: 'github-cache'
save-always:
description: 'Run the post step to save the cache even if another step before fails'
default: 'false'
@ -38,7 +45,7 @@ outputs:
cache-hit:
description: 'A boolean value to indicate an exact match was found for the primary key'
runs:
using: 'node24'
using: 'node20'
main: 'dist/restore/index.js'
post: 'dist/save/index.js'
post-if: "success()"

File diff suppressed because one or more lines are too long

171312
dist/restore/index.js vendored

File diff suppressed because one or more lines are too long

171204
dist/save-only/index.js vendored

File diff suppressed because one or more lines are too long

171204
dist/save/index.js vendored

File diff suppressed because one or more lines are too long

11547
package-lock.json generated

File diff suppressed because it is too large Load Diff

@ -1,6 +1,6 @@
{
"name": "cache",
"version": "5.0.4",
"version": "4.2.3",
"private": true,
"description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js",
@ -23,32 +23,30 @@
"author": "GitHub",
"license": "MIT",
"dependencies": {
"@actions/cache": "^5.0.5",
"@actions/core": "^2.0.3",
"@actions/exec": "^2.0.0",
"@actions/io": "^2.0.0"
"@actions/cache": "^4.0.3",
"@actions/core": "^1.11.1",
"@actions/exec": "^1.1.1",
"@actions/io": "^1.1.3",
"@google-cloud/storage": "^7.16.0"
},
"devDependencies": {
"@types/jest": "^29.5.14",
"@types/jest": "^27.5.2",
"@types/nock": "^11.1.0",
"@types/node": "^24.1.0",
"@typescript-eslint/eslint-plugin": "^7.2.0",
"@typescript-eslint/parser": "^7.2.0",
"@types/node": "^16.18.3",
"@typescript-eslint/eslint-plugin": "^5.45.0",
"@typescript-eslint/parser": "^5.45.0",
"@vercel/ncc": "^0.38.3",
"eslint": "^8.28.0",
"eslint-config-prettier": "^9.1.2",
"eslint-plugin-import": "^2.32.0",
"eslint-plugin-jest": "^27.9.0",
"eslint-plugin-prettier": "^5.5.3",
"eslint-plugin-simple-import-sort": "^12.1.1",
"jest": "^29.7.0",
"jest-circus": "^29.7.0",
"eslint-config-prettier": "^8.5.0",
"eslint-plugin-import": "^2.26.0",
"eslint-plugin-jest": "^26.9.0",
"eslint-plugin-prettier": "^4.2.1",
"eslint-plugin-simple-import-sort": "^7.0.0",
"jest": "^28.1.3",
"jest-circus": "^27.5.1",
"nock": "^13.2.9",
"prettier": "^3.6.2",
"ts-jest": "^29.4.0",
"prettier": "^2.8.0",
"ts-jest": "^28.0.8",
"typescript": "^5.8.3"
},
"engines": {
"node": ">=24"
}
}

@ -1,6 +1,6 @@
name: 'Restore Cache'
description: 'Restore Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub'
name: 'Restore Cache with GCS Support'
description: 'Restore cache artifacts from Google Cloud Storage or GitHub with automatic fallback'
author: 'danySam'
inputs:
path:
description: 'A list of files, directories, and wildcard patterns to restore'
@ -23,6 +23,13 @@ inputs:
description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache'
default: 'false'
required: false
gcs-bucket:
description: 'Google Cloud Storage bucket name to use for caching. When provided, GCS will be used as the cache backend.'
required: false
gcs-path-prefix:
description: 'Optional prefix path within the GCS bucket for cache files'
default: 'github-cache'
required: false
outputs:
cache-hit:
description: 'A boolean value to indicate an exact match was found for the primary key'
@ -31,7 +38,7 @@ outputs:
cache-matched-key:
description: 'Key of the cache that was restored, it could either be the primary key on cache-hit or a partial/complete match of one of the restore keys'
runs:
using: 'node24'
using: 'node20'
main: '../dist/restore-only/index.js'
branding:
icon: 'archive'

@ -1,6 +1,6 @@
name: 'Save a cache'
description: 'Save Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub'
name: 'Save Cache with GCS Support'
description: 'Save cache artifacts to Google Cloud Storage or GitHub with automatic fallback'
author: 'danySam'
inputs:
path:
description: 'A list of files, directories, and wildcard patterns to cache'
@ -15,8 +15,15 @@ inputs:
description: 'An optional boolean when enabled, allows windows runners to save caches that can be restored on other platforms'
default: 'false'
required: false
gcs-bucket:
description: 'Google Cloud Storage bucket name to use for caching. When provided, GCS will be used as the cache backend.'
required: false
gcs-path-prefix:
description: 'Optional prefix path within the GCS bucket for cache files'
default: 'github-cache'
required: false
runs:
using: 'node24'
using: 'node20'
main: '../dist/save-only/index.js'
branding:
icon: 'archive'

@ -5,7 +5,9 @@ export enum Inputs {
UploadChunkSize = "upload-chunk-size", // Input for cache, save action
EnableCrossOsArchive = "enableCrossOsArchive", // Input for cache, restore, save action
FailOnCacheMiss = "fail-on-cache-miss", // Input for cache, restore action
LookupOnly = "lookup-only" // Input for cache, restore action
LookupOnly = "lookup-only", // Input for cache, restore action
GCSBucket = "gcs-bucket", // Input for cache, restore, save action
GCSPathPrefix = "gcs-path-prefix" // Input for cache, restore, save action
}
export enum Outputs {

@ -1,4 +1,3 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import { Events, Inputs, Outputs, State } from "./constants";
@ -8,6 +7,7 @@ import {
StateProvider
} from "./stateProvider";
import * as utils from "./utils/actionUtils";
import * as cache from "./utils/gcsCache";
export async function restoreImpl(
stateProvider: IStateProvider,

@ -1,4 +1,3 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import { Events, Inputs, State } from "./constants";
@ -8,6 +7,7 @@ import {
StateProvider
} from "./stateProvider";
import * as utils from "./utils/actionUtils";
import * as cache from "./utils/gcsCache";
// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to

@ -1,7 +1,7 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import { RefKey } from "../constants";
import { Inputs, RefKey } from "../constants";
export function isGhes(): boolean {
const ghUrl = new URL(
@ -66,7 +66,42 @@ export function getInputAsBool(
return result.toLowerCase() === "true";
}
/**
 * Decides whether the GCS cache backend should be used.
 *
 * Only the presence of the `gcs-bucket` input is checked here. Credential
 * validation is intentionally deferred to the Storage client, which uses
 * Application Default Credentials (ADC) and therefore supports:
 * - Service account JSON key file (GOOGLE_APPLICATION_CREDENTIALS)
 * - Workload Identity Federation
 * - Metadata server-based auth (GCE, GKE)
 * - User credentials from gcloud CLI
 */
export function isGCSAvailable(): boolean {
    try {
        const bucketName = core.getInput(Inputs.GCSBucket);
        if (bucketName) {
            core.info(`GCS bucket configured: ${bucketName}`);
            return true;
        }
        core.info(
            "GCS bucket name not provided, falling back to GitHub cache"
        );
        return false;
    } catch (error) {
        logWarning(
            `Failed to check GCS availability: ${(error as Error).message}`
        );
        return false;
    }
}
export function isCacheFeatureAvailable(): boolean {
// Check if GCS cache is available
if (isGCSAvailable()) {
return true;
}
// Otherwise, check GitHub cache
if (cache.isFeatureAvailable()) {
return true;
}

@ -0,0 +1,275 @@
import * as cache from "@actions/cache";
import * as utils from "@actions/cache/lib/internal/cacheUtils";
import { CompressionMethod } from "@actions/cache/lib/internal/constants";
import {
createTar,
extractTar,
listTar
} from "@actions/cache/lib/internal/tar";
import { DownloadOptions, UploadOptions } from "@actions/cache/lib/options";
import * as core from "@actions/core";
import { Storage } from "@google-cloud/storage";
import * as path from "path";
import { Inputs } from "../constants";
import { isGCSAvailable } from "./actionUtils";
// Object-name prefix used when the `gcs-path-prefix` input is empty.
const DEFAULT_PATH_PREFIX = "github-cache";

/**
 * Constructs a GCS client backed by Application Default Credentials.
 *
 * @returns the client, or null (after logging a warning) when construction
 *          fails — callers treat null as "GCS unusable, fall back".
 */
function getGCSClient(): Storage | null {
    try {
        core.info("Initializing GCS client");
        const client = new Storage();
        return client;
    } catch (error) {
        const message = (error as Error).message;
        core.warning(`Failed to initialize GCS client: ${message}`);
        return null;
    }
}
/**
 * Restores a cache, preferring GCS when a bucket is configured and falling
 * back to GitHub's cache service when GCS is unconfigured, misses, or errors.
 *
 * @param paths                files/directories the archive restores into
 * @param primaryKey           exact key to look up first
 * @param restoreKeys          ordered fallback key prefixes
 * @param options              download options (e.g. lookupOnly)
 * @param enableCrossOsArchive forwarded to the GitHub backend only
 * @returns the matched key, or undefined when nothing was restored
 */
export async function restoreCache(
    paths: string[],
    primaryKey: string,
    restoreKeys?: string[],
    options?: DownloadOptions,
    enableCrossOsArchive?: boolean
): Promise<string | undefined> {
    if (!isGCSAvailable()) {
        core.info("GCS not configured, using GitHub cache");
        return await cache.restoreCache(
            paths,
            primaryKey,
            restoreKeys,
            options,
            enableCrossOsArchive
        );
    }

    try {
        const restoredKey = await restoreFromGCS(
            paths,
            primaryKey,
            restoreKeys,
            options
        );
        if (restoredKey) {
            core.info(`Cache restored from GCS with key: ${restoredKey}`);
            return restoredKey;
        }
        core.info("Cache not found in GCS, falling back to GitHub cache");
    } catch (error) {
        core.warning(
            `Failed to restore from GCS: ${(error as Error).message}`
        );
        core.info("Falling back to GitHub cache");
    }

    // GCS missed or failed — fall back to GitHub cache.
    return await cache.restoreCache(
        paths,
        primaryKey,
        restoreKeys,
        options,
        enableCrossOsArchive
    );
}
/**
 * Saves a cache, preferring GCS when a bucket is configured and falling back
 * to GitHub's cache service when GCS is unconfigured or the GCS save fails.
 *
 * @param paths                files/directories to archive
 * @param key                  cache key to save under
 * @param options              upload options (GitHub backend only)
 * @param enableCrossOsArchive forwarded to the GitHub backend only
 * @returns 1 on a successful GCS save, otherwise the GitHub cache id
 */
export async function saveCache(
    paths: string[],
    key: string,
    options?: UploadOptions,
    enableCrossOsArchive?: boolean
): Promise<number> {
    if (isGCSAvailable()) {
        try {
            const result = await saveToGCS(paths, key);
            if (result) {
                core.info(`Cache saved to GCS with key: [${key} | ${result}]`);
                return 1; // Success ID
            }
            // Fix: previously this branch returned -1 despite the warning
            // promising a fallback, so the GitHub save never ran. Fall
            // through to the GitHub backend instead, matching the catch path.
            core.warning("Failed to save to GCS, falling back to GitHub cache");
        } catch (error) {
            core.warning(`Failed to save to GCS: ${(error as Error).message}`);
            core.info("Falling back to GitHub cache");
        }
    } else {
        core.info("GCS not configured, using GitHub cache");
    }

    // Fall back to GitHub cache
    return await cache.saveCache(paths, key, options, enableCrossOsArchive);
}
/**
 * Reports whether any cache backend is usable: GCS (bucket input set) or
 * GitHub's cache service.
 */
export function isFeatureAvailable(): boolean {
    if (isGCSAvailable()) {
        return true;
    }
    return cache.isFeatureAvailable();
}
async function restoreFromGCS(
_paths: string[], // validate paths?
primaryKey: string,
restoreKeys: string[] = [],
options?: DownloadOptions
): Promise<string | undefined> {
const storage = getGCSClient();
if (!storage) {
return undefined;
}
const bucket = core.getInput(Inputs.GCSBucket);
const pathPrefix =
core.getInput(Inputs.GCSPathPrefix) || DEFAULT_PATH_PREFIX;
const compressionMethod = await utils.getCompressionMethod();
const archiveFolder = await utils.createTempDirectory();
const archivePath = path.join(
archiveFolder,
utils.getCacheFileName(compressionMethod)
);
const keys = [primaryKey, ...restoreKeys];
const gcsPath = await findFileOnGCS(
storage,
bucket,
pathPrefix,
keys,
compressionMethod
);
if (!gcsPath) {
core.info(`No matching cache found`);
return undefined;
}
// If lookup only, just return the key
if (options?.lookupOnly) {
core.info(`Cache found in GCS with key: ${gcsPath}`);
return gcsPath;
}
try {
core.info(`Downloading from GCS: ${bucket}/${gcsPath}`);
const file = storage.bucket(bucket).file(gcsPath);
await file.download({ destination: archivePath });
if (core.isDebug()) {
await listTar(archivePath, compressionMethod);
}
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(
`Cache Size: ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B)`
);
await extractTar(archivePath, compressionMethod);
core.info("Cache restored successfully");
return gcsPath;
} catch (error) {
core.warning(`Failed to restore: ${(error as Error).message}`);
} finally {
try {
await utils.unlinkFile(archivePath);
} catch (error) {
core.debug(`Failed to delete archive: ${error}`);
}
}
}
/**
 * Maps a cache key to its object name inside the bucket:
 * `<pathPrefix>/<key>.<archive file name for the compression method>`.
 */
function getGCSPath(
    pathPrefix: string,
    key: string,
    compressionMethod: CompressionMethod
): string {
    const fileName = utils.getCacheFileName(compressionMethod);
    return `${pathPrefix}/${key}.${fileName}`;
}
/**
 * Archives the given paths and uploads the archive to the configured bucket.
 *
 * @param paths files/directories to include in the archive
 * @param key   cache key; becomes part of the object name
 * @returns the uploaded object's metadata id, or undefined when no GCS
 *          client could be created
 * @throws when no paths resolve, or when archiving/uploading fails
 */
async function saveToGCS(
    paths: string[],
    key: string
): Promise<string | undefined> {
    const storage = getGCSClient();
    if (!storage) {
        return undefined;
    }

    const bucket = core.getInput(Inputs.GCSBucket);
    const pathPrefix =
        core.getInput(Inputs.GCSPathPrefix) || DEFAULT_PATH_PREFIX;
    const compressionMethod = await utils.getCompressionMethod();

    const cachePaths = await utils.resolvePaths(paths);
    core.debug("Cache Paths:");
    core.debug(`${JSON.stringify(cachePaths)}`);
    if (cachePaths.length === 0) {
        throw new Error(
            `Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`
        );
    }

    const archiveFolder = await utils.createTempDirectory();
    const archivePath = path.join(
        archiveFolder,
        utils.getCacheFileName(compressionMethod)
    );
    core.debug(`Archive Path: ${archivePath}`);

    try {
        await createTar(archiveFolder, cachePaths, compressionMethod);
        if (core.isDebug()) {
            await listTar(archivePath, compressionMethod);
        }

        const destination = getGCSPath(pathPrefix, key, compressionMethod);
        core.info(`Uploading to GCS: ${bucket}/${destination}`);
        const uploaded = await storage.bucket(bucket).upload(archivePath, {
            destination,
            resumable: false
        });
        return uploaded[0].metadata.id;
    } catch (error) {
        const message = `Error creating or uploading cache: ${
            (error as Error).message
        }`;
        core.warning(message);
        throw new Error(message);
    } finally {
        // Best-effort cleanup of the local archive regardless of outcome.
        try {
            await utils.unlinkFile(archivePath);
        } catch (error) {
            core.debug(`Failed to delete archive: ${error}`);
        }
    }
}
async function findFileOnGCS(
storage: Storage,
bucket: string,
pathPrefix: string,
keys: string[],
compressionMethod: CompressionMethod
): Promise<string | undefined> {
for (const key of keys) {
const gcsPath = getGCSPath(pathPrefix, key, compressionMethod);
if (await checkFileExists(storage, bucket, gcsPath)) {
core.info(`Found file on bucket: ${bucket} with key: ${gcsPath}`);
return gcsPath;
}
}
return undefined;
}
/** Reports whether the given object path exists in the bucket. */
async function checkFileExists(
    storage: Storage,
    bucket: string,
    path: string
): Promise<boolean> {
    const response = await storage.bucket(bucket).file(path).exists();
    return response[0];
}
Loading…
Cancel
Save