Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Integrate cache service v2 #1857

Open
wants to merge 43 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 37 commits
Commits
Show all changes
43 commits
Select commit Hold shift + click to select a range
32dbccb
Add debug message
Link- May 23, 2024
264230c
add debug
Link- May 23, 2024
c8466d1
Add twirp client
Link- May 29, 2024
66d5434
Add v2 cache upload
Link- Jun 10, 2024
dccc3f7
Fix upload mechanics
Link- Jun 10, 2024
6635d12
Implement cache v2
Link- Jun 10, 2024
146143a
Implement cache v2
Link- Jun 10, 2024
9e63a77
Implement cache v2
Link- Jun 10, 2024
5e5faf7
Use zlib for compression
Link- Jun 13, 2024
5afc042
Add download cache v2
Link- Jun 17, 2024
8d7ed4f
Fix cache service url bug
Link- Jun 17, 2024
7640cf1
Fix cache misses
Link- Jun 17, 2024
e1b7e78
Fix cache misses
Link- Jun 17, 2024
04d1a7e
Add fix cache paths
Link- Jun 17, 2024
4902d3a
Add backend ids
Link- Jun 24, 2024
70e5684
Merge branch 'main' into neo-cache-service
Link- Sep 24, 2024
07e51a4
Add cache service v2 client
Link- Sep 24, 2024
e62c642
Fix service urls
Link- Sep 24, 2024
13abc95
Port restoreCache to new service
Link- Oct 9, 2024
4d1dedf
Merge branch 'main' into neo-cache-service
Link- Oct 9, 2024
d399e33
Merge branch 'main' into neo-cache-service
Link- Oct 21, 2024
89354f6
Cleanup implementation and use tarballs instead of streaming zip
Link- Oct 21, 2024
28dbd8f
Cleanups and package refactoring
Link- Oct 24, 2024
01bf918
Refactoring & cleanup
Link- Oct 24, 2024
75cdb2c
Merge branch 'main' into neo-cache-service
Link- Nov 14, 2024
9da70ff
Post merge cleanup
Link- Nov 14, 2024
4e1912a
Restore __tests__
Link- Nov 14, 2024
d109d9c
Handle ACTIONS_CACHE_SERVICE_V2 feature flag
Link- Nov 14, 2024
9dff82c
Port dependencies & remove dependency on toolkit/artifacts
Link- Nov 14, 2024
69409b3
Fix broken test
Link- Nov 14, 2024
b2557ac
Formatting and stylistic cleanup
Link- Nov 14, 2024
19cdd5f
Linter cleanups
Link- Nov 14, 2024
83baffc
Package upgrades with security fixes
Link- Nov 14, 2024
2ee77e6
Add missing function return types
Link- Nov 14, 2024
c3e354d
Remove unnecessary debug information
Link- Nov 14, 2024
ea4bf48
Remove unnecessary debug information
Link- Nov 14, 2024
5e9ef85
Lint fixes
Link- Nov 14, 2024
ab8110f
Remove unnecessary packages from top level package.json
Link- Nov 14, 2024
555b03f
Revert package.json
Link- Nov 14, 2024
68ab87c
Add check to make sure archive has been created already
Link- Nov 14, 2024
6c11d44
Remove unnecessary type hints
Link- Nov 14, 2024
3ca8547
Merge branch 'neo-cache-service' of github.com:actions/toolkit into n…
Link- Nov 14, 2024
8616c31
Remove unused definitions
Link- Nov 14, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2,767 changes: 2,574 additions & 193 deletions package-lock.json

Large diffs are not rendered by default.

16 changes: 15 additions & 1 deletion package.json
Link- marked this conversation as resolved.
Show resolved Hide resolved
Original file line number Diff line number Diff line change
Expand Up @@ -32,5 +32,19 @@
"prettier": "^3.0.0",
"ts-jest": "^29.1.1",
"typescript": "^5.2.2"
},
"dependencies": {
Link- marked this conversation as resolved.
Show resolved Hide resolved
"@actions/artifact": "^2.1.7",
"@actions/attest": "^1.2.1",
"@actions/cache": "^3.2.4",
"@actions/core": "^1.10.1",
"@actions/exec": "^1.1.1",
"@actions/github": "^6.0.0",
"@actions/glob": "^0.4.0",
"@actions/http-client": "^2.2.1",
"@actions/io": "^1.1.3",
"@actions/tool-cache": "^2.0.1",
"tunnel": "^0.0.6",
"undici": "^6.18.1"
}
}
}
3 changes: 2 additions & 1 deletion packages/cache/__tests__/cacheHttpClient.test.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import {downloadCache, getCacheVersion} from '../src/internal/cacheHttpClient'
import {downloadCache} from '../src/internal/cacheHttpClient'
import {getCacheVersion} from '../src/internal/cacheUtils'
import {CompressionMethod} from '../src/internal/constants'
import * as downloadUtils from '../src/internal/downloadUtils'
import {DownloadOptions, getDownloadOptions} from '../src/options'
Expand Down
2 changes: 1 addition & 1 deletion packages/cache/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -51,4 +51,4 @@
"@types/semver": "^6.0.0",
"typescript": "^5.2.2"
}
}
}
295 changes: 292 additions & 3 deletions packages/cache/src/cache.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,22 @@
import * as core from '@actions/core'
import * as path from 'path'
import * as config from './internal/config'
import * as utils from './internal/cacheUtils'
import * as cacheHttpClient from './internal/cacheHttpClient'
import {createTar, extractTar, listTar} from './internal/tar'
import * as cacheTwirpClient from './internal/shared/cacheTwirpClient'
import {DownloadOptions, UploadOptions} from './options'

import {createTar, extractTar, listTar} from './internal/tar'
import {
CreateCacheEntryRequest,
CreateCacheEntryResponse,
FinalizeCacheEntryUploadRequest,
FinalizeCacheEntryUploadResponse,
GetCacheEntryDownloadURLRequest,
GetCacheEntryDownloadURLResponse
} from './generated/results/api/v1/cache'
import {CacheFileSizeLimit} from './internal/constants'
import {UploadCacheFile} from './internal/blob/upload-cache'
import {DownloadCacheFile} from './internal/blob/download-cache'
export class ValidationError extends Error {
constructor(message: string) {
super(message)
Expand Down Expand Up @@ -48,7 +60,6 @@ function checkKey(key: string): void {
*
* @returns boolean return true if Actions cache service feature is available, otherwise false
*/

export function isFeatureAvailable(): boolean {
  // The legacy cache service is considered available only when the runner
  // has provided a non-empty ACTIONS_CACHE_URL in the environment.
  const cacheUrl = process.env['ACTIONS_CACHE_URL']
  return cacheUrl !== undefined && cacheUrl !== ''
}
Expand All @@ -72,6 +83,45 @@ export async function restoreCache(
): Promise<string | undefined> {
checkPaths(paths)

const cacheServiceVersion: string = config.getCacheServiceVersion()
switch (cacheServiceVersion) {
case 'v2':
return await restoreCachev2(
paths,
primaryKey,
restoreKeys,
options,
enableCrossOsArchive
)
case 'v1':
default:
return await restoreCachev1(
Link- marked this conversation as resolved.
Show resolved Hide resolved
paths,
primaryKey,
restoreKeys,
options,
enableCrossOsArchive
)
}
}

/**
* Restores cache using the legacy Cache Service
*
* @param paths
* @param primaryKey
* @param restoreKeys
* @param options
* @param enableCrossOsArchive
* @returns
*/
async function restoreCachev1(
paths: string[],
primaryKey: string,
restoreKeys?: string[],
options?: DownloadOptions,
enableCrossOsArchive = false
): Promise<string | undefined> {
restoreKeys = restoreKeys || []
const keys = [primaryKey, ...restoreKeys]

Expand Down Expand Up @@ -153,6 +203,106 @@ export async function restoreCache(
return undefined
}

/**
* Restores cache using the new Cache Service
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
async function restoreCachev2(
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is there an opportunity to share code between restoreCachev1 and restoreCachev2 instead of duplicating it all?

That would make it easier to see what's actually changing and improve long-term maintenance of supporting both paths (such as for GHES).

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think it's easier to reason about what each function is doing this way. We don't have to couple the behaviour of each with more shared functions. The utility functions are already extracted, and what remains is the procedural implementation.

paths: string[],
primaryKey: string,
restoreKeys?: string[],
options?: DownloadOptions,
enableCrossOsArchive = false
): Promise<string | undefined> {
restoreKeys = restoreKeys || []
const keys = [primaryKey, ...restoreKeys]

core.debug('Resolved Keys:')
core.debug(JSON.stringify(keys))

if (keys.length > 10) {
throw new ValidationError(
`Key Validation Error: Keys are limited to a maximum of 10.`
)
}
for (const key of keys) {
checkKey(key)
}

let archivePath = ''
try {
const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
const backendIds: utils.BackendIds = utils.getBackendIdsFromToken()
const compressionMethod = await utils.getCompressionMethod()

const request: GetCacheEntryDownloadURLRequest = {
workflowRunBackendId: backendIds.workflowRunBackendId,
workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
Comment on lines +243 to +244
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Do we really need these in the request if they're encoded into the JWT?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

key: primaryKey,
restoreKeys,
version: utils.getCacheVersion(
paths,
compressionMethod,
enableCrossOsArchive
)
}

const response: GetCacheEntryDownloadURLResponse =
Link- marked this conversation as resolved.
Show resolved Hide resolved
await twirpClient.GetCacheEntryDownloadURL(request)

if (!response.ok) {
core.warning(`Cache not found for keys: ${keys.join(', ')}`)
return undefined
}

core.info(`Cache hit for: ${request.key}`)

if (options?.lookupOnly) {
core.info('Lookup only - skipping download')
return request.key
}

archivePath = path.join(
await utils.createTempDirectory(),
utils.getCacheFileName(compressionMethod)
)
core.debug(`Archive path: ${archivePath}`)
core.debug(`Starting download of artifact to: ${archivePath}`)
Link- marked this conversation as resolved.
Show resolved Hide resolved

await DownloadCacheFile(response.signedDownloadUrl, archivePath)

const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
core.info(
`Cache Size: ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B)`
)

if (core.isDebug()) {
await listTar(archivePath, compressionMethod)
}

await extractTar(archivePath, compressionMethod)
core.info('Cache restored successfully')

return request.key
} catch (error) {
throw new Error(`Failed to restore: ${error.message}`)
} finally {
try {
await utils.unlinkFile(archivePath)
Link- marked this conversation as resolved.
Show resolved Hide resolved
} catch (error) {
core.debug(`Failed to delete archive: ${error}`)
}
}
}

/**
* Saves a list of files with the specified key
*
Expand All @@ -171,6 +321,31 @@ export async function saveCache(
checkPaths(paths)
checkKey(key)

const cacheServiceVersion: string = config.getCacheServiceVersion()
switch (cacheServiceVersion) {
case 'v2':
return await saveCachev2(paths, key, options, enableCrossOsArchive)
case 'v1':
default:
return await saveCachev1(paths, key, options, enableCrossOsArchive)
}
}

/**
* Save cache using the legacy Cache Service
*
* @param paths
* @param key
* @param options
* @param enableCrossOsArchive
* @returns
*/
async function saveCachev1(
paths: string[],
key: string,
options?: UploadOptions,
enableCrossOsArchive = false
): Promise<number> {
const compressionMethod = await utils.getCompressionMethod()
let cacheId = -1

Expand Down Expand Up @@ -258,3 +433,117 @@ export async function saveCache(

return cacheId
}

/**
* Save cache using the new Cache Service
*
* @param paths
* @param key
* @param options
* @param enableCrossOsArchive
* @returns
*/
/**
 * Save cache using the new Cache Service
 *
 * @param paths a list of file paths to be cached
 * @param key an explicit key for saving the cache
 * @param options cache upload options — NOTE(review): currently unread by the
 *   v2 upload path; kept for interface parity with saveCachev1. TODO confirm
 *   whether upload options should be honored here.
 * @param enableCrossOsArchive an optional boolean enabled to save a cache on windows that can be restored on any platform
 * @returns number the numeric cache entry id on success, or -1 if the save failed
 * @throws Error when no cache paths resolve to existing files
 */
async function saveCachev2(
  paths: string[],
  key: string,
  options?: UploadOptions,
  enableCrossOsArchive = false
): Promise<number> {
  // BackendIds are retrieved from the signed JWT
  const backendIds: utils.BackendIds = utils.getBackendIdsFromToken()
  const compressionMethod = await utils.getCompressionMethod()
  const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
  let cacheId = -1

  const cachePaths = await utils.resolvePaths(paths)
  core.debug('Cache Paths:')
  core.debug(`${JSON.stringify(cachePaths)}`)

  if (cachePaths.length === 0) {
    throw new Error(
      `Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`
    )
  }

  const archiveFolder = await utils.createTempDirectory()
  const archivePath = path.join(
    archiveFolder,
    utils.getCacheFileName(compressionMethod)
  )

  core.debug(`Archive Path: ${archivePath}`)

  try {
    await createTar(archiveFolder, cachePaths, compressionMethod)
    if (core.isDebug()) {
      await listTar(archivePath, compressionMethod)
    }

    const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
    core.debug(`File Size: ${archiveFileSize}`)

    // For GHES, this check will take place in ReserveCache API with enterprise file size limit
    if (archiveFileSize > CacheFileSizeLimit && !utils.isGhes()) {
      throw new Error(
        `Cache size of ~${Math.round(
          archiveFileSize / (1024 * 1024)
        )} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`
      )
    }

    core.debug('Reserving Cache')
    // Version fingerprints the cached paths + compression + OS scope so the
    // entry cannot be restored by an incompatible consumer.
    const version = utils.getCacheVersion(
      paths,
      compressionMethod,
      enableCrossOsArchive
    )
    const request: CreateCacheEntryRequest = {
      workflowRunBackendId: backendIds.workflowRunBackendId,
      workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
      key,
      version
    }
    const response: CreateCacheEntryResponse =
      await twirpClient.CreateCacheEntry(request)
    if (!response.ok) {
      throw new ReserveCacheError(
        `Unable to reserve cache with key ${key}, another job may be creating this cache.`
      )
    }

    core.debug(`Attempting to upload cache located at: ${archivePath}`)
    await UploadCacheFile(response.signedUploadUrl, archivePath)

    const finalizeRequest: FinalizeCacheEntryUploadRequest = {
      workflowRunBackendId: backendIds.workflowRunBackendId,
      workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
      key,
      version,
      sizeBytes: `${archiveFileSize}`
    }

    const finalizeResponse: FinalizeCacheEntryUploadResponse =
      await twirpClient.FinalizeCacheEntryUpload(finalizeRequest)
    core.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`)

    if (!finalizeResponse.ok) {
      throw new Error(
        `Unable to finalize cache with key ${key}, another job may be finalizing this cache.`
      )
    }

    // entryId travels as a string over the wire; parse with an explicit radix
    cacheId = parseInt(finalizeResponse.entryId, 10)
  } catch (error) {
    // All failures (including ReserveCacheError) degrade to a warning and a
    // -1 cacheId rather than failing the workflow.
    const typedError = error as Error
    core.warning(`Failed to save: ${typedError.message}`)
  } finally {
    // Try to delete the archive to save space
    try {
      await utils.unlinkFile(archivePath)
    } catch (error) {
      core.debug(`Failed to delete archive: ${error}`)
    }
  }

  return cacheId
}
Loading
Loading