inject-collector-cache-config supports multiple scan dir
Closes gh-20
rwinch committed Aug 17, 2024
1 parent f561814 commit 60936de
Showing 8 changed files with 171 additions and 89 deletions.
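
For orientation: with this change a collector descriptor can declare several scan entries that all feed one cache. A minimal sketch of that shape, lifted from the new test added below (the directory names are only illustrative):

const descriptor = {
  ext: {
    collector: [
      { scan: { dir: './build/antora-resources' } },
      { scan: { dir: './build/antora-resources-2' } },
    ],
  },
}

Previously the cache-scandir command both copied the single scan dir and wrote the zip; after this commit the injected run step only copies (copy-recursive.js), and the zip for each cache is written once, during beforePublish.
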
68 changes: 0 additions & 68 deletions lib/cache-scandir/cache-scandir.js

This file was deleted.

26 changes: 26 additions & 0 deletions lib/cache-scandir/copy-recursive.js
@@ -0,0 +1,26 @@
'use strict'

const fs = require('fs')
const path = require('path')

const copyRecursiveSync = function (src, dest) {
  const exists = fs.existsSync(src)
  const stats = exists && fs.statSync(src)
  const isDirectory = exists && stats.isDirectory()
  if (isDirectory) {
    if (!fs.existsSync(dest)) {
      fs.mkdirSync(dest)
    }
    fs.readdirSync(src).forEach(function (childItemName) {
      copyRecursiveSync(path.join(src, childItemName), path.join(dest, childItemName))
    })
  } else {
    fs.copyFileSync(src, dest)
  }
}

function copyRecursive (scanDir, cacheDir) {
  copyRecursiveSync(scanDir, cacheDir)
}

module.exports = copyRecursive
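
A rough usage sketch of the new helper (the require path comes from the subpath export added in package.json below; the filesystem paths are hypothetical): calling it repeatedly with different scan dirs against the same cache dir merges the trees, because copyRecursiveSync reuses destination directories that already exist instead of recreating them.

const copyRecursive = require('@springio/antora-extensions/cache-scandir/copy-recursive')

// First call seeds the cache; the second merges a second scan dir into it.
copyRecursive('/tmp/build/antora-resources', '/tmp/collector-cache/example')
copyRecursive('/tmp/build/antora-resources-2', '/tmp/collector-cache/example')
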
6 changes: 3 additions & 3 deletions lib/cache-scandir/index.js
@@ -1,8 +1,8 @@
'use strict'

const cacheScanDir = require('./cache-scandir')
const copyRecursive = require('./copy-recursive')

const [, , ...args] = process.argv
const [scanDir, cacheDir, zipFile] = args
const [scanDir, cacheDir] = args

cacheScanDir(scanDir, cacheDir, zipFile)
copyRecursive(scanDir, cacheDir)
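
A minimal sketch of the CLI contract after this change, assuming the package is installed and the directories exist: the third (zip file) argument is gone, so a caller passes only a scan dir and a cache dir, and the extension issues one invocation per scan dir against the same cache dir.

const { execFileSync } = require('node:child_process')

// Resolves to lib/cache-scandir/index.js via the './cache-scandir' export.
const cli = require.resolve('@springio/antora-extensions/cache-scandir')

// One invocation per scan dir; both target the same cache dir (illustrative paths).
execFileSync('node', [cli, '/tmp/build/antora-resources', '/tmp/collector-cache/example'])
execFileSync('node', [cli, '/tmp/build/antora-resources-2', '/tmp/collector-cache/example'])
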
72 changes: 64 additions & 8 deletions lib/inject-collector-cache-config-extension.js
@@ -2,8 +2,9 @@

const expandPath = require('@antora/expand-path-helper')
const ospath = require('path')
const resolvedCacheScanDirIndexJs = require.resolve('@springio/antora-extensions/cache-scandir')
const resolvedCopyRecursiveJs = require.resolve('@springio/antora-extensions/cache-scandir')
const { createHash } = require('crypto')
const archiver = require('archiver')

module.exports.register = function ({ playbook, config = {} }) {
const logger = this.getLogger('inject-collector-cache-config-extension')
@@ -24,6 +25,7 @@ module.exports.register = function ({ playbook, config = {} }) {
if (!fs.existsSync(outputDir)) {
fs.mkdirSync(outputDir, { recursive: true })
}
const zipInfo = []
this.once('contentAggregated', async ({ playbook, contentAggregate }) => {
for (const { origins } of contentAggregate) {
for (const origin of origins) {
@@ -73,18 +75,23 @@ module.exports.register = function ({ playbook, config = {} }) {
const { scan: scanConfig = [] } = collector
// cache the output of the build
const scanDir = expandPath(scanConfig.dir, expandPathContext)
logger.info(
`Configuring collector to cache '${scanDir}' at '${cacheDir}' and zip the results at '${zipCacheFile}'`
)
const cachedCollectorConfig = createCachedCollectorConfig(scanDir, cacheDir, zipCacheFile)
logger.info(`Configuring collector to cache '${scanDir}' at '${cacheDir}'`)
const cachedCollectorConfig = createCachedCollectorConfig(scanDir, cacheDir)
normalizedCollectorConfig.push.apply(normalizedCollectorConfig, cachedCollectorConfig)
// add the zip of cache to be published
})
// add the zip of cache to be published
zipInfo.push({ cacheDir, zipCacheFile })
}
}
}
}
})
this.once('beforePublish', async () => {
for (const info of zipInfo) {
console.log(JSON.stringify(info))
await zip(fs, info.cacheDir, info.zipCacheFile)
}
})
}

function download (get, url) {
@@ -110,12 +117,61 @@ function generateWorktreeFolderName ({ url, gitdir, worktree }) {
  return `${url.substr(url.lastIndexOf('/') + 1)}-${createHash('sha1').update(url).digest('hex')}`


function createCachedCollectorConfig (scanDir, cacheDir, zipFileName, siteDir) {
function createCachedCollectorConfig (scanDir, cacheDir) {
  return [
    {
      run: {
        command: `node '${resolvedCacheScanDirIndexJs}' '${scanDir}' '${cacheDir}' '${zipFileName}'`,
        command: `node '${resolvedCopyRecursiveJs}' '${scanDir}' '${cacheDir}'`,
      },
    },
  ]
}

const zip = async function (fs, src, destination) {
  const path = require('path')
  const destParent = path.dirname(destination)
  if (!fs.existsSync(destParent)) {
    fs.mkdirs(destParent, { recursive: true })
  }
  const output = fs.createWriteStream(destination)
  const archive = archiver('zip', {
    zlib: { level: 9 }, // Sets the compression level.
  })
  // listen for all archive data to be written
  // 'close' event is fired only when a file descriptor is involved
  output.on('close', function () {
    console.log(archive.pointer() + ' total bytes')
    console.log('archiver has been finalized and the output file descriptor has closed.')
  })

  // This event is fired when the data source is drained no matter what was the data source.
  // It is not part of this library but rather from the NodeJS Stream API.
  // @see: https://nodejs.org/api/stream.html#stream_event_end
  output.on('end', function () {
    console.log('Data has been drained')
  })

  // good practice to catch warnings (ie stat failures and other non-blocking errors)
  archive.on('warning', function (err) {
    if (err.code === 'ENOENT') {
      // log warning
    } else {
      // throw error
      throw err
    }
  })

  // good practice to catch this error explicitly
  archive.on('error', function (err) {
    throw err
  })

  // pipe archive data to the file
  archive.pipe(output)

  archive.directory(src, false)

  await archive.finalize()

  console.log(`Saving ${src} into ${destination}`)
}
1 change: 1 addition & 0 deletions package.json
@@ -41,6 +41,7 @@
"./tabs-migration-extension": "./lib/tabs-migration-extension.js",
"./static-page-extension": "./lib/static-page-extension.js",
"./cache-scandir": "./lib/cache-scandir/index.js",
"./cache-scandir/copy-recursive": "./lib/cache-scandir/copy-recursive.js",
"./set-algolia-env-extension": "./lib/set-algolia-env-extension.js",
"./static-pages/search": "./lib/static/search.adoc",
"./static-pages/spring-projects": "./lib/static/spring-projects.adoc",
15 changes: 12 additions & 3 deletions test/cache-scandir-test.js → test/copy-recursive-test.js
@@ -6,6 +6,7 @@ const { name: packageName } = require('#package')
const fs = require('fs')
const os = require('os')
const ospath = require('node:path')
const copyRecursive = require(packageName + '/cache-scandir/copy-recursive')

const FIXTURES_DIR = ospath.join(__dirname, 'fixtures')
describe('cache-scandir-command', () => {
@@ -28,12 +29,20 @@ describe('cache-scandir-command', () => {
it('caches the result', () => {
const scanDir = ospath.join(FIXTURES_DIR, 'generated-antora-resources')
const cacheDir = ospath.join(workSpaceDir, 'cache')
const zipFile = ospath.join(FIXTURES_DIR, '.cache/6ca8fb4-1.0.0.zip')
process.argv = ['', '', scanDir, cacheDir, zipFile]
process.argv = ['', '', scanDir, cacheDir]
require(packageName + '/cache-scandir')
expect(fs.existsSync(zipFile)).to.eql(true)
expect(fs.existsSync(ospath.join(cacheDir, 'antora.yml'))).to.eql(true)
expect(fs.existsSync(ospath.join(cacheDir, 'modules/ROOT/pages/generated.adoc'))).to.eql(true)
})
})
it('works when multiple scan_dir and contains existing dir', () => {
const scanDir = ospath.join(FIXTURES_DIR, 'generated-antora-resources')
const cacheDir = ospath.join(workSpaceDir, 'cache')
copyRecursive(scanDir, cacheDir)
const scanDir2 = ospath.join(FIXTURES_DIR, 'generated-antora-resources-2')
copyRecursive(scanDir2, cacheDir)
expect(fs.existsSync(ospath.join(cacheDir, 'antora.yml'))).to.eql(true)
expect(fs.existsSync(ospath.join(cacheDir, 'modules/ROOT/pages/generated.adoc'))).to.eql(true)
expect(fs.existsSync(ospath.join(cacheDir, 'modules/ROOT/pages/generated2.adoc'))).to.eql(true)
})
})
3 changes: 3 additions & 0 deletions test/fixtures/generated-antora-resources-2/modules/ROOT/pages/generated2.adoc
@@ -0,0 +1,3 @@
= Generated

This is generated
69 changes: 62 additions & 7 deletions test/inject-collector-cache-config-extension-test.js
@@ -185,16 +185,75 @@ describe('inject-collector-cache-config-extension', () => {
},
{
run: {
command: `node '${resolvedCacheScanDirIndexJs}' '${scan}' '${cache}' '${zipFileName}'`,
command: `node '${resolvedCacheScanDirIndexJs}' '${scan}' '${cache}'`,
},
},
],
}
expect(actual).to.eql(expected)
expect(generatorContext.messages).to.eql([
`Unable to restore cache from ${httpServerUrl}/.cache/2c4fb2f-1.0.0.zip`,
`Configuring collector to cache '${scan}' at '${cache}' and zip the results at '${zipFileName}'`,
`Configuring collector to cache '${scan}' at '${cache}'`,
])
expect(fs.existsSync(zipFileName)).to.eql(false)
await generatorContext.beforePublish()
expect(fs.existsSync(zipFileName)).to.eql(true)
})

it('cache not found multiple scans', async () => {
const tag = createTag('1.0.0')
tag.origins[0].refhash = tag.origins[0].refhash.split('').reverse().join('')
// make multiple scan dirs
tag.origins[0].descriptor = {
ext: {
collector: [{ scan: { dir: './build/antora-resources' } }, { scan: { dir: './build/antora-resources-2' } }],
},
}
contentAggregate = [tag]
ext.register.call(generatorContext, { playbook })
await generatorContext.contentAggregated({ playbook, contentAggregate })
expect(fs.existsSync(ospath.join(cacheDir, 'collector-cache/spring-security'))).to.equal(true)
const actual = contentAggregate[0].origins[0].descriptor.ext
const scan = ospath.join(cacheDir, 'collector/spring-security/build/antora-resources')
const scan2 = ospath.join(cacheDir, 'collector/spring-security/build/antora-resources-2')
const cache = ospath.join(cacheDir, 'collector-cache/spring-security/2c4fb2f-1.0.0')
const zipFileName = ospath.join(
playbookDir,
'build/antora/inject-collector-cache-config-extension/.cache/2c4fb2f-1.0.0.zip'
)
const expected = {
collector: [
{
scan: {
dir: './build/antora-resources',
},
},
{
scan: {
dir: './build/antora-resources-2',
},
},
{
run: {
command: `node '${resolvedCacheScanDirIndexJs}' '${scan}' '${cache}'`,
},
},
{
run: {
command: `node '${resolvedCacheScanDirIndexJs}' '${scan2}' '${cache}'`,
},
},
],
}
expect(actual).to.eql(expected)
expect(generatorContext.messages).to.eql([
`Unable to restore cache from ${httpServerUrl}/.cache/2c4fb2f-1.0.0.zip`,
`Configuring collector to cache '${scan}' at '${cache}'`,
`Configuring collector to cache '${scan2}' at '${cache}'`,
])
expect(fs.existsSync(zipFileName)).to.eql(false)
await generatorContext.beforePublish()
expect(fs.existsSync(zipFileName)).to.eql(true)
})
it('cache downloaded', async () => {
const zipFileName = ospath.join(
@@ -259,16 +318,12 @@ describe('inject-collector-cache-config-extension', () => {
const url = playbook.site.url
const scan = ospath.join(cacheDir, 'collector/spring-security/build/antora-resources')
const cache = ospath.join(cacheDir, 'collector-cache/spring-security/6ca8fb4-1.0.0')
const zipFileName = ospath.join(
playbookDir,
'build/antora/inject-collector-cache-config-extension/.cache/6ca8fb4-1.0.0.zip'
)
delete playbook.site.url
ext.register.call(generatorContext, { playbook, config: { baseCacheUrl: url } })
await generatorContext.contentAggregated({ playbook, contentAggregate })
expect(generatorContext.messages).to.eql([
`Unable to restore cache from ${httpServerUrl}/6ca8fb4-1.0.0.zip`,
`Configuring collector to cache '${scan}' at '${cache}' and zip the results at '${zipFileName}'`,
`Configuring collector to cache '${scan}' at '${cache}'`,
])
})
})