Skip to content

Commit

Permalink
Add support for dropping content when a snapshot zip is missing
Browse files Browse the repository at this point in the history
Update the collector with a new configuration property that can be used
to drop content if a snapshot zip cannot be found.

Closes gh-12
  • Loading branch information
philwebb committed May 2, 2024
1 parent 307bf02 commit 5d0e717
Show file tree
Hide file tree
Showing 5 changed files with 210 additions and 31 deletions.
23 changes: 23 additions & 0 deletions README.adoc
Original file line number Diff line number Diff line change
Expand Up @@ -349,3 +349,26 @@ antora:
module: my-module
path: api/java
----


=== Dealing With "HTTP 404 Not Found" Errors for Snapshots
Branches that refer to snapshot versions of zip files will encounter 404 errors if artifacts have not been published.
This is commonly encountered when a version is bumped as the git commit needs to occur before CI can run and publish zip files.

In order to deal with this situation, the Antora Zip Contents Collector extension can drop content for snapshot versions if the content cannot be found.
To enable this feature, set `on_missing_snapshot_zip` to `drop_content`:

.antora-playbook.yml
[,yaml]
----
antora:
extensions:
- require: '@springio/antora-zip-contents-collector-extension'
on_missing_snapshot_zip: drop_content
----

Content will only be dropped when _all_ of the following conditions are satisfied:

* The content is being built from a branch and not a tag
* The version number ends with `-SNAPSHOT`
* The feature has been configured by setting `on_missing_snapshot_zip` to `drop_content`
143 changes: 116 additions & 27 deletions packages/antora-zip-contents-collector-extension/lib/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -27,27 +27,40 @@ const CACHE_RETENTION = 60 * DAYS
function register ({ config, downloadLog }) {
const logger = this.getLogger('zip-contents-collector-extension')
const catalogIncludes = new Map()
this.once('contentAggregated', (contextVariables) => contentAggregated(contextVariables, config, downloadLog))
this.once('contentClassified', (contextVariables) => contentClassified(contextVariables, config, downloadLog))
this.once('uiLoaded', (contextVariables) => uiLoaded(contextVariables))
this.once('contentAggregated', (contextVariables) =>
contentAggregated.call(this, contextVariables, config, downloadLog)
)
this.once('contentClassified', (contextVariables) =>
contentClassified.call(this, contextVariables, config, downloadLog)
)
this.once('uiLoaded', (contextVariables) => uiLoaded.call(this, contextVariables))

async function contentAggregated ({ playbook, contentAggregate }, config, downloadLog) {
logger.trace('Checking content aggregate for zip contents collector includes')
const collectorCacheDir = await getCollectorCacheDir(playbook)
logger.trace(`Using cache dir ${collectorCacheDir}`)
const componentVersionBucketsToDrop = []
// First apply content-aggregate includes since they may update the version
for (const componentVersionBucket of contentAggregate) {
for (const origin of componentVersionBucket.origins) {
const includes = getIncludes(
config,
origin,
(include) => !include.destination || include.destination.toLowerCase() === 'content-aggregate'
)
if (includes.length > 0) {
logger.trace(`Adding '${origin.refname}' aggregate includes ${includes.map((include) => include.name)}`)
const version = await readVersion(origin, config.versionFile)
await addIncludes(config, downloadLog, collectorCacheDir, version, includes, (include, zipFile, file) =>
addToContentAggregate(componentVersionBucket, include, zipFile, file)
const version = await readVersion(origin, config.versionFile)
try {
await addContentAggregateIncludes(
config,
downloadLog,
origin,
version,
collectorCacheDir,
componentVersionBucket
)
} catch (error) {
handleContentAggregatedError(
config,
origin,
version,
componentVersionBucket,
error,
componentVersionBucketsToDrop
)
}
}
Expand All @@ -56,20 +69,96 @@ function register ({ config, downloadLog }) {
for (const componentVersionBucket of contentAggregate) {
const key = componentVersionBucket.version + '@' + componentVersionBucket.name
for (const origin of componentVersionBucket.origins) {
const includes = getIncludes(
config,
origin,
(include) => include.destination && include.destination.toLowerCase() === 'content-catalog'
)
if (includes.length > 0) {
logger.trace(
`Storing '${origin.refname}' content includes [${includes.map((include) => include.name)}] under '${key}'`
const version = await readVersion(origin, config.versionFile)
try {
await collectContentCatalogIncludes(config, downloadLog, collectorCacheDir, origin, version, key)
} catch (error) {
handleContentAggregatedError(
config,
origin,
version,
componentVersionBucket,
error,
componentVersionBucketsToDrop
)
const includesForKey = (catalogIncludes.has(key) ? catalogIncludes : catalogIncludes.set(key, [])).get(key)
includesForKey.push(...includes)
}
}
}
if (componentVersionBucketsToDrop.length > 0) {
const updatedContentAggregate = contentAggregate.filter(
(candidate) => !componentVersionBucketsToDrop.includes(candidate)
)
this.updateVariables({ contentAggregate: updatedContentAggregate })
}
}

/**
 * Download and apply all 'content-aggregate' includes declared for the given
 * origin, adding every extracted file to the supplied component version
 * bucket. Includes without an explicit destination default to the content
 * aggregate. Does nothing when the origin declares no matching includes.
 */
async function addContentAggregateIncludes (
  config,
  downloadLog,
  origin,
  version,
  collectorCacheDir,
  componentVersionBucket
) {
  const targetsAggregate = (include) =>
    !include.destination || include.destination.toLowerCase() === 'content-aggregate'
  const includes = getIncludes(config, origin, targetsAggregate)
  if (includes.length === 0) return
  logger.trace(`Adding '${origin.refname}' aggregate includes ${includes.map((include) => include.name)}`)
  await doWithIncludes(config, downloadLog, collectorCacheDir, version, includes, (include, zipFile, file) =>
    addToContentAggregate(componentVersionBucket, include, zipFile, file)
  )
}

/**
 * Download the 'content-catalog' includes for the given origin (so any
 * missing zip triggers an error during the `contentAggregated` event) and
 * record them under the component-version key so they can be added to the
 * content catalog later, during the `contentClassified` event.
 */
async function collectContentCatalogIncludes (config, downloadLog, collectorCacheDir, origin, version, key) {
  const targetsCatalog = (include) =>
    include.destination && include.destination.toLowerCase() === 'content-catalog'
  const includes = getIncludes(config, origin, targetsCatalog)
  const names = includes.map((include) => include.name)
  logger.trace(`Collecting '${origin.refname}' content catalog includes ${names}`)
  await doWithIncludes(config, downloadLog, collectorCacheDir, version, includes, (include, zipFile, file) =>
    logger.trace(`Prepared ${file.path} for addition to content catalog`)
  )
  if (includes.length === 0) return
  logger.trace(`Storing '${origin.refname}' content includes [${names}] under '${key}'`)
  if (!catalogIncludes.has(key)) catalogIncludes.set(key, [])
  catalogIncludes.get(key).push(...includes)
}

/**
 * Decide how to react when collecting zip content fails during the
 * `contentAggregated` event.
 *
 * When the extension is configured with `on_missing_snapshot_zip:
 * drop_content` and every drop condition holds (origin is a branch, the
 * version ends with `-SNAPSHOT`, and the failure is an HTTP 404), the bucket
 * is queued for removal instead of failing the build. In every other case the
 * original error is rethrown unchanged.
 *
 * @param {Object} config extension configuration (reads `onMissingSnapshotZip`)
 * @param {Object} origin the content origin (reads `reftype` and `refname`)
 * @param {string|undefined} version the resolved component version, if any
 * @param {Object} componentVersionBucket the bucket whose includes failed
 * @param {Error} error the error raised while processing includes
 * @param {Object[]} componentVersionBucketsToDrop accumulator of buckets to drop
 * @throws the given error when the content cannot be dropped
 */
function handleContentAggregatedError (
  config,
  origin,
  version,
  componentVersionBucket,
  error,
  componentVersionBucketsToDrop
) {
  if (config.onMissingSnapshotZip === 'drop_content') {
    logger.trace(`Considering if '${origin.refname}' content can be dropped`)
    if (origin.reftype === 'branch' && version && version.endsWith('-SNAPSHOT') && isHttpNotFoundError(error)) {
      // Fixed log message (was the garbled "content for due to HTTP not found error")
      logger.trace(`Dropping '${origin.refname}' content due to HTTP not found error`)
      componentVersionBucketsToDrop.push(componentVersionBucket)
      return
    }
  }
  throw error
}

function isHttpNotFoundError (error) {
if (error && error.name === 'HTTPError' && error.statusCode === 404) {
return true
}
if (error instanceof AggregateError) {
return error.errors.every((candidate) => isHttpNotFoundError(candidate))
}
return false
}

async function contentClassified ({ playbook, contentCatalog }, config, downloadLog) {
Expand All @@ -80,7 +169,7 @@ function register ({ config, downloadLog }) {
const includes = catalogIncludes.get(key)
if (includes && includes.length > 0) {
logger.trace(`Adding '${key}' content includes [${includes.map((include) => include.name)}]`)
await addIncludes(
await doWithIncludes(
config,
downloadLog,
collectorCacheDir,
Expand Down Expand Up @@ -138,12 +227,12 @@ function register ({ config, downloadLog }) {
return includes
}

async function addIncludes (config, downloadLog, collectorCacheDir, version, includes, action) {
async function doWithIncludes (config, downloadLog, collectorCacheDir, version, includes, action) {
for (const include of includes) {
const { name, origin } = include
const versionClassification = classifyVersion(version)
logger.trace(
`Adding zip contents include '${name}' to ${origin.reftype} '${origin.refname}'${
`Processing zip contents include '${name}' to ${origin.reftype} '${origin.refname}'${
version ? ' (' + version + ')' : ''
}`
)
Expand Down Expand Up @@ -309,7 +398,7 @@ function register ({ config, downloadLog }) {
if (response.statusCode !== 200) {
const message = `Unable to download '${url}' due to HTTP response code ${response.statusCode} (${response.statusMessage})`
logger.trace(message)
throw Object.assign(new Error(message), { name: 'HTTPError' })
throw Object.assign(new Error(message), { name: 'HTTPError', statusCode: response.statusCode })
}
await fsp.writeFile(file, contents)
await fsp.writeFile(cacheFile, JSON.stringify(cache))
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
name: test
version: true
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
name=thing
version=1.2.3-SNAPSHOT
otherstuff=things
Original file line number Diff line number Diff line change
Expand Up @@ -810,6 +810,9 @@ describe('zip contents collector extension', () => {
})

it('should remove older files from cache dir after run', async () => {
const extensionConfig = () => ({
locations: [{ url: `http://localhost:${httpServerPort}/\${name}.zip` }],
})
const cacheDir = getCollectorCacheDir()
let fd
const newerFile = ospath.join(cacheDir, 'newerfile')
Expand All @@ -823,7 +826,7 @@ describe('zip contents collector extension', () => {
await fsp.utimes(olderFile, time, time)
expect(newerFile).to.be.a.path()
expect(olderFile).to.be.a.path()
await runScenario({ repoName: 'test-at-root' })
await runScenario({ repoName: 'test-at-root', extensionConfig })
expect(newerFile).to.be.a.path()
expect(olderFile).to.not.be.a.path()
})
Expand Down Expand Up @@ -920,6 +923,46 @@ describe('zip contents collector extension', () => {
).to.throw('Error unzipping')
})

it('should drop content when 404 on branch when configured to do so', async () => {
  // Configure the extension to drop (rather than fail) snapshot content
  // whose zip cannot be downloaded (HTTP 404)
  const extensionConfig = () => ({
    versionFile: 'gradle.properties',
    onMissingSnapshotZip: 'drop_content',
    locations: [{ url: `http://localhost:${httpServerPort}/v\${version}/\${name}.zip` }],
  })
  const componentConfig = { include: ['start-page'] }
  await runScenario({
    repoName: 'test-gradle-snapshot-version-file-at-root',
    tags: ['v1.2.2'],
    extensionConfig,
    componentConfig,
    zipFiles: ['start-page'],
    httpPath: '/v1.2.2',
    afterGit: async ({ repo }) => {
      // On a throwaway branch, bump the version to the released 1.2.2 and
      // retag so the tag has a published zip, while the default branch stays
      // on 1.2.3-SNAPSHOT whose zip does not exist (yielding a 404)
      await git.branch({ ...repo, ref: 'update' })
      await git.checkout({ ...repo, ref: 'update' })
      const gradleProperties = ospath.join(repo.dir, 'gradle.properties')
      let content = await fsp.readFile(gradleProperties, 'utf8')
      // escape the dots so only the literal version string is replaced
      content = content.replace(/1\.2\.3-SNAPSHOT/g, '1.2.2')
      await fsp.writeFile(gradleProperties, content, 'utf8')
      await git.add({ ...repo, filepath: 'gradle.properties' })
      await git.commit({
        ...repo,
        author: { name: 'Tester', email: '[email protected]' },
        message: 'update version',
      })
      await git.tag({ ...repo, ref: 'v1.2.2', force: true })
      await git.deleteBranch({ ...repo, ref: 'update' })
    },
    after: ({ contentAggregate }) => {
      // Only the tagged 1.2.2 bucket should remain; the snapshot branch
      // content has been dropped
      expect(contentAggregate).to.have.lengthOf(1)
      expect(contentAggregate[0].files).to.have.lengthOf(2)
      const page = contentAggregate[0].files.find((it) => it.src.path === 'modules/ROOT/pages/index.adoc')
      expect(page).to.exist()
    },
  })
})

async function runScenario ({
repoName,
branches,
Expand All @@ -934,6 +977,7 @@ describe('zip contents collector extension', () => {
downloadLog,
times = 1,
descriptorVersion = '1.0',
afterGit,
before,
after,
}) {
Expand All @@ -942,7 +986,7 @@ describe('zip contents collector extension', () => {
if (httpPath) {
;[httpServer, httpServerPort] = await startHttpServer(httpPath, zipDir, httpUsers)
}
const repo = await createRepository({ repoName, branches, tags, startPath, componentConfig })
const repo = await createRepository({ repoName, branches, tags, startPath, componentConfig, afterGit })
const playbook = {
runtime: { cacheDir: CACHE_DIR, quiet: true },
content: {
Expand All @@ -956,7 +1000,7 @@ describe('zip contents collector extension', () => {
],
},
}
const contentAggregate = await aggregateContent(playbook)
let contentAggregate = await aggregateContent(playbook)
const contentCatalog = new ContentCatalog()
const uiCatalog = new UiCatalog()
const descriptor = { name: 'test', version: descriptorVersion }
Expand All @@ -972,6 +1016,9 @@ describe('zip contents collector extension', () => {
})
for (let index = 0; index < times; index++) {
await generatorContext.contentAggregated({ playbook, contentAggregate })
if (generatorContext?.variables?.contentAggregate) {
contentAggregate = generatorContext.variables.contentAggregate
}
await generatorContext.contentClassified({ playbook, contentCatalog })
await generatorContext.uiLoaded({ uiCatalog })
}
Expand All @@ -981,7 +1028,15 @@ describe('zip contents collector extension', () => {
}
}

async function createRepository ({ repoName, fixture = repoName, branches, tags, startPath, componentConfig }) {
async function createRepository ({
repoName,
fixture = repoName,
branches,
tags,
startPath,
componentConfig,
afterGit,
}) {
const repo = { dir: ospath.join(REPOS_DIR, repoName), fs }
const links = []
const captureLinks = function (src, dest) {
Expand Down Expand Up @@ -1026,6 +1081,10 @@ describe('zip contents collector extension', () => {
for (const tag of tags) await git.tag({ ...repo, ref: tag })
}
repo.url = `http://localhost:${gitServerPort}/${repoName}/.git`
if (afterGit) {
const afterGitParams = { repo }
isAsync(afterGit) ? await afterGit(afterGitParams) : afterGit(afterGitParams)
}
return repo
}

Expand All @@ -1052,6 +1111,9 @@ describe('zip contents collector extension', () => {
this[eventName] = fn
},
getLogger: logger.getLogger,
updateVariables (variables) {
this.variables = variables
},
})

const getCollectorCacheDir = () => {
Expand Down

0 comments on commit 5d0e717

Please sign in to comment.