Add more coverage in shadowdog-local-cache
This adds more coverage to the shadowdog-local-cache plugin, covering edge cases such as restoring the cache for a folder artifact while ignoring some of its files.
beagleknight committed Dec 12, 2024
1 parent 8724970 commit 5c0fe4f
Showing 4 changed files with 84 additions and 36 deletions.
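For orientation, the main edge case the new tests exercise is a folder artifact that is restored from the local cache while some of its files are excluded. A minimal sketch of that scenario, using the fixture paths from the tests in the diff below (illustrative values, not a recommended layout):

// Sketch: the artifact entry covered by the new folder test (fixture values from the diff).
const artifact = {
  output: 'tmp/tests/artifacts',         // a folder artifact restored from the local cache
  ignore: ['tmp/tests/artifacts/bar'],   // excluded from the cache, so it is never restored
}

// After the middleware restores the cache, the test asserts that
// tmp/tests/artifacts/foo exists again while tmp/tests/artifacts/bar does not.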
2 changes: 1 addition & 1 deletion package.json
@@ -42,7 +42,7 @@
"prepare": "npm run build",
"prepublishOnly": "vitest run",
"test": "vitest run --silent",
"test:watch": "vitest --silent",
"test:watch": "vitest",
"coverage": "vitest run --coverage",
"lint": "eslint",
"lint:inspect-config": "eslint --inspect-config",
95 changes: 68 additions & 27 deletions src/plugins/shadowdog-local-cache.test.ts
@@ -1,6 +1,6 @@
import fs from 'fs-extra'
import { describe, it, beforeEach, afterEach, vi, expect } from 'vitest'
import shadowdogLocalCache from './shadowdog-local-cache'
import shadowdogLocalCache, { compressArtifact } from './shadowdog-local-cache'

describe('shadowdog local cache', () => {
const next = vi.fn(() => fs.writeFile('tmp/tests/artifacts/foo', 'foo'))
@@ -44,36 +44,77 @@ describe('shadowdog local cache', () => {
})

describe('when cache is present', () => {
beforeEach(() => {
fs.writeFileSync('tmp/tests/artifacts/foo', 'foo')
// TODO: Gzip things
fs.writeFileSync('tmp/tests/cache/0adeca2ac6.tar.gz', 'foo')
describe('when the artifact is a single file', () => {
beforeEach(async () => {
fs.writeFileSync('tmp/tests/artifacts/foo', 'foo')
await compressArtifact('tmp/tests/artifacts/foo', 'tmp/tests/cache/0adeca2ac6.tar.gz')
fs.rmSync('tmp/tests/artifacts', { recursive: true })
})

it('does not execute the next middleware', async () => {
await shadowdogLocalCache.middleware({
config: {
command: 'echo foo',
artifacts: [
{
output: 'tmp/tests/artifacts/foo',
},
],
tags: [],
workingDirectory: '',
},
files: [],
invalidators: {
environment: [],
files: [],
},
next,
abort: () => {},
options: {
path: 'tmp/tests/cache',
},
})
expect(next).not.toHaveBeenCalled()
expect(fs.readFileSync('tmp/tests/artifacts/foo', 'utf8')).toBe('foo')
})
})

it.skip('does not execute the next middleware', async () => {
await shadowdogLocalCache.middleware({
config: {
command: 'echo foo',
artifacts: [
{
output: 'tmp/tests/artifacts/foo',
},
],
tags: [],
workingDirectory: '',
},
files: [],
invalidators: {
environment: [],
describe('when the artifact is a folder with some files to ignore', () => {
beforeEach(async () => {
fs.writeFileSync('tmp/tests/artifacts/foo', 'foo')
fs.writeFileSync('tmp/tests/artifacts/bar', 'bar')
await compressArtifact('tmp/tests/artifacts', 'tmp/tests/cache/079138748b.tar.gz')
fs.rmSync('tmp/tests/artifacts', { recursive: true })
})

it('does not execute the next middleware', async () => {
await shadowdogLocalCache.middleware({
config: {
command: 'echo foo',
artifacts: [
{
output: 'tmp/tests/artifacts',
ignore: ['tmp/tests/artifacts/bar'],
},
],
tags: [],
workingDirectory: '',
},
files: [],
},
next,
abort: () => {},
options: {
path: 'tmp/tests/cache',
},
invalidators: {
environment: [],
files: [],
},
next,
abort: () => {},
options: {
path: 'tmp/tests/cache',
},
})
expect(next).not.toHaveBeenCalled()
expect(fs.readFileSync('tmp/tests/artifacts/foo', 'utf8')).toBe('foo')
expect(fs.existsSync('tmp/tests/artifacts/bar')).toBe(false)
})
expect(next).not.toHaveBeenCalled()
})
})
})
19 changes: 11 additions & 8 deletions src/plugins/shadowdog-local-cache.ts
@@ -9,11 +9,11 @@ import chalk from 'chalk'
import { z } from 'zod'
import { Middleware } from '.'
import { CommandConfig } from '../config'
import { logMessage } from '../utils'
import { logMessage, logError } from '../utils'

type FilterFn = (file: string) => boolean

const compressFolder = (folderPath: string, outputPath: string, filter: FilterFn) => {
export const compressArtifact = (folderPath: string, outputPath: string, filter?: FilterFn) => {
return new Promise((resolve, reject) => {
const tarStream = tar.c(
{
@@ -37,7 +37,7 @@
})
}

const decompressFile = (tarGzPath: string, outputPath: string, filter: FilterFn) => {
const decompressArtifact = (tarGzPath: string, outputPath: string, filter: FilterFn) => {
return new Promise((resolve, reject) => {
fs.mkdirpSync(outputPath)

@@ -78,17 +78,19 @@ const restoreCache = async (
)

try {
await decompressFile(
await decompressArtifact(
cacheFilePath,
path.join(process.cwd(), artifact.output, '..'),
(filePath) => filterFn(artifact.ignore, artifact.output, filePath),
)
} catch {
} catch (error: unknown) {
logMessage(
`🚫 An error occurred while restoring cache for artifact '${chalk.blue(
artifact.output,
)}' with id '${chalk.green(cacheFileName)}'`,
)
logError(error as Error)

return artifact
}

@@ -218,15 +220,16 @@ const middleware: Middleware<PluginOptions> = async ({
const sourceCacheFilePath = path.join(process.cwd(), artifact.output)

try {
await compressFolder(sourceCacheFilePath, cacheFilePath, (filePath) =>
await compressArtifact(sourceCacheFilePath, cacheFilePath, (filePath) =>
filterFn(artifact.ignore, artifact.output, filePath),
)
} catch {
} catch (error: unknown) {
logMessage(
`🚫 An error occurred while storing global cache for artifact '${
`🚫 An error occurred while storing cache for artifact '${
artifact.output
}' with id '${chalk.green(cacheFileName)}'`,
)
logError(error as Error)
}
}),
)
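A side note on the rename: because compressArtifact is now exported, the tests can seed the cache with a real gzipped tarball instead of writing a fake .tar.gz file (the old "// TODO: Gzip things"). A minimal sketch of that setup, mirroring the new beforeEach blocks (fixture paths and the cache hash are illustrative):

import fs from 'fs-extra'
import { compressArtifact } from './shadowdog-local-cache'

// Inside an async test hook: build a real cache entry from the artifact, then delete
// the artifact so the middleware has to restore it from the cache on the next run.
fs.writeFileSync('tmp/tests/artifacts/foo', 'foo')
await compressArtifact('tmp/tests/artifacts/foo', 'tmp/tests/cache/0adeca2ac6.tar.gz')
fs.rmSync('tmp/tests/artifacts', { recursive: true })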
4 changes: 4 additions & 0 deletions src/utils.ts
@@ -4,5 +4,9 @@ export const logMessage = (message: string) => {
console.log(message)
}

export const logError = (error: Error) => {
console.error(new Error(error.stack))
}

export const chalkFiles = (files: string[]) =>
files.map((file) => `'${chalk.blue(file)}'`).join(', ')
