initial commit for saf cli int grype (#2887)

* initial commit for saf cli int grype
* lint and readme
* small QOL changes
1 parent 793aa41 · commit c5bce30
Showing 13 changed files with 559,954 additions and 1 deletion.
@@ -0,0 +1,27 @@
import {Command, Flags} from '@oclif/core'
import fs from 'fs'
import {AnchoreGrypeMapper as Mapper} from '@mitre/hdf-converters'
import {checkSuffix} from '../../utils/global'

export default class AnchoreGrype2HDF extends Command {
  static usage = 'convert anchoregrype2hdf -i <anchoregrype-json> -o <hdf-scan-results-json>'

  static description = 'Translate an Anchore Grype output file into an HDF results set'

  static examples = ['saf convert anchoregrype2hdf -i anchoregrype.json -o output-hdf-name.json']

  static flags = {
    help: Flags.help({char: 'h'}),
    input: Flags.string({char: 'i', required: true, description: 'Input Anchore Grype file'}),
    output: Flags.string({char: 'o', required: true, description: 'Output HDF file'}),
    // -w embeds the raw Grype input alongside the converted results
    'with-raw': Flags.boolean({char: 'w', required: false}),
  }

  async run() {
    const {flags} = await this.parse(AnchoreGrype2HDF)
    const input = fs.readFileSync(flags.input, 'utf8')

    const converter = new Mapper(input, flags['with-raw'])
    fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
  }
}
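The `checkSuffix` helper is imported from `../../utils/global` but is not part of this diff. A minimal sketch of what it presumably does, assuming it only normalizes the output path by appending a `.json` extension when one is missing:

// Hypothetical sketch of checkSuffix (the real helper is in the utils
// module referenced above and is not shown in this commit).
export function checkSuffix(filename: string): string {
  // Append .json so the written HDF file always carries the expected extension.
  return filename.endsWith('.json') ? filename : `${filename}.json`
}

With that assumption, `saf convert anchoregrype2hdf -i anchoregrype.json -o output-hdf-name` and `-o output-hdf-name.json` would both write the same file.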
@@ -0,0 +1,188 @@
import { expect, test } from "@oclif/test";
import tmp from "tmp";
import path from "path";
import fs from "fs";
import { omitHDFChangingFields } from "../utils";

describe("Test anchore grype", () => {
  const tmpobj = tmp.dirSync({ unsafeCleanup: true });

  test
    .stdout()
    .command([
      "convert anchoregrype2hdf",
      "-i",
      path.resolve(
        "./test/sample_data/anchoregrype/sample_input_report/anchore_grype.json"
      ),
      "-o",
      `${tmpobj.name}/anchore-grype-hdf.json`,
    ])
    .it("hdf-converter output test", () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/anchore-grype-hdf.json`, "utf8")
      );
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve("./test/sample_data/anchoregrype/anchore-grype-hdf.json"),
          "utf8"
        )
      );
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample)
      );
    });
});

describe("Test anchore grype withraw flag", () => {
  const tmpobj = tmp.dirSync({ unsafeCleanup: true });

  test
    .stdout()
    .command([
      "convert anchoregrype2hdf",
      "-i",
      path.resolve(
        "./test/sample_data/anchoregrype/sample_input_report/anchore_grype.json"
      ),
      "-o",
      `${tmpobj.name}/anchore-grype-withraw.json`,
      "-w",
    ])
    .it("hdf-converter withraw output test", () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/anchore-grype-withraw.json`, "utf8")
      );
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve("./test/sample_data/anchoregrype/anchore-grype-withraw.json"),
          "utf8"
        )
      );
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample)
      );
    });
});

describe("Test amazon anchore grype", () => {
  const tmpobj = tmp.dirSync({ unsafeCleanup: true });

  test
    .stdout()
    .command([
      "convert anchoregrype2hdf",
      "-i",
      path.resolve(
        "./test/sample_data/anchoregrype/sample_input_report/amazon.json"
      ),
      "-o",
      `${tmpobj.name}/amazon-grype-hdf.json`,
    ])
    .it("hdf-converter output test", () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/amazon-grype-hdf.json`, "utf8")
      );
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve("./test/sample_data/anchoregrype/amazon-grype-hdf.json"),
          "utf8"
        )
      );
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample)
      );
    });
});

describe("Test amazon anchore grype withraw flag", () => {
  const tmpobj = tmp.dirSync({ unsafeCleanup: true });

  test
    .stdout()
    .command([
      "convert anchoregrype2hdf",
      "-i",
      path.resolve(
        "./test/sample_data/anchoregrype/sample_input_report/amazon.json"
      ),
      "-o",
      `${tmpobj.name}/amazon-grype-withraw.json`,
      "-w",
    ])
    .it("hdf-converter withraw output test", () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/amazon-grype-withraw.json`, "utf8")
      );
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve("./test/sample_data/anchoregrype/amazon-grype-withraw.json"),
          "utf8"
        )
      );
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample)
      );
    });
});

describe("Test tensorflow anchore grype", () => {
  const tmpobj = tmp.dirSync({ unsafeCleanup: true });

  test
    .stdout()
    .command([
      "convert anchoregrype2hdf",
      "-i",
      path.resolve(
        "./test/sample_data/anchoregrype/sample_input_report/tensorflow.json"
      ),
      "-o",
      `${tmpobj.name}/tensorflow-grype-hdf.json`,
    ])
    .it("hdf-converter output test", () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/tensorflow-grype-hdf.json`, "utf8")
      );
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve("./test/sample_data/anchoregrype/tensorflow-grype-hdf.json"),
          "utf8"
        )
      );
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample)
      );
    });
});

describe("Test tensorflow anchore grype withraw flag", () => {
  const tmpobj = tmp.dirSync({ unsafeCleanup: true });

  test
    .stdout()
    .command([
      "convert anchoregrype2hdf",
      "-i",
      path.resolve(
        "./test/sample_data/anchoregrype/sample_input_report/tensorflow.json"
      ),
      "-o",
      `${tmpobj.name}/tensorflow-grype-withraw.json`,
      "-w",
    ])
    .it("hdf-converter withraw output test", () => {
      const converted = JSON.parse(
        fs.readFileSync(`${tmpobj.name}/tensorflow-grype-withraw.json`, "utf8")
      );
      const sample = JSON.parse(
        fs.readFileSync(
          path.resolve("./test/sample_data/anchoregrype/tensorflow-grype-withraw.json"),
          "utf8"
        )
      );
      expect(omitHDFChangingFields(converted)).to.eql(
        omitHDFChangingFields(sample)
      );
    });
});
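Every assertion above funnels both the converted output and the checked-in sample through `omitHDFChangingFields` before the deep-equality comparison. That helper comes from `../utils` and is not part of this diff; a minimal sketch of the idea, assuming it strips fields that legitimately vary between runs (timestamps, tool versions, profile hashes) so only stable content is compared:

// Hypothetical sketch of omitHDFChangingFields (the real helper lives in
// the test utils module and is not shown in this commit). The exact field
// list here is an assumption; the intent is to drop run-to-run noise.
export function omitHDFChangingFields(
  hdf: Record<string, any>
): Record<string, any> {
  const copy = JSON.parse(JSON.stringify(hdf)); // deep clone, leave input intact
  delete copy.version; // tool/schema version may differ between runs
  if (copy.statistics) {
    delete copy.statistics.duration; // wall-clock run time is never stable
  }
  for (const profile of copy.profiles ?? []) {
    delete profile.sha256; // profile hash changes with any content change
    delete profile.version;
  }
  return copy;
}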