diff --git a/ext/vulnsrc/redhat/redhat.go b/ext/vulnsrc/redhat/redhat.go
index 5d0604c3aa..3f5431fd9b 100644
--- a/ext/vulnsrc/redhat/redhat.go
+++ b/ext/vulnsrc/redhat/redhat.go
@@ -23,6 +23,7 @@ import (
"errors"
"fmt"
"io/ioutil"
+ "reflect"
"regexp"
"strconv"
"strings"
@@ -31,40 +32,45 @@ import (
"github.com/quay/clair/v3/database"
"github.com/quay/clair/v3/ext/versionfmt/modulerpm"
"github.com/quay/clair/v3/ext/versionfmt/rpm"
+ "github.com/quay/clair/v3/ext/vulnsrc"
"github.com/quay/clair/v3/pkg/commonerr"
+ "github.com/quay/clair/v3/pkg/envutil"
"github.com/quay/clair/v3/pkg/httputil"
- "github.com/quay/clair/v3/ext/vulnsrc"
log "github.com/sirupsen/logrus"
)
const (
- // PulpV2BaseURL - base url for pulp v2 content
- PulpV2BaseURL = "https://www.redhat.com/security/data/oval/v2/"
// PulpManifest - url suffix for pulp manifest file
- PulpManifest = "PULP_MANIFEST"
+ PulpManifest = "PULP_MANIFEST"
// DbManifestEntryKeyPrefix - key prefix used to create flag for manifest entry hash key/value
DbManifestEntryKeyPrefix = "oval.v2.pulp.manifest.entry."
// DbLastAdvisoryDateKey - key prefix used to create flag for last advisory date key/value
- DbLastAdvisoryDateKey = "oval.v2.advisory.date.issued"
+ DbLastAdvisoryDateKey = "oval.v2.advisory.date.issued"
// DefaultLastAdvisoryDate - literal date (in case no existing last advisory date is found)
- DefaultLastAdvisoryDate = "1970-01-01"
+ DefaultLastAdvisoryDate = "1970-01-01"
// AdvisoryDateFormat date format for advisory dates ('magical reference date' for datetime format)
- AdvisoryDateFormat = "2006-01-02"
+ AdvisoryDateFormat = "2006-01-02"
// UpdaterFlag - key used for flag for updater
- UpdaterFlag = "RedHatOvalV2Updater"
+ UpdaterFlag = "redHatUpdater"
// UpdaterFlagDateFormat - date format for updater flag dates ('magical reference date' for datetime format)
- UpdaterFlagDateFormat = "2006-01-02 15:04:05"
+ UpdaterFlagDateFormat = "2006-01-02 15:04:05"
// AffectedType - affected type
- AffectedType = database.BinaryPackage
+ AffectedType = database.BinaryPackage
// CveURL - url for cve content
- CveURL = "https://access.redhat.com/security/cve/"
+ CveURL = "https://access.redhat.com/security/cve/"
)
+// OvalV2BaseURL - base url for oval v2 content; defaults to the Red Hat public URL but can be overridden via the OVAL_V2_URL environment variable
+var OvalV2BaseURL = envutil.GetEnv("OVAL_V2_URL", "https://www.redhat.com/security/data/oval/v2/")
+
// SupportedArches - supported architectures
-var SupportedArches = map[string]bool { "x86_64":true, "noarch":true }
+var SupportedArches = map[string]bool{"x86_64": true, "noarch": true}
// SupportedDefinitionTypes - supported definition classes
-var SupportedDefinitionTypes = map[string]bool { "patch":true }
+var SupportedDefinitionTypes = map[string]bool{"patch": true}
+
+// pendingVulnNames - running set (keyed by name) of vulns already queued; NOTE(review): package-level and never reset, so entries persist across Update() calls — confirm re-runs cannot silently skip previously-seen vulns
+var pendingVulnNames = map[string]bool{}
func init() {
vulnsrc.RegisterUpdater("redhat", &updater{})
@@ -75,22 +81,22 @@ func (u *updater) Clean() {}
func (u *updater) Update(datastore database.Datastore) (resp vulnsrc.UpdateResponse, err error) {
log.WithField("package", "RedHat").Info("Start fetching vulnerabilities")
- pulpManifestBody, err := FetchPulpManifest(PulpV2BaseURL + PulpManifest)
+ pulpManifestBody, err := FetchPulpManifest(OvalV2BaseURL + PulpManifest)
if err != nil {
- log.Error("Unable to fetch pulp manifest file: " + PulpV2BaseURL + PulpManifest)
+ log.Error("Unable to fetch pulp manifest file: " + OvalV2BaseURL + PulpManifest)
return resp, err
}
log.Info("Found pulp manifest: " + pulpManifestBody)
pulpManifestEntries := ParsePulpManifest(pulpManifestBody)
- log.Info(fmt.Sprintf("Processing %d pulp manifest entries", len(pulpManifestEntries)))
+ log.Info(fmt.Sprintf("Processing %d pulp manifest entries", len(pulpManifestEntries)))
// initialize updater flags map
resp.Flags = make(map[string]string)
// walk the set of pulpManifestEntries
for _, manifestEntry := range pulpManifestEntries {
- log.Info(fmt.Sprintf("Processing manifest entry (BzipPath: %s)", manifestEntry.BzipPath))
+ log.Debug(fmt.Sprintf("Processing manifest entry (BzipPath: %s)", manifestEntry.BzipPath))
// check if this entry has already been processed (based on its sha256 hash)
if IsNewOrUpdatedManifestEntry(manifestEntry, datastore) {
unprocessedAdvisories := []ParsedAdvisory{}
@@ -98,13 +104,13 @@ func (u *updater) Update(datastore database.Datastore) (resp vulnsrc.UpdateRespo
log.Info("Found updated/new pulp manifest entry. Processing: " + manifestEntry.BzipPath)
// unzip and read the bzip-compressed oval file into an xml string
- ovalXML, err := ReadBzipOvalFile(PulpV2BaseURL + manifestEntry.BzipPath)
+ ovalXML, err := ReadBzipOvalFile(OvalV2BaseURL + manifestEntry.BzipPath)
if err != nil {
// log error and continue
log.Error(err)
continue
}
- if (ovalXML == "") {
+ if ovalXML == "" {
log.Error("Cannot parse empty source oval doc")
continue
}
@@ -116,7 +122,7 @@ func (u *updater) Update(datastore database.Datastore) (resp vulnsrc.UpdateRespo
log.Error(err)
continue
}
- log.Info(fmt.Sprintf("Processing %d definitions...", len(ovalDoc.DefinitionSet.Definitions)))
+ log.Debug(fmt.Sprintf("Processing %d definitions...", len(ovalDoc.DefinitionSet.Definitions)))
unprocessedAdvisories, err = GatherUnprocessedAdvisories(manifestEntry, ovalDoc, datastore)
if err != nil {
@@ -125,41 +131,53 @@ func (u *updater) Update(datastore database.Datastore) (resp vulnsrc.UpdateRespo
continue
}
if len(unprocessedAdvisories) < 1 {
- log.Info("Successful update, no unprocessed advisories found.")
+ log.Debug("Successful update, no unprocessed advisories found.")
continue
} else {
- log.Info(fmt.Sprintf("Successful update, found %d unprocessed advisories.", len(unprocessedAdvisories)))
+ log.Debug(fmt.Sprintf("Successful update, found %d unprocessed advisories.", len(unprocessedAdvisories)))
}
log.WithFields(log.Fields{
"items": len(unprocessedAdvisories),
"updater": "RedHat",
- }).Info("Start processing advisories")
+ }).Debug("Start parsing advisories for vulnerabilities")
+
+ collectedVulnerabilities := CollectVulnerabilities(unprocessedAdvisories, ovalDoc)
+ log.WithFields(log.Fields{
+ "collectedVulns": len(collectedVulnerabilities),
+ "pendingVulns": len(resp.Vulnerabilities),
+ "manifestEntry": manifestEntry.BzipPath,
+ }).Info("Append parsed vulnerabilities to pending set")
+
+ resp.Vulnerabilities = append(resp.Vulnerabilities, collectedVulnerabilities...)
- resp.Vulnerabilities = append(resp.Vulnerabilities, CollectVulnerabilities(unprocessedAdvisories, ovalDoc)...)
+ log.Debug(fmt.Sprintf("Total pending vulns: %d", len(resp.Vulnerabilities)))
// remember the bzip hash for this entry, so we don't re-process it again next time (if unchanged)
flagKey, flagVal := ConstructFlagForManifestEntrySignature(manifestEntry, datastore)
resp.Flags[flagKey] = flagVal
+ log.Trace("continuing next loop...") // plain string: Sprintf had no formatting verbs (go vet printf check)
} else {
// this pulp manifest entry has already been processed; log and skip it
- log.Info("Pulp manifest entry unchanged since last seen. Skipping: " + manifestEntry.BzipPath)
+ log.Debug("Pulp manifest entry unchanged since last seen. Skipping: " + manifestEntry.BzipPath)
+ continue
}
}
// debug
- log.Info(fmt.Sprintf("Updating advisory-last-checked-on date in database to: %s", time.Now().Format(AdvisoryDateFormat)))
- // update the db ky/value entry for the advisory-last-checked-on date (current timestamp, as coarse YYYY-MM-dd format)
+ log.Debug(fmt.Sprintf("Updating advisory-last-checked-on date in database to: %s", time.Now().Format(AdvisoryDateFormat)))
+ // update the db key/value entry for the updater-last-ran date (current timestamp, using "YYYY-MM-dd hh:mm:ss" format)
resp.Flags[UpdaterFlag] = time.Now().Format(UpdaterFlagDateFormat)
- resp.Flags[DbLastAdvisoryDateKey] = time.Now().Format(UpdaterFlagDateFormat)
+ // update the db key/value entry for the advisory-last-checked-on date (current timestamp, using advisory-style coarse "YYYY-MM-dd" format)
+ resp.Flags[DbLastAdvisoryDateKey] = time.Now().Format(AdvisoryDateFormat)
// update the resp flag with summary of found
if len(resp.Vulnerabilities) > 0 {
- log.WithField("package", "Red Hat").Debug(fmt.Sprintf("updating (found: %d vulnerabilities)...", len(resp.Vulnerabilities)))
+ log.WithField("package", "Red Hat").Info(fmt.Sprintf("updating (found: %d vulnerabilities)...", len(resp.Vulnerabilities)))
} else {
- log.WithField("package", "Red Hat").Debug("no update")
+ log.WithField("package", "Red Hat").Info("no update")
}
return resp, nil
@@ -189,7 +207,7 @@ func CollectVulnerabilities(advisoryDefinitions []ParsedAdvisory, ovalDoc OvalV2
// CollectVulnsForAdvisory - get the set of vulns for the given advisory (full doc must also be passed, for the states/tests/objects references)
func CollectVulnsForAdvisory(advisoryDefinition ParsedAdvisory, ovalDoc OvalV2Document) (vulnerabilities []database.VulnerabilityWithAffected) {
// first, check the advisory severity
- if (IsSignificantSeverity(advisoryDefinition.Metadata.Advisory.Severity)) {
+ if IsSignificantSeverity(advisoryDefinition.Metadata.Advisory.Severity) && IsSupportedDefinitionType(advisoryDefinition.Class) {
for _, cve := range advisoryDefinition.Metadata.Advisory.CveList {
packageMap := make(map[string]bool)
vulnerability := database.VulnerabilityWithAffected{
@@ -200,6 +218,7 @@ func CollectVulnsForAdvisory(advisoryDefinition ParsedAdvisory, ovalDoc OvalV2Do
Description: advisoryDefinition.Metadata.Description,
},
}
+
for _, parsedRmpNvra := range advisoryDefinition.PackageList {
if !IsArchSupported(parsedRmpNvra.Arch) {
continue
@@ -252,19 +271,76 @@ func CollectVulnsForAdvisory(advisoryDefinition ParsedAdvisory, ovalDoc OvalV2Do
}
}
+ log.WithFields(log.Fields{
+ "name": vulnerability.Name,
+ "packages": len(advisoryDefinition.PackageList),
+ "affected": len(vulnerability.Affected),
+ }).Trace("Append vulnerability")
if len(vulnerability.Affected) > 0 {
- vulnerabilities = append(vulnerabilities, vulnerability)
+ // check the pending vulnerabilities set for this vulnerability (the same vuln can appear in multiple manifest entries)
+ if pendingVulnNames[cve.Value+" - "+ParseRhsaName(advisoryDefinition)] {
+ // already queued; merge its packages into the existing copy so we don't end up with duplicates
+ log.Trace(fmt.Sprintf("Filtering unique package info for already-queued vulnerability: %s",
+ cve.Value+" - "+ParseRhsaName(advisoryDefinition)))
+ // find the existing copy in this advisory's local slice; NOTE(review): returns -1 when the duplicate was queued by a different advisory/manifest entry (pendingVulnNames is global, vulnerabilities is local), in which case the merge below is skipped and this vuln's unique packages are silently dropped — confirm intended
+ i := GetPendingVulnerabilitySliceIndex(vulnerabilities, vulnerability)
+ if i >= 0 {
+ // merge any new unique vuln features into the existing vuln
+ vulnerabilities[i] = MergeVulnerabilityFeature(vulnerability, vulnerabilities[i])
+ }
+ } else {
+ vulnerabilities = append(vulnerabilities, vulnerability)
+ // record the name in the running pending set, so later occurrences merge instead of duplicating
+ pendingVulnNames[vulnerability.Name] = true
+ }
}
}
} else {
// advisories with severity "None" should be skipped
- log.Trace(fmt.Sprintf("Skipping unsupported severity '%s' for advisory: %s",
+ log.Trace(fmt.Sprintf("Skipping unsupported advisory: %s (severity '%s', class: '%s')",
+ advisoryDefinition.Metadata.Title,
advisoryDefinition.Metadata.Advisory.Severity,
- advisoryDefinition.Metadata.Title))
+ advisoryDefinition.Class))
}
return
}
+// MergeVulnerabilityFeature - copy non-duplicate affected packages from sourceVuln to targetVuln, and return targetVuln
+func MergeVulnerabilityFeature(sourceVuln database.VulnerabilityWithAffected, targetVuln database.VulnerabilityWithAffected) database.VulnerabilityWithAffected {
+ for _, sourceAffectedFeature := range sourceVuln.Affected {
+ // append only features the target doesn't already contain (deep-equality check)
+ if !VulnerabilityContainsFeature(targetVuln, sourceAffectedFeature) {
+ // targetVuln doesn't contain the feature; append it
+ targetVuln.Affected = append(targetVuln.Affected, sourceAffectedFeature)
+ }
+ }
+ return targetVuln
+}
+
+// VulnerabilityContainsFeature - check whether the given vulnerability already contains the given feature (compares every field via reflect.DeepEqual)
+func VulnerabilityContainsFeature(vulnerability database.VulnerabilityWithAffected, comparisonFeature database.AffectedFeature) bool {
+ for _, existingFeature := range vulnerability.Affected {
+ if reflect.DeepEqual(existingFeature, comparisonFeature) {
+ // match found
+ return true
+ }
+ }
+ // no match found
+ return false
+}
+
+// GetPendingVulnerabilitySliceIndex - get the slice index for the given vulnerability (or -1 if not present); matches by Name only, all other fields are ignored
+func GetPendingVulnerabilitySliceIndex(vulnSet []database.VulnerabilityWithAffected, lookupVuln database.VulnerabilityWithAffected) int {
+ for i := range vulnSet {
+ if vulnSet[i].Name == lookupVuln.Name {
+ // match found
+ return i
+ }
+ }
+ // no match found
+ return -1
+}
+
// ConstructVulnerabilityIDs - construct the []VulnerabilityID set from the given advisory definition
func ConstructVulnerabilityIDs(advisoryDefinition ParsedAdvisory) []database.VulnerabilityID {
var vulnIDs []database.VulnerabilityID
@@ -288,7 +364,14 @@ func ParseCveNames(advisoryDefinition ParsedAdvisory) []string {
// ParseRhsaName - parse the RHSA name (e.g.: "RHBA-2019:2794") from the given advisory definition
func ParseRhsaName(advisoryDefinition ParsedAdvisory) string {
- return strings.TrimSpace(advisoryDefinition.Metadata.Title[:strings.Index(advisoryDefinition.Metadata.Title, ": ")])
+ var parsedName string
+ if len(advisoryDefinition.Metadata.Reference) > 0 {
+ parsedName = advisoryDefinition.Metadata.Reference[0].RefID
+ }
+ if parsedName == "" && strings.Contains(advisoryDefinition.Metadata.Title, ": ") {
+ parsedName = strings.TrimSpace(advisoryDefinition.Metadata.Title[:strings.Index(advisoryDefinition.Metadata.Title, ": ")])
+ }
+ return parsedName
}
// ParseVulnerabilityNamespace - parse the namespace from the given advisory definition
@@ -337,8 +420,8 @@ func GetSeverity(severity string) database.Severity {
// IsSignificantSeverity - checks whether the given severity is significant (used to determine whether vulns will be parsed and stored for it)
func IsSignificantSeverity(severity string) bool {
switch strings.Title(strings.ToLower(severity)) {
- case "None":
- return false
+ case "None":
+ return false
default:
// anything else is considered significant
return true
@@ -346,18 +429,18 @@ func IsSignificantSeverity(severity string) bool {
}
func extractAllCriterions(criteria OvalV2Criteria) []OvalV2Criterion {
- var criterions []OvalV2Criterion
- for _, criterion := range criteria.Criteria {
- // recursively append criteria contents
- criterions = append(criterions, extractAllCriterions(criterion)...)
- }
- for _, criterion := range criteria.Criterion {
- if (IsRelevantCriterion(criterion)) {
+ var criterions []OvalV2Criterion
+ for _, criterion := range criteria.Criteria {
+ // recursively append criteria contents
+ criterions = append(criterions, extractAllCriterions(criterion)...)
+ }
+ for _, criterion := range criteria.Criterion {
+ if IsRelevantCriterion(criterion) {
// append criterion
criterions = append(criterions, criterion)
}
- }
- return criterions
+ }
+ return criterions
}
// IsRelevantCriterion - check whether the given criterion is relevant
@@ -365,6 +448,8 @@ func IsRelevantCriterion(criterion OvalV2Criterion) bool {
// check comment for matching "is earlier than" substring
if strings.Contains(criterion.Comment, "is earlier than") {
return true
+ } else if strings.HasPrefix(criterion.Comment, "Module ") && strings.HasSuffix(criterion.Comment, " is enabled") {
+ return true
}
// nothing matched
return false
@@ -373,7 +458,7 @@ func IsRelevantCriterion(criterion OvalV2Criterion) bool {
// IsArchSupported - check whether the given architecture regex represents a supported arch
func IsArchSupported(archRegex string) bool {
// treat empty arch package info as noarch
- if (archRegex == "") {
+ if archRegex == "" {
return SupportedArches["noarch"]
}
// arch values may be simple strings (e.g.: "x86_64") or regex pattern-based (e.g.: "aarch64|ppc64le|s390x|x86_64")
@@ -381,7 +466,7 @@ func IsArchSupported(archRegex string) bool {
// walk the supported arches map, to see if there's a match to the regex
for archName, isSupported := range SupportedArches {
isMatch := archMatcher.MatchString(archName)
- if (isMatch && isSupported) {
+ if isMatch && isSupported {
return true
}
}
@@ -397,12 +482,14 @@ func IsSupportedDefinitionType(defClass string) bool {
// ParseCpeNamesFromAffectedCpeList - parse affected_cpe_list
func ParseCpeNamesFromAffectedCpeList(affectedCpeList OvalV2Cpe) ([]string, error) {
var cpeNames []string
- if affectedCpeList.Cpe == nil || len(affectedCpeList.Cpe) < 2 {
+ if affectedCpeList.Cpe == nil {
return cpeNames, errors.New("unparseable affected cpe list")
}
- // return all cpe entries from the list
+ // return all non-empty cpe entries from the list
for i := 0; i < len(affectedCpeList.Cpe); i++ {
- cpeNames = append(cpeNames, affectedCpeList.Cpe[i])
+ if affectedCpeList.Cpe[i] != "" {
+ cpeNames = append(cpeNames, affectedCpeList.Cpe[i])
+ }
}
return cpeNames, nil
}
@@ -413,15 +500,21 @@ func ProcessAdvisoriesSinceLastDbUpdate(ovalDoc OvalV2Document, datastore databa
var advisories []ParsedAdvisory
for _, definition := range ovalDoc.DefinitionSet.Definitions {
// check whether this is a supported definition type
- if (!IsSupportedDefinitionType(definition.Class)) {
+ if !IsSupportedDefinitionType(definition.Class) {
// not supported; skip it
+ log.Trace(fmt.Sprintf("Skipping unsupported definition (id: %s, class: %s)",
+ definition.ID,
+ definition.Class))
continue
}
+ log.Trace(fmt.Sprintf("Processing definition (id: %s, class: %s)",
+ definition.ID,
+ definition.Class))
// check if this entry has already been processed (based on its issued date)
if IsAdvisorySinceDate(sinceDate, definition.Metadata.Advisory.Issued.Date) {
// this advisory was issued since the last advisory date in the database; add it
// debug
- log.Info(fmt.Sprintf("Found advisory issued since the last known advisory date (%s) in database: %s (%s)",
+ log.Trace(fmt.Sprintf("Found advisory issued since the last known advisory date (%s) in database: %s (%s)",
sinceDate,
definition.Metadata.Title, definition.Metadata.Advisory.Issued.Date))
advisories = append(advisories, ParseAdvisory(definition, ovalDoc))
@@ -430,10 +523,10 @@ func ProcessAdvisoriesSinceLastDbUpdate(ovalDoc OvalV2Document, datastore databa
// advisory date is coarse (YYYY-MM-dd format) date only,
// so it's possible that we'll see an advisory multiple times within the same day;
// check the db in this case to be sure
- if (!DbLookupIsAdvisoryProcessed(parsedAdvisory, datastore)) {
+ if !DbLookupIsAdvisoryProcessed(parsedAdvisory, datastore) {
// this advisory id/version hasn't been processed yet; add it
// debug
- log.Info(fmt.Sprintf("Found unprocessed advisory issued on the last known advisory date (%s) in database: %s (%s)",
+ log.Trace(fmt.Sprintf("Found unprocessed advisory issued on the last known advisory date (%s) in database: %s (%s)",
sinceDate,
definition.Metadata.Title, definition.Metadata.Advisory.Issued.Date))
advisories = append(advisories, parsedAdvisory)
@@ -441,26 +534,26 @@ func ProcessAdvisoriesSinceLastDbUpdate(ovalDoc OvalV2Document, datastore databa
} else {
// this advisory was issued before the last advisory date in the database, so already processed; skip it
// debug
- log.Info(fmt.Sprintf("Skipping advisory issued before the last known advisory date (%s) in database: %s (%s)",
+ log.Trace(fmt.Sprintf("Skipping advisory issued before the last known advisory date (%s) in database: %s (%s)",
sinceDate,
definition.Metadata.Title, definition.Metadata.Advisory.Issued.Date))
}
}
// debug-only info
out, _ := xml.MarshalIndent(ovalDoc, " ", " ")
- log.Debug(string(out))
+ log.Trace(string(out))
return advisories, nil
}
// ParseAdvisory - parse the given advisory definition
-func ParseAdvisory(definition OvalV2AdvisoryDefinition, ovalDoc OvalV2Document) (ParsedAdvisory) {
+func ParseAdvisory(definition OvalV2AdvisoryDefinition, ovalDoc OvalV2Document) ParsedAdvisory {
parsedAdvisory := ParsedAdvisory{
- Class: definition.Class,
- ID: definition.ID,
- Version: definition.Version,
- Metadata: definition.Metadata,
- Criteria: definition.Criteria,
+ Class: definition.Class,
+ ID: definition.ID,
+ Version: definition.Version,
+ Metadata: definition.Metadata,
+ Criteria: definition.Criteria,
PackageList: GetPackageList(definition.Criteria, ovalDoc),
}
return parsedAdvisory
@@ -469,21 +562,42 @@ func ParseAdvisory(definition OvalV2AdvisoryDefinition, ovalDoc OvalV2Document)
// GetPackageList - get the package list associated with the given criteria
func GetPackageList(criteria OvalV2Criteria, ovalDoc OvalV2Document) (parsedNvras []ParsedRmpNvra) {
criterions := extractAllCriterions(criteria)
+ duplicatePackageCount := 0 // count of skipped duplicate nvras, logged below (short form: explicit `var ... int = 0` is non-idiomatic)
for _, criterion := range criterions {
// get package info
parsedRpmNvra := FindPackageNvraInfo(criterion.TestRef, ovalDoc)
// only include parsed nvra data if non-empty
- if (parsedRpmNvra.Evr != "") {
- parsedNvras = append(parsedNvras, parsedRpmNvra)
+ if parsedRpmNvra.Evr != "" {
+ // make sure parsedNvras doesn't already contain parsedRpmNvra
+ if !ParsedNvrasContains(parsedNvras, parsedRpmNvra) {
+ // new/unique, add it
+ parsedNvras = append(parsedNvras, parsedRpmNvra)
+ } else {
+ duplicatePackageCount++
+ }
}
}
+ // debug
+ if duplicatePackageCount > 0 {
+ log.Debug(fmt.Sprintf("skipped duplicate packages: %d", duplicatePackageCount))
+ }
return
}
+// ParsedNvrasContains - determine whether the given parsedNvras slice already contains the given nvra (struct equality over Name/Evr/Arch)
+func ParsedNvrasContains(parsedNvras []ParsedRmpNvra, nvra ParsedRmpNvra) bool {
+ for _, parsedNvra := range parsedNvras {
+ if parsedNvra == nvra {
+ return true
+ }
+ }
+ return false
+}
+
// FindPackageNvraInfo - get nvra info for the given test ref
func FindPackageNvraInfo(testRefID string, ovalDoc OvalV2Document) ParsedRmpNvra {
var parsedNvra ParsedRmpNvra
- for _, test := range ovalDoc.TestSet.Tests {
+ for _, test := range ovalDoc.TestSet.Tests {
if test.ID == testRefID {
for _, obj := range ovalDoc.ObjectSet.Objects {
if obj.ID == test.ObjectRef.Ref {
@@ -491,7 +605,7 @@ func FindPackageNvraInfo(testRefID string, ovalDoc OvalV2Document) ParsedRmpNvra
}
}
for _, state := range ovalDoc.StateSet.States {
- if (state.ID == test.StateRef.Ref) {
+ if state.ID == test.StateRef.Ref {
parsedNvra.Evr = state.Evr.Value
parsedNvra.Arch = state.Arch.Value
}
@@ -507,14 +621,14 @@ func IsAdvisorySinceDate(sinceDate string, advisoryDate string) bool {
sinceDate = DefaultLastAdvisoryDate
}
sinceTime, err := time.Parse(AdvisoryDateFormat, sinceDate)
- if err != nil {
+ if err != nil {
log.Error("error parsing since date string: " + sinceDate)
// if unable to parse date, treat as new advisory
return true
}
advisoryTime, err := time.Parse(AdvisoryDateFormat, advisoryDate)
- if err != nil {
- log.Error("error parsing advisory date string: " + advisoryDate)
+ if err != nil {
+ log.Error("error parsing advisory date string: " + advisoryDate)
// if unable to parse date, treat as new advisory
return true
}
@@ -527,14 +641,14 @@ func IsAdvisorySameDate(sinceDate string, advisoryDate string) bool {
sinceDate = DefaultLastAdvisoryDate
}
sinceTime, err := time.Parse(AdvisoryDateFormat, sinceDate)
- if err != nil {
- log.Error("error parsing since date string: " + sinceDate)
+ if err != nil {
+ log.Error("error parsing since date string: " + sinceDate)
// if unable to parse date, treat as not same
return false
}
advisoryTime, err := time.Parse(AdvisoryDateFormat, advisoryDate)
- if err != nil {
- log.Error("error parsing advisory date string: " + advisoryDate)
+ if err != nil {
+ log.Error("error parsing advisory date string: " + advisoryDate)
// if unable to parse date, treat as not same
return false
}
@@ -549,7 +663,7 @@ func DbLookupLastAdvisoryDate(datastore database.Datastore) string {
// error while fetching record, use default
return DefaultLastAdvisoryDate
}
- if (ok == false || dbLastAdvisoryDate == "") {
+ if !ok || dbLastAdvisoryDate == "" { // idiomatic negation instead of `ok == false` (staticcheck S1002)
// no record found, use default
return DefaultLastAdvisoryDate
}
@@ -585,7 +699,7 @@ func ConstructFlagForManifestEntrySignature(manifestEntry ManifestEntry, datasto
// since the last time the manifest was processed
func IsNewOrUpdatedManifestEntry(manifestEntry ManifestEntry, datastore database.Datastore) bool {
currentDbSignature, ok, err := database.FindKeyValueAndRollback(datastore,
- DbManifestEntryKeyPrefix + manifestEntry.BzipPath)
+ DbManifestEntryKeyPrefix+manifestEntry.BzipPath)
if err != nil {
// log the error and err on the side of treat-as-new/updated
log.Error("Unable to fetch advisory signature from db, caused by: " + err.Error())
@@ -696,8 +810,8 @@ func ReadBzipOvalFile(bzipOvalFile string) (string, error) {
}
// ParseCriteriaForModuleNamespaces - parse one definition
-func ParseCriteriaForModuleNamespaces(criteria OvalV2Criteria) ([]string) {
- var moduleNamespaces []string
+func ParseCriteriaForModuleNamespaces(criteria OvalV2Criteria) []string {
+ var moduleNamespaces []string
criterions := extractAllCriterions(criteria)
// walk the criteria and add them
for _, criterion := range criterions {
@@ -708,6 +822,5 @@ func ParseCriteriaForModuleNamespaces(criteria OvalV2Criteria) ([]string) {
moduleNamespaces = append(moduleNamespaces, matches[2])
}
}
- return moduleNamespaces
+ return moduleNamespaces
}
-
diff --git a/ext/vulnsrc/redhat/redhat_test.go b/ext/vulnsrc/redhat/redhat_test.go
index 6fbe758fe1..1eb01d0597 100644
--- a/ext/vulnsrc/redhat/redhat_test.go
+++ b/ext/vulnsrc/redhat/redhat_test.go
@@ -109,11 +109,11 @@ func TestIsRelevantCriterion(t *testing.T) {
args args
want bool
}{
- {"1", args{OvalV2Criterion{Comment:"softhsm-devel is earlier than 0:2.4.0-2.module+el8.1.0+4098+f286395e"}}, true},
- {"2", args{OvalV2Criterion{Comment:"Red Hat Enterprise Linux must be installed"}}, false},
- {"3", args{OvalV2Criterion{Comment:"Module idm:DL1 is enabled"}}, false},
- {"4", args{OvalV2Criterion{Comment:"softhsm-devel is signed with Red Hat redhatrelease2 key"}}, false},
- {"5", args{OvalV2Criterion{Comment:""}}, false},
+ {"1", args{OvalV2Criterion{Comment: "softhsm-devel is earlier than 0:2.4.0-2.module+el8.1.0+4098+f286395e"}}, true},
+ {"2", args{OvalV2Criterion{Comment: "Module idm:DL1 is enabled"}}, true},
+ {"3", args{OvalV2Criterion{Comment: "Red Hat Enterprise Linux must be installed"}}, false},
+ {"4", args{OvalV2Criterion{Comment: "softhsm-devel is signed with Red Hat redhatrelease2 key"}}, false},
+ {"5", args{OvalV2Criterion{Comment: ""}}, false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
@@ -148,6 +148,11 @@ func TestIsSupportedDefinitionType(t *testing.T) {
if got := IsSupportedDefinitionType(tt.args.arch); got != tt.want {
t.Errorf("IsSupportedDefinitionType(%v) = %v, want %v", tt.args.arch, got, tt.want)
}
+ // debug
+ log.Info(fmt.Sprintf("!IsSupportedDefinitionType(%s)", tt.args.arch))
+ if got := !IsSupportedDefinitionType(tt.args.arch); got != !tt.want {
+ t.Errorf("!IsSupportedDefinitionType(%v) = %v, want %v", tt.args.arch, got, !tt.want)
+ }
})
}
}
@@ -314,7 +319,7 @@ func TestReadBzipOvalFile(t *testing.T) {
func TestParseCpeNamesFromAffectedCpeList(t *testing.T) {
pwd, _ := os.Getwd()
- xmlFilePath := pwd + "/testdata/v2/ansible-2.8.oval.xml"
+ xmlFilePath := pwd + "/testdata/v2/ansible-1.x.oval.xml"
xmlContent, err := ioutil.ReadFile(xmlFilePath)
if err != nil {
log.Fatal("error reading " + xmlFilePath)
@@ -336,24 +341,39 @@ func TestParseCpeNamesFromAffectedCpeList(t *testing.T) {
want []string
wantErr bool
}{
- // cpe:/a:redhat:ansible_engine:2.8::el8
{
- "1",
+ "Two cpes",
args{ovalDoc.DefinitionSet.Definitions[0].Metadata.Advisory.AffectedCpeList},
[]string{
"cpe:/a:redhat:ansible_engine:2.8",
"cpe:/a:redhat:ansible_engine:2.8::el8",
- // []CpeName{
- // {Part: "a", Vendor: "redhat", Product: "ansible_engine", Version: "2.8", Update: "", Edition: "el8", Language: ""},
},
false,
},
+ {
+ "With one empty cpe",
+ args{ovalDoc.DefinitionSet.Definitions[1].Metadata.Advisory.AffectedCpeList},
+ []string{
+ "cpe:/a:redhat:ansible_engine:2.8",
+ "cpe:/a:redhat:ansible_engine:2.8::el8",
+ },
+ false,
+ },
+ {
+ "No cpe (unparseable)",
+ args{ovalDoc.DefinitionSet.Definitions[2].Metadata.Advisory.AffectedCpeList},
+ []string{},
+ true,
+ },
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := ParseCpeNamesFromAffectedCpeList(tt.args.affectedCpeList)
- if (err != nil) != tt.wantErr {
- t.Errorf("ParseCpeNamesFromAffectedCpeList() error = %v, wantErr %v", err, tt.wantErr)
+ if err != nil {
+ if (err != nil) != tt.wantErr {
+ t.Errorf("ParseCpeNamesFromAffectedCpeList() error = %v, wantErr %v", err, tt.wantErr)
+ }
+ // expected error, no need to continue
return
}
if !reflect.DeepEqual(got, tt.want) {
@@ -372,36 +392,36 @@ func TestIsSignificantSeverity(t *testing.T) {
args args
want bool
}{
- {"None", args{"None"},false},
- {"Low", args{"Low"},true},
- {"Moderate", args{"Moderate"},true},
- {"Important", args{"Important"},true},
- {"Critical", args{"Critical"},true},
- {"Unknown", args{"Unknown"},true},
- }
- for _, tt := range tests {
+ {"None", args{"None"}, false},
+ {"Low", args{"Low"}, true},
+ {"Moderate", args{"Moderate"}, true},
+ {"Important", args{"Important"}, true},
+ {"Critical", args{"Critical"}, true},
+ {"Unknown", args{"Unknown"}, true},
+ }
+ for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := IsSignificantSeverity(tt.args.severity); got != tt.want {
- t.Errorf("IsSignificantSeverity(%s->%s) = %v, want %v",
- tt.args.severity,
- strings.Title(tt.args.severity),
- got,
+ t.Errorf("IsSignificantSeverity(%s->%s) = %v, want %v",
+ tt.args.severity,
+ strings.Title(tt.args.severity),
+ got,
tt.want)
}
// test as all uppercase
if got := IsSignificantSeverity(strings.ToUpper(tt.args.severity)); got != tt.want {
- t.Errorf("IsSignificantSeverity(%s->%s) = %v, want %v",
- strings.ToUpper(tt.args.severity),
- strings.Title(strings.ToUpper(tt.args.severity)),
- got,
+ t.Errorf("IsSignificantSeverity(%s->%s) = %v, want %v",
+ strings.ToUpper(tt.args.severity),
+ strings.Title(strings.ToUpper(tt.args.severity)),
+ got,
tt.want)
}
// test as all lowercase
if got := IsSignificantSeverity(strings.ToLower(tt.args.severity)); got != tt.want {
- t.Errorf("IsSignificantSeverity(%s->%s) = %v, want %v",
- strings.ToLower(tt.args.severity),
- strings.Title(strings.ToLower(tt.args.severity)),
- got,
+ t.Errorf("IsSignificantSeverity(%s->%s) = %v, want %v",
+ strings.ToLower(tt.args.severity),
+ strings.Title(strings.ToLower(tt.args.severity)),
+ got,
tt.want)
}
})
@@ -417,42 +437,615 @@ func TestGetSeverity(t *testing.T) {
args args
want database.Severity
}{
- {"None", args{"None"},database.NegligibleSeverity},
- {"Low", args{"Low"},database.LowSeverity},
- {"Moderate", args{"Moderate"},database.MediumSeverity},
- {"Important", args{"Important"},database.HighSeverity},
- {"Critical", args{"Critical"},database.CriticalSeverity},
- {"Unknown", args{"Unknown"},database.UnknownSeverity},
+ {"None", args{"None"}, database.NegligibleSeverity},
+ {"Low", args{"Low"}, database.LowSeverity},
+ {"Moderate", args{"Moderate"}, database.MediumSeverity},
+ {"Important", args{"Important"}, database.HighSeverity},
+ {"Critical", args{"Critical"}, database.CriticalSeverity},
+ {"Unknown", args{"Unknown"}, database.UnknownSeverity},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := GetSeverity(tt.args.severity); !reflect.DeepEqual(got, tt.want) {
- t.Errorf("GetSeverity(%s->%s) = %v, want %v",
- tt.args.severity,
- strings.Title(tt.args.severity),
- got,
+ t.Errorf("GetSeverity(%s->%s) = %v, want %v",
+ tt.args.severity,
+ strings.Title(tt.args.severity),
+ got,
tt.want)
}
// test as all uppercase
if got := GetSeverity(tt.args.severity); !reflect.DeepEqual(got, tt.want) {
- t.Errorf("GetSeverity(%s->%s) = %v, want %v",
- tt.args.severity,
- strings.Title(strings.ToUpper(tt.args.severity)),
- got,
+ t.Errorf("GetSeverity(%s->%s) = %v, want %v",
+ tt.args.severity,
+ strings.Title(strings.ToUpper(tt.args.severity)),
+ got,
tt.want)
}
// test as all lowercase
if got := GetSeverity(tt.args.severity); !reflect.DeepEqual(got, tt.want) {
- t.Errorf("GetSeverity(%s->%s) = %v, want %v",
- tt.args.severity,
- strings.Title(strings.ToLower(tt.args.severity)),
- got,
+ t.Errorf("GetSeverity(%s->%s) = %v, want %v",
+ tt.args.severity,
+ strings.Title(strings.ToLower(tt.args.severity)),
+ got,
tt.want)
}
})
}
}
+func TestParsedNvrasContains(t *testing.T) {
+ type args struct {
+ parsedNvras []ParsedRmpNvra
+ nvra ParsedRmpNvra
+ }
+ tests := []struct {
+ name string
+ args args
+ want bool
+ }{
+ {"1", args{[]ParsedRmpNvra{
+ ParsedRmpNvra{Name: "name1", Evr: "evr1", Arch: "arch1"},
+ },
+ ParsedRmpNvra{Name: "name1", Evr: "evr1", Arch: "arch1"}},
+ true,
+ },
+ {"2", args{[]ParsedRmpNvra{
+ ParsedRmpNvra{Name: "name1", Evr: "evr1", Arch: "arch1"},
+ },
+ ParsedRmpNvra{Name: "name1", Evr: "evr1", Arch: "arch2"}},
+ false,
+ },
+ {"3", args{[]ParsedRmpNvra{
+ ParsedRmpNvra{Name: "name1", Evr: "evr1", Arch: "arch1"},
+ ParsedRmpNvra{Name: "name1", Evr: "evr1", Arch: "arch2"},
+ ParsedRmpNvra{Name: "name1", Evr: "evr1", Arch: "arch3"},
+ },
+ ParsedRmpNvra{Name: "name1", Evr: "evr1", Arch: "arch1"}},
+ true},
+ {"4", args{[]ParsedRmpNvra{
+ ParsedRmpNvra{Name: "name1", Evr: "evr1", Arch: "arch1"},
+ ParsedRmpNvra{Name: "name2", Evr: "evr1", Arch: "arch1"},
+ ParsedRmpNvra{Name: "name3", Evr: "evr1", Arch: "arch1"},
+ },
+ ParsedRmpNvra{Name: "name2", Evr: "evr1", Arch: "arch1"}},
+ true},
+ {"5", args{[]ParsedRmpNvra{
+ ParsedRmpNvra{Name: "name1", Evr: "evr1", Arch: "arch1"},
+ ParsedRmpNvra{Name: "name1", Evr: "evr2", Arch: "arch1"},
+ ParsedRmpNvra{Name: "name1", Evr: "evr3", Arch: "arch1"},
+ },
+ ParsedRmpNvra{Name: "name1", Evr: "evr3", Arch: "arch1"}},
+ true},
+ {"6", args{[]ParsedRmpNvra{
+ ParsedRmpNvra{Name: "name1", Evr: "evr1", Arch: "arch1"},
+ ParsedRmpNvra{Name: "name1", Evr: "evr2", Arch: "arch1"},
+ ParsedRmpNvra{Name: "name1", Evr: "evr3", Arch: "arch1"},
+ },
+ ParsedRmpNvra{Name: "name1", Evr: "evr4", Arch: "arch1"}},
+ false},
+ {"7", args{[]ParsedRmpNvra{
+ ParsedRmpNvra{Name: "name1", Evr: "evr1", Arch: "arch1"},
+ ParsedRmpNvra{Name: "name1", Evr: "evr2", Arch: "arch1"},
+ ParsedRmpNvra{Name: "name1", Evr: "evr3", Arch: "arch1"},
+ },
+ ParsedRmpNvra{Name: "name2", Evr: "evr2", Arch: "arch1"}},
+ false},
+ {"8", args{[]ParsedRmpNvra{},
+ ParsedRmpNvra{Name: "name1", Evr: "evr1", Arch: "arch1"}},
+ false},
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ // debug
+ log.Info(fmt.Sprintf("ParsedNvrasContains(%s, %s)", tt.args.parsedNvras, tt.args.nvra))
+ if got := ParsedNvrasContains(tt.args.parsedNvras, tt.args.nvra); got != tt.want {
+ t.Errorf("ParsedNvrasContains(%v, %v) = %v, want %v", tt.args.parsedNvras, tt.args.nvra, got, tt.want)
+ }
+ })
+ }
+}
+
+func TestParseRhsaName(t *testing.T) {
+ type args struct {
+ advisoryDefinition ParsedAdvisory
+ }
+ tests := []struct {
+ name string
+ args args
+ want string
+ }{
+ {
+ "1",
+ args{
+ ParsedAdvisory{Metadata: OvalV2Metadata{Title: "RHSA-2013:0149: flash-plugin security update (Critical)", Reference: []OvalV2Reference{{RefID: "RHSA-2013:0149"}}}},
+ },
+ "RHSA-2013:0149",
+ },
+ {
+ "2",
+ args{
+ ParsedAdvisory{Metadata: OvalV2Metadata{Title: "RHSA-2013:0149: flash-plugin security update (Critical)", Reference: []OvalV2Reference{{RefID: ""}}}},
+ },
+ "RHSA-2013:0149",
+ },
+ {
+ "3",
+ args{
+ ParsedAdvisory{Metadata: OvalV2Metadata{Title: "RHSA-2013:0149: flash-plugin security update (Critical)", Reference: []OvalV2Reference{}}},
+ },
+ "RHSA-2013:0149",
+ },
+ {
+ "4",
+ args{
+ ParsedAdvisory{Metadata: OvalV2Metadata{Title: "", Reference: []OvalV2Reference{{RefID: ""}}}},
+ },
+ "",
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := ParseRhsaName(tt.args.advisoryDefinition); got != tt.want {
+ t.Errorf("ParseRhsaName() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestParseCriteriaForModuleNamespaces(t *testing.T) {
+ type args struct {
+ criteria OvalV2Criteria
+ }
+ tests := []struct {
+ name string
+ args args
+ want []string
+ }{
+ {
+ "One Module",
+ args{
+ OvalV2Criteria{Criterion: []OvalV2Criterion{{Comment: "Module nodejs:12 is enabled", TestRef: "oval:com.redhat.rhea:tst:20200330015"}}},
+ },
+ []string{"nodejs:12"},
+ },
+ {
+ "Non-Module",
+ args{
+ OvalV2Criteria{Criterion: []OvalV2Criterion{{Comment: "vim-filesystem is earlier than vim-filesystem-2:7.4.629-2.el7.x86_64", TestRef: "oval:com.redhat.rhsa:tst:20162972001"}}},
+ },
+ []string{},
+ },
+ {
+ "Three Modules",
+ args{
+ OvalV2Criteria{
+ Criterion: []OvalV2Criterion{
+ {Comment: "Module nodejs:12 is enabled", TestRef: "oval:com.redhat.rhea:tst:20200330015"},
+ {Comment: "Module idm:DL1 is enabled", TestRef: "oval:com.redhat.rhea:tst:20200330015"},
+ {Comment: "Module container-tools:rhel8 is enabled", TestRef: "oval:com.redhat.rhea:tst:20200330015"},
+ },
+ },
+ },
+ []string{"nodejs:12", "idm:DL1", "container-tools:rhel8"},
+ },
+ {
+ "Empty Criteria",
+ args{
+ OvalV2Criteria{},
+ },
+ []string{},
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got := ParseCriteriaForModuleNamespaces(tt.args.criteria)
+ if len(got) == 0 {
+ if len(tt.want) != 0 {
+ t.Errorf("ParseCriteriaForModuleNamespaces() = %v, want %v", got, tt.want)
+ }
+ } else if !reflect.DeepEqual(got, tt.want) {
+ t.Errorf("ParseCriteriaForModuleNamespaces() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestVulnerabilityContainsFeature(t *testing.T) {
+ type args struct {
+ vulnerability database.VulnerabilityWithAffected
+ comparisonFeature database.AffectedFeature
+ }
+ tests := []struct {
+ name string
+ args args
+ want bool
+ }{
+ {
+ "found in one feature",
+ args{
+ database.VulnerabilityWithAffected{
+ Vulnerability: database.Vulnerability{
+ Name: "vuln one",
+ },
+ Affected: []database.AffectedFeature{
+ {
+ FeatureName: "first-feature-name",
+ AffectedVersion: "v.0.0.1",
+ FixedInVersion: "v.0.0.1",
+ FeatureType: "first.feature.type",
+ },
+ },
+ },
+ database.AffectedFeature{
+ FeatureName: "first-feature-name",
+ AffectedVersion: "v.0.0.1",
+ FixedInVersion: "v.0.0.1",
+ FeatureType: "first.feature.type",
+ },
+ },
+ true,
+ },
+ {
+ "not found in one feature",
+ args{
+ database.VulnerabilityWithAffected{
+ Vulnerability: database.Vulnerability{
+ Name: "vuln one",
+ },
+ Affected: []database.AffectedFeature{
+ {
+ FeatureName: "first-feature-name",
+ AffectedVersion: "v.0.0.1",
+ FixedInVersion: "v.0.0.1",
+ FeatureType: "first.feature.type",
+ },
+ },
+ },
+ database.AffectedFeature{
+ FeatureName: "second-feature-name",
+ AffectedVersion: "v.0.0.2",
+ FixedInVersion: "v.0.0.2",
+ FeatureType: "second.feature.type",
+ },
+ },
+ false,
+ },
+ {
+ "found in three features",
+ args{
+ database.VulnerabilityWithAffected{
+ Vulnerability: database.Vulnerability{
+ Name: "vuln one",
+ },
+ Affected: []database.AffectedFeature{
+ {
+ FeatureName: "first-feature-name",
+ AffectedVersion: "v.0.0.1",
+ FixedInVersion: "v.0.0.1",
+ FeatureType: "first.feature.type",
+ },
+ {
+ FeatureName: "second-feature-name",
+ AffectedVersion: "v.0.0.2",
+ FixedInVersion: "v.0.0.2",
+ FeatureType: "second.feature.type",
+ },
+ {
+ FeatureName: "third-feature-name",
+ AffectedVersion: "v.0.0.3",
+ FixedInVersion: "v.0.0.3",
+ FeatureType: "third.feature.type",
+ },
+ },
+ },
+ database.AffectedFeature{
+ FeatureName: "second-feature-name",
+ AffectedVersion: "v.0.0.2",
+ FixedInVersion: "v.0.0.2",
+ FeatureType: "second.feature.type",
+ },
+ },
+ true,
+ },
+ {
+ "not found in three features",
+ args{
+ database.VulnerabilityWithAffected{
+ Vulnerability: database.Vulnerability{
+ Name: "vuln one",
+ },
+ Affected: []database.AffectedFeature{
+ {
+ FeatureName: "first-feature-name",
+ AffectedVersion: "v.0.0.1",
+ FixedInVersion: "v.0.0.1",
+ FeatureType: "first.feature.type",
+ },
+ {
+ FeatureName: "second-feature-name",
+ AffectedVersion: "v.0.0.2",
+ FixedInVersion: "v.0.0.2",
+ FeatureType: "second.feature.type",
+ },
+ {
+ FeatureName: "third-feature-name",
+ AffectedVersion: "v.0.0.3",
+ FixedInVersion: "v.0.0.3",
+ FeatureType: "third.feature.type",
+ },
+ },
+ },
+ // imperfect match
+ database.AffectedFeature{
+ FeatureName: "second-feature-name",
+ AffectedVersion: "v.0.0.4",
+ FixedInVersion: "v.0.0.4",
+ FeatureType: "second.feature.type",
+ },
+ },
+ false,
+ },
+ {
+ "not found in zero features",
+ args{
+ database.VulnerabilityWithAffected{
+ Vulnerability: database.Vulnerability{
+ Name: "vuln one",
+ },
+ Affected: []database.AffectedFeature{},
+ },
+ database.AffectedFeature{
+ FeatureName: "first-feature-name",
+ AffectedVersion: "v.0.0.1",
+ FixedInVersion: "v.0.0.1",
+ FeatureType: "first.feature.type",
+ },
+ },
+ false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := VulnerabilityContainsFeature(tt.args.vulnerability, tt.args.comparisonFeature); got != tt.want {
+ t.Errorf("VulnerabilityContainsFeature() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestGetPendingVulnerabilitySliceIndex(t *testing.T) {
+ type args struct {
+ vulnSet []database.VulnerabilityWithAffected
+ lookupVuln database.VulnerabilityWithAffected
+ }
+ tests := []struct {
+ name string
+ args args
+ want int
+ }{
+ {
+ "found among one vuln",
+ args{
+ []database.VulnerabilityWithAffected{
+ {
+ Vulnerability: database.Vulnerability{
+ Name: "vuln one",
+ },
+ },
+ },
+ database.VulnerabilityWithAffected{
+ Vulnerability: database.Vulnerability{
+ Name: "vuln one",
+ },
+ },
+ },
+ 0,
+ },
+ {
+ "not found among one vuln",
+ args{
+ []database.VulnerabilityWithAffected{
+ {
+ Vulnerability: database.Vulnerability{
+ Name: "vuln one",
+ },
+ },
+ },
+ database.VulnerabilityWithAffected{
+ Vulnerability: database.Vulnerability{
+ Name: "vuln two",
+ },
+ },
+ },
+ -1,
+ },
+ {
+ "found among three vulns",
+ args{
+ []database.VulnerabilityWithAffected{
+ {
+ Vulnerability: database.Vulnerability{
+ Name: "vuln one",
+ },
+ },
+ {
+ Vulnerability: database.Vulnerability{
+ Name: "vuln two",
+ },
+ },
+ {
+ Vulnerability: database.Vulnerability{
+ Name: "vuln three",
+ },
+ },
+ },
+ database.VulnerabilityWithAffected{
+ Vulnerability: database.Vulnerability{
+ Name: "vuln two",
+ },
+ },
+ },
+ 1,
+ },
+ {
+ "not found among three vulns",
+ args{
+ []database.VulnerabilityWithAffected{
+ {
+ Vulnerability: database.Vulnerability{
+ Name: "vuln one",
+ },
+ },
+ {
+ Vulnerability: database.Vulnerability{
+ Name: "vuln two",
+ },
+ },
+ {
+ Vulnerability: database.Vulnerability{
+ Name: "vuln three",
+ },
+ },
+ },
+ database.VulnerabilityWithAffected{
+ Vulnerability: database.Vulnerability{
+ Name: "vuln four",
+ },
+ },
+ },
+ -1,
+ },
+ {
+ "not found among zero vulns",
+ args{
+ []database.VulnerabilityWithAffected{},
+ database.VulnerabilityWithAffected{
+ Vulnerability: database.Vulnerability{
+ Name: "vuln four",
+ },
+ },
+ },
+ -1,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := GetPendingVulnerabilitySliceIndex(tt.args.vulnSet, tt.args.lookupVuln); got != tt.want {
+ t.Errorf("GetPendingVulnerabilitySliceIndex() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestMergeVulnerabilityFeature(t *testing.T) {
+ type args struct {
+ sourceVuln database.VulnerabilityWithAffected
+ targetVuln database.VulnerabilityWithAffected
+ }
+ tests := []struct {
+ name string
+ args args
+ want database.VulnerabilityWithAffected
+ }{
+ // TODO: Add test cases.
+
+ {
+ "one merged from two sets of three features",
+ args{
+ database.VulnerabilityWithAffected{
+ Vulnerability: database.Vulnerability{
+ Name: "vuln one",
+ },
+ Affected: []database.AffectedFeature{
+ {
+ FeatureName: "first-feature-name",
+ AffectedVersion: "v.0.0.1",
+ FixedInVersion: "v.0.0.1",
+ FeatureType: "first.feature.type",
+ },
+ {
+ FeatureName: "second-feature-name",
+ AffectedVersion: "v.0.0.2",
+ FixedInVersion: "v.0.0.2",
+ FeatureType: "second.feature.type",
+ },
+ {
+ FeatureName: "third-feature-name",
+ AffectedVersion: "v.0.0.3",
+ FixedInVersion: "v.0.0.3",
+ FeatureType: "third.feature.type",
+ },
+ },
+ },
+ database.VulnerabilityWithAffected{
+ Vulnerability: database.Vulnerability{
+ Name: "vuln one",
+ },
+ Affected: []database.AffectedFeature{
+ {
+ FeatureName: "second-feature-name",
+ AffectedVersion: "v.0.0.2",
+ FixedInVersion: "v.0.0.2",
+ FeatureType: "second.feature.type",
+ },
+ {
+ FeatureName: "third-feature-name",
+ AffectedVersion: "v.0.0.3",
+ FixedInVersion: "v.0.0.3",
+ FeatureType: "third.feature.type",
+ },
+ {
+ FeatureName: "fourth-feature-name",
+ AffectedVersion: "v.0.0.4",
+ FixedInVersion: "v.0.0.4",
+ FeatureType: "fourth.feature.type",
+ },
+ },
+ },
+ },
+ database.VulnerabilityWithAffected{
+ Vulnerability: database.Vulnerability{
+ Name: "vuln one",
+ },
+ Affected: []database.AffectedFeature{
+ {
+ FeatureName: "second-feature-name",
+ AffectedVersion: "v.0.0.2",
+ FixedInVersion: "v.0.0.2",
+ FeatureType: "second.feature.type",
+ },
+ {
+ FeatureName: "third-feature-name",
+ AffectedVersion: "v.0.0.3",
+ FixedInVersion: "v.0.0.3",
+ FeatureType: "third.feature.type",
+ },
+ {
+ FeatureName: "fourth-feature-name",
+ AffectedVersion: "v.0.0.4",
+ FixedInVersion: "v.0.0.4",
+ FeatureType: "fourth.feature.type",
+ },
+ // additional source features will be appended to the end of the target slice
+ {
+ FeatureName: "first-feature-name",
+ AffectedVersion: "v.0.0.1",
+ FixedInVersion: "v.0.0.1",
+ FeatureType: "first.feature.type",
+ },
+ },
+ },
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := MergeVulnerabilityFeature(tt.args.sourceVuln, tt.args.targetVuln); !reflect.DeepEqual(got, tt.want) {
+ t.Errorf("MergeVulnerabilityFeature() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
type mockDatastore struct {
database.MockDatastore
@@ -534,4 +1127,3 @@ func newmockDatastore() *mockDatastore {
}
return md
}
-
diff --git a/ext/vulnsrc/redhat/structs.go b/ext/vulnsrc/redhat/structs.go
index 737baa2d4c..7a83a4cb14 100644
--- a/ext/vulnsrc/redhat/structs.go
+++ b/ext/vulnsrc/redhat/structs.go
@@ -35,177 +35,185 @@ type ManifestEntry struct {
// OvalV2Document - represents an uncompressed ovalV2 bzip document linked from PULP_MANIFEST
type OvalV2Document struct {
- XMLName xml.Name `xml:"oval_definitions"`
- DefinitionSet OvalV2AdvisoryDefinitions `xml:"definitions"`
- TestSet OvalV2Tests `xml:"tests"`
- ObjectSet OvalV2Objects `xml:"objects"`
- StateSet OvalV2States `xml:"states"`
+ XMLName xml.Name `xml:"oval_definitions"`
+ DefinitionSet OvalV2AdvisoryDefinitions `xml:"definitions"`
+ TestSet OvalV2Tests `xml:"tests"`
+ ObjectSet OvalV2Objects `xml:"objects"`
+ StateSet OvalV2States `xml:"states"`
}
// OvalV2AdvisoryDefinitions - OvalV2Document.definition array
type OvalV2AdvisoryDefinitions struct {
- Definitions []OvalV2AdvisoryDefinition `xml:"definition"`
+ Definitions []OvalV2AdvisoryDefinition `xml:"definition"`
}
// OvalV2AdvisoryDefinition - single definition from ovalV2 document
type OvalV2AdvisoryDefinition struct {
- Class string `xml:"class,attr"`
- ID string `xml:"id,attr"`
- Version string `xml:"version,attr"`
- Metadata OvalV2Metadata `xml:"metadata"`
- Criteria OvalV2Criteria `xml:"criteria"`
+ Class string `xml:"class,attr"`
+ ID string `xml:"id,attr"`
+ Version string `xml:"version,attr"`
+ Metadata OvalV2Metadata `xml:"metadata"`
+ Criteria OvalV2Criteria `xml:"criteria"`
}
// OvalV2Metadata - advisory metadata
type OvalV2Metadata struct {
- Title string `xml:"title"`
- Description string `xml:"description"`
- Advisory OvalV2Advisory `xml:"advisory"`
+ Title string `xml:"title"`
+ Reference []OvalV2Reference `xml:"reference"`
+ Description string `xml:"description"`
+ Advisory OvalV2Advisory `xml:"advisory"`
+}
+
+// OvalV2Reference - advisory reference
+type OvalV2Reference struct {
+ RefID string `xml:"ref_id"`
+ RefURL string `xml:"ref_url"`
+ Source string `xml:"source"`
}
// OvalV2Advisory - advisory data
type OvalV2Advisory struct {
- Issued OvalV2AdvisoryIssued `xml:"issued"`
- Updated OvalV2AdvisoryUpdated `xml:"updated"`
- Severity string `xml:"severity"`
- CveList []OvalV2CveData `xml:"cve"`
- AffectedCpeList OvalV2Cpe `xml:"affected_cpe_list"`
+ Issued OvalV2AdvisoryIssued `xml:"issued"`
+ Updated OvalV2AdvisoryUpdated `xml:"updated"`
+ Severity string `xml:"severity"`
+ CveList []OvalV2CveData `xml:"cve"`
+ AffectedCpeList OvalV2Cpe `xml:"affected_cpe_list"`
}
// OvalV2AdvisoryIssued - date advisory was issued (YYYY-MM-DD)
type OvalV2AdvisoryIssued struct {
- Date string `xml:"date,attr"`
+ Date string `xml:"date,attr"`
}
// OvalV2AdvisoryUpdated - date advisory was issued (YYYY-MM-DD)
type OvalV2AdvisoryUpdated struct {
- Date string `xml:"date,attr"`
+ Date string `xml:"date,attr"`
}
// OvalV2CveData - advisory cve data
type OvalV2CveData struct {
- XMLName xml.Name `xml:"cve"`
- Cvss3 string `xml:"cvss3,attr"`
- Cwe string `xml:"cwe,attr"`
- Href string `xml:"href,attr"`
- Public string `xml:"public,attr"`
- Value string `xml:",chardata"`
+ XMLName xml.Name `xml:"cve"`
+ Cvss3 string `xml:"cvss3,attr"`
+ Cwe string `xml:"cwe,attr"`
+ Href string `xml:"href,attr"`
+ Public string `xml:"public,attr"`
+ Value string `xml:",chardata"`
}
// OvalV2Cpe - advisory affected cpes
type OvalV2Cpe struct {
- Cpe []string `xml:"cpe"`
+ Cpe []string `xml:"cpe"`
}
// CpeName - cpe name components
type CpeName struct {
- Part string
- Vendor string
- Product string
- Version string
- Update string
- Edition string
- Language string
+ Part string
+ Vendor string
+ Product string
+ Version string
+ Update string
+ Edition string
+ Language string
}
// OvalV2Criteria - advisory-related criteria set
type OvalV2Criteria struct {
- Criterion []OvalV2Criterion `xml:"criterion"`
- Criteria []OvalV2Criteria `xml:"criteria"`
+ Criterion []OvalV2Criterion `xml:"criterion"`
+ Criteria []OvalV2Criteria `xml:"criteria"`
}
// OvalV2Criterion - advisory-related criteria item
type OvalV2Criterion struct {
- XMLName xml.Name `xml:"criterion"`
- Comment string `xml:"comment,attr"`
- TestRef string `xml:"test_ref,attr"`
+ XMLName xml.Name `xml:"criterion"`
+ Comment string `xml:"comment,attr"`
+ TestRef string `xml:"test_ref,attr"`
}
// OvalV2Tests - oval tests
type OvalV2Tests struct {
- XMLName xml.Name `xml:"tests"`
- Tests []OvalV2RpmInfoTest `xml:"rpminfo_test"`
+ XMLName xml.Name `xml:"tests"`
+ Tests []OvalV2RpmInfoTest `xml:"rpminfo_test"`
}
// OvalV2RpmInfoTest - oval tests.rpminfo_test
type OvalV2RpmInfoTest struct {
- Comment string `xml:"comment,attr"`
- ID string `xml:"id,attr"`
- ObjectRef RpmInfoTestObjectRef `xml:"object"`
- StateRef RpmInfoTestStateRef `xml:"state"`
+ Comment string `xml:"comment,attr"`
+ ID string `xml:"id,attr"`
+ ObjectRef RpmInfoTestObjectRef `xml:"object"`
+ StateRef RpmInfoTestStateRef `xml:"state"`
}
// RpmInfoTestObjectRef - oval reference to test info
type RpmInfoTestObjectRef struct {
- Ref string `xml:"object_ref,attr"`
+ Ref string `xml:"object_ref,attr"`
}
// RpmInfoTestStateRef - oval reference to state info
type RpmInfoTestStateRef struct {
- Ref string `xml:"state_ref,attr"`
+ Ref string `xml:"state_ref,attr"`
}
// OvalV2Objects - ovalV2 objects set
type OvalV2Objects struct {
- XMLName xml.Name `xml:"objects"`
- Objects []OvalV2RpmInfoObject `xml:"rpminfo_object"`
+ XMLName xml.Name `xml:"objects"`
+ Objects []OvalV2RpmInfoObject `xml:"rpminfo_object"`
}
// OvalV2RpmInfoObject - rpm info
type OvalV2RpmInfoObject struct {
- ID string `xml:"id,attr"`
- Version string `xml:"version,attr"`
- Name string `xml:"name"`
+ ID string `xml:"id,attr"`
+ Version string `xml:"version,attr"`
+ Name string `xml:"name"`
}
// OvalV2States - state info
type OvalV2States struct {
- XMLName xml.Name `xml:"states"`
- States []OvalV2RpmInfoState `xml:"rpminfo_state"`
+ XMLName xml.Name `xml:"states"`
+ States []OvalV2RpmInfoState `xml:"rpminfo_state"`
}
// OvalV2RpmInfoState - state info
type OvalV2RpmInfoState struct {
- ID string `xml:"id,attr"`
- Version string `xml:"version,attr"`
- Arch RpmInfoStateChild `xml:"arch"`
- Evr RpmInfoStateChild `xml:"evr"`
+ ID string `xml:"id,attr"`
+ Version string `xml:"version,attr"`
+ Arch RpmInfoStateChild `xml:"arch"`
+ Evr RpmInfoStateChild `xml:"evr"`
}
// RpmInfoStateChild - arch and evr state info
type RpmInfoStateChild struct {
- DataType string `xml:"datatype,attr"`
- Operation string `xml:"operation,attr"`
- Value string `xml:",chardata"`
+ DataType string `xml:"datatype,attr"`
+ Operation string `xml:"operation,attr"`
+ Value string `xml:",chardata"`
}
// OvalV2DefinitionNamespaces - module and cpe namespace info
type OvalV2DefinitionNamespaces struct {
- ModuleNamespaces []string
- CpeNamespaces []string
+ ModuleNamespaces []string
+ CpeNamespaces []string
}
// RpmNvra - rpm nvra info
type RpmNvra struct {
- Name string
- Version string
- Release string
- Arch string
+ Name string
+ Version string
+ Release string
+ Arch string
}
// ParsedAdvisory - parsed advisory info, including relevant criteria and package references
type ParsedAdvisory struct {
- Class string
- ID string
- Version string
- Metadata OvalV2Metadata
- Criteria OvalV2Criteria
- PackageList []ParsedRmpNvra
+ Class string
+ ID string
+ Version string
+ Metadata OvalV2Metadata
+ Criteria OvalV2Criteria
+ PackageList []ParsedRmpNvra
}
// ParsedRmpNvra - parsed rpm nvra info
type ParsedRmpNvra struct {
- Name string
- Evr string
- Arch string
+ Name string
+ Evr string
+ Arch string
}
diff --git a/ext/vulnsrc/redhat/testdata/v2/ansible-1.x.oval.xml b/ext/vulnsrc/redhat/testdata/v2/ansible-1.x.oval.xml
new file mode 100644
index 0000000000..37e1afdb6b
--- /dev/null
+++ b/ext/vulnsrc/redhat/testdata/v2/ansible-1.x.oval.xml
@@ -0,0 +1,347 @@
+
+
+
+ Red Hat OVAL Patch Definition Merger
+ 3
+ 5.10
+ 2020-01-23T19:19:23
+ 1579807163
+
+
+
+
+ RHSA-2019:1708: ansible security and bug fix update (Moderate)
+
+ Red Hat Ansible Engine 2.8 for RHEL 8
+
+
+
+ Ansible is a simple model-driven configuration management, multi-node
+deployment, and remote-task execution system. Ansible works over SSH and
+does
+not require any software or daemons to be installed on remote nodes.
+Extension
+modules can be written in any language and are transferred to managed
+machines
+automatically.
+
+The following packages have been upgraded to a newer upstream version:
+ansible
+(2.8.2)
+
+Security fix(es):
+
+* ansible: unsafe template evaluation of returned module data can lead to information disclosure (CVE-2019-10156)
+
+For more details about the security issue(s), including the impact, a CVSS
+score, and other related information, refer to the CVE page(s) listed in the
+References section.
+
+Bug Fix(es):
+
+See:
+https://github.com/ansible/ansible/blob/v2.8.2/changelogs/CHANGELOG-v2.8.rst
+for details on bug fixes in this release.
+
+ Moderate
+ Copyright 2019 Red Hat, Inc.
+
+
+ CVE-2019-10156
+ CVE-2019-10156 ansible: unsafe template evaluation of returned module data can lead to information disclosure
+
+ cpe:/a:redhat:ansible_engine:2.8
+ cpe:/a:redhat:ansible_engine:2.8::el8
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ RHSA-2019:2542: Ansible security and bug fix update (Moderate)
+
+ Red Hat Ansible Engine 2.8 for RHEL 8
+
+
+
+
+ Ansible is a simple model-driven configuration management, multi-node deployment, and remote-task execution system. Ansible works over SSH and does not require any software or daemons to be installed on remote nodes. Extension modules can be written in any language and are transferred to managed machines automatically.
+
+The following packages have been upgraded to a newer upstream version: ansible (2.8.4)
+
+Security fix(es):
+
+* Ansible: data disclosure when a password from the prompt contains template characters (CVE-2019-10206)
+* Ansible: gcp modules do not flag sensitive data fields properly (CVE-2019-10217)
+
+For more details about the security issue(s), including the impact, a CVSS score, and other related information, refer to the CVE page(s) listed in the References section.
+
+Bug Fix(es):
+
+For details on bug fixes in this release see:
+
+https://github.com/ansible/ansible/blob/v2.8.4/changelogs/CHANGELOG-v2.8.rst
+
+ Moderate
+ Copyright 2019 Red Hat, Inc.
+
+
+ CVE-2019-10206
+ CVE-2019-10217
+ CVE-2019-10206 Ansible: disclosure data when prompted for password and template characters are passed
+ CVE-2019-10217 Ansible: gcp modules do not flag sensitive data fields properly
+
+ cpe:/a:redhat:ansible_engine:2.8
+
+ cpe:/a:redhat:ansible_engine:2.8::el8
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ RHSA-2019:3203: Ansible security and bug fix update (Important)
+
+ Red Hat Ansible Engine 2.8 for RHEL 8
+
+
+
+
+
+ Ansible is a simple model-driven configuration management, multi-node deployment, and remote-task execution system. Ansible works over SSH and does not require any software or daemons to be installed on remote nodes. Extension modules can be written in any language and are transferred to managed machines automatically.
+
+The following packages have been upgraded to a newer upstream version:
+ansible (2.8.6)
+
+Bug Fix(es):
+
+* ansible: incomplete fix for CVE-2019-10206 (CVE-2019-14856)
+* ansible: sub parameters marked as no_log are not masked in certain failure scenarios (CVE-2019-14858)
+* ansible: secrets disclosed on logs when no_log enabled (CVE-2019-14846)
+
+See:
+
+https://github.com/ansible/ansible/blob/v2.8.6/changelogs/CHANGELOG-v2.8.rst
+
+for details on bug fixes in this release.
+
+ Important
+ Copyright 2019 Red Hat, Inc.
+
+
+ CVE-2019-14846
+ CVE-2019-14856
+ CVE-2019-14858
+ CVE-2019-14846 ansible: secrets disclosed on logs when no_log enabled
+ CVE-2019-14858 ansible: sub parameters marked as no_log are not masked in certain failure scenarios
+ CVE-2019-14856 ansible: Incomplete fix for CVE-2019-10206
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ RHSA-2019:3926: ansible security and bug fix update (Moderate)
+
+ Red Hat Ansible Engine 2.8 for RHEL 8
+
+
+
+ Ansible is a simple model-driven configuration management, multi-node
+deployment, and remote-task execution system. Ansible works over SSH and
+does not require any software or daemons to be installed on remote nodes.
+Extension modules can be written in any language and are transferred to
+managed machines automatically.
+
+Security Fix(es):
+
+Ansible: Splunk and Sumologic callback plugins leak sensitive data in logs
+(CVE-2019-14864)
+
+For more details about the security issue(s), including the impact, a CVSS
+score, acknowledgments, and other related information, refer to the CVE
+page(s) listed in the References section.
+
+The following packages have been upgraded to a newer upstream version:
+ansible (2.8.7)
+
+Bug Fix(es):
+
+See:
+https://github.com/ansible/ansible/blob/v2.8.7/changelogs/CHANGELOG-v2.8.rst
+for details on bug fixes in this release.
+
+ Moderate
+ Copyright 2019 Red Hat, Inc.
+
+
+ CVE-2019-14864
+ CVE-2019-14864 Ansible: Splunk and Sumologic callback plugins leak sensitive data in logs
+
+ cpe:/a:redhat:ansible_engine:2.8
+ cpe:/a:redhat:ansible_engine:2.8::el8
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ RHSA-2020:0216: Ansible security and bug fix update (2.8.8) (Moderate)
+
+ Red Hat Ansible Engine 2.8 for RHEL 8
+
+
+
+
+ Ansible is a simple model-driven configuration management, multi-node
+deployment, and remote-task execution system. Ansible works over SSH and
+does not require any software or daemons to be installed on remote nodes.
+Extension modules can be written in any language and are transferred to
+managed machines automatically.
+
+The following packages have been upgraded to a newer upstream version:
+ansible (2.8.8)
+
+Bug Fix(es):
+* CVE-2019-14904 Ansible: vulnerability in solaris_zone module via crafted
+solaris zone
+* CVE-2019-14905 Ansible: malicious code could craft filename in
+nxos_file_copy module
+
+See:
+https://github.com/ansible/ansible/blob/v2.8.8/changelogs/CHANGELOG-v2.8.rst
+for details on bug fixes in this release.
+
+ Moderate
+ Copyright 2020 Red Hat, Inc.
+
+
+ CVE-2019-14904
+ CVE-2019-14905
+ CVE-2019-14905 Ansible: malicious code could craft filename in nxos_file_copy module
+ CVE-2019-14904 Ansible: vulnerability in solaris_zone module via crafted solaris zone
+
+ cpe:/a:redhat:ansible_engine:2.8
+ cpe:/a:redhat:ansible_engine:2.8::el8
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ansible
+
+
+
+
+
+
+
+
+ /etc/redhat-release
+
+
+
+
+ 0:2.8.2-1.el8ae
+
+
+ 199e2f91fd431d51
+
+
+ ^redhat-release
+ ^8[^\d]
+
+
+ ^redhat-release
+
+
+ 0:2.8.4-1.el8ae
+
+
+ 0:2.8.6-1.el8ae
+
+
+ 0:2.8.7-1.el8ae
+
+
+ 0:2.8.8-1.el8ae
+
+
+
diff --git a/ext/vulnsrc/ubuntu/testdata/fetcher_ubuntu_test.txt b/ext/vulnsrc/ubuntu/testdata/fetcher_ubuntu_test.txt
new file mode 100644
index 0000000000..952f1095da
--- /dev/null
+++ b/ext/vulnsrc/ubuntu/testdata/fetcher_ubuntu_test.txt
@@ -0,0 +1,35 @@
+Candidate: CVE-2015-4471
+PublicDate: 2015-06-11
+References:
+ http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2015-4471
+ http://www.openwall.com/lists/oss-security/2015/02/03/11
+ https://github.com/kyz/libmspack/commit/18b6a2cc0b87536015bedd4f7763e6b02d5aa4f3
+ https://bugs.debian.org/775499
+ http://openwall.com/lists/oss-security/2015/02/03/11
+Description:
+ Off-by-one error in the lzxd_decompress function in lzxd.c in libmspack
+ before 0.5 allows remote attackers to cause a denial of service (buffer
+ under-read and application crash) via a crafted CAB archive.
+Ubuntu-Description:
+Notes:
+Bugs:
+ http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=775499
+Priority: medium (wrong-syntax)
+Discovered-by:
+Assigned-to:
+
+Patches_libmspack:
+upstream_libmspack: not-affected (0.5-1)
+precise_libmspack: DNE
+trusty_libmspack: needed
+utopic_libmspack: ignored (reached end-of-life)
+vivid_libmspack : released ( 0.4-3 )
+devel_libmspack: not-affected
+unknown_libmspack: needed
+
+Patches_libmspack-anotherpkg: wrong-syntax
+wily_libmspack-anotherpkg: released ((0.1)
+utopic_libmspack-anotherpkg: not-affected
+trusty_libmspack-anotherpkg: needs-triage
+precise_libmspack-anotherpkg: released
+saucy_libmspack-anotherpkg: needed
diff --git a/ext/vulnsrc/ubuntu/ubuntu.go b/ext/vulnsrc/ubuntu/ubuntu.go
index 0073d255d0..3962b8a0bd 100644
--- a/ext/vulnsrc/ubuntu/ubuntu.go
+++ b/ext/vulnsrc/ubuntu/ubuntu.go
@@ -1,4 +1,4 @@
-// Copyright 2019 clair authors
+// Copyright 2018 clair authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -13,20 +13,18 @@
// limitations under the License.
// Package ubuntu implements a vulnerability source updater using the
-// Ubuntu Linux OVAL Database.
+// Ubuntu CVE Tracker.
package ubuntu
import (
"bufio"
- "compress/bzip2"
- "encoding/xml"
+ "errors"
"fmt"
"io"
- "net/http"
+ "os"
+ "path/filepath"
"regexp"
- "strconv"
"strings"
- "time"
log "github.com/sirupsen/logrus"
@@ -34,77 +32,73 @@ import (
"github.com/quay/clair/v3/ext/versionfmt"
"github.com/quay/clair/v3/ext/versionfmt/dpkg"
"github.com/quay/clair/v3/ext/vulnsrc"
- "github.com/quay/clair/v3/pkg/commonerr"
+ "github.com/quay/clair/v3/pkg/gitutil"
)
const (
- ovalURI = "https://people.canonical.com/~ubuntu-security/oval/"
-
- // "Thu, 30 Nov 2017 03:07:57 GMT
- timeFormatLastModified = "Mon, 2 Jan 2006 15:04:05 MST"
-
- // timestamp format 2017-10-23T04:07:14
- timeFormatOVAL = "2006-1-2T15:04:05"
-
- updaterFlag = "ubuntuUpdater"
-
- ubuntuOvalFilePrefix = "com.ubuntu."
+ trackerURI = "https://git.launchpad.net/ubuntu-cve-tracker"
+ updaterFlag = "ubuntuUpdater"
+ cveURL = "http://people.ubuntu.com/~ubuntu-security/cve/%s"
+ affectedType = database.SourcePackage
)
var (
- ignoredCriterions []string
- ubuntuPackageCommentRegexp = regexp.MustCompile(`^(.*) package in ([a-z]+) (?:(?:was vulnerable|is related to the CVE in some way) but has been fixed \(note: '(.*)'\)|is affected and needs fixing).$`)
- ubuntuOvalFileRegexp = regexp.MustCompile(`com.ubuntu.([a-z]+).cve.oval.xml.bz2`)
- ubuntuOvalIgnoredRegexp = regexp.MustCompile(`(artful|cosmic|trusty|precise)`)
-)
-
-type oval struct {
- Timestamp string `xml:"generator>timestamp"`
- Definitions []definition `xml:"definitions>definition"`
-}
-
-type definition struct {
- Title string `xml:"metadata>title"`
- Description string `xml:"metadata>description"`
- References []reference `xml:"metadata>reference"`
- Severity string `xml:"metadata>advisory>severity"`
- Criteria criteria `xml:"criteria"`
-}
+ ubuntuIgnoredReleases = map[string]struct{}{
+ "upstream": {},
+ "devel": {},
+
+ "dapper": {},
+ "edgy": {},
+ "feisty": {},
+ "gutsy": {},
+ "hardy": {},
+ "intrepid": {},
+ "jaunty": {},
+ "karmic": {},
+ "lucid": {},
+ "maverick": {},
+ "natty": {},
+ "oneiric": {},
+ "saucy": {},
+
+ "vivid/ubuntu-core": {},
+ "vivid/stable-phone-overlay": {},
+
+ // Syntax error
+ "Patches": {},
+ // Product
+ "product": {},
+ }
-type reference struct {
- Source string `xml:"source,attr"`
- URI string `xml:"ref_url,attr"`
-}
+ affectsCaptureRegexp = regexp.MustCompile(`(?P<release>.*)_(?P<package>.*): (?P<status>[^\s]*)( \(+(?P<note>[^()]*)\)+)?`)
+ affectsCaptureRegexpNames = affectsCaptureRegexp.SubexpNames()
-type criteria struct {
- Operator string `xml:"operator,attr"`
- Criterias []*criteria `xml:"criteria"`
- Criterions []criterion `xml:"criterion"`
-}
+ errUnknownRelease = errors.New("found packages with CVEs for a version of Ubuntu that Clair doesn't know about")
+)
-type criterion struct {
- TestRef string `xml:"test_ref,attr"`
- Comment string `xml:"comment,attr"`
+type updater struct {
+ repositoryLocalPath string
}
-type updater struct{}
-
func init() {
vulnsrc.RegisterUpdater("ubuntu", &updater{})
}
-func (u *updater) Update(datastore database.Datastore) (resp vulnsrc.UpdateResponse, err error) {
- log.WithField("package", "Ubuntu Linux").Info("Start fetching vulnerabilities")
+func (u *updater) Update(db database.Datastore) (resp vulnsrc.UpdateResponse, err error) {
+ log.WithField("package", "Ubuntu").Info("Start fetching vulnerabilities")
- // ubuntu has one single xml file per release for all the products,
- // there are no incremental xml files. We store into the database
- // the value of the generation timestamp of the latest file we
- // parsed.
- flagValue, ok, err := database.FindKeyValueAndRollback(datastore, updaterFlag)
+ // Pull the master branch.
+ var commit string
+ u.repositoryLocalPath, commit, err = gitutil.CloneOrPull(trackerURI, u.repositoryLocalPath, updaterFlag)
if err != nil {
return resp, err
}
- log.WithField("flagvalue", flagValue).Debug("Generation timestamp of latest parsed file")
+
+ // Ask the database for the latest commit we successfully applied.
+ dbCommit, ok, err := database.FindKeyValueAndRollback(db, updaterFlag)
+ if err != nil {
+ return
+ }
if !ok {
dbCommit = ""
@@ -120,16 +114,15 @@ func (u *updater) Update(datastore database.Datastore) (resp vulnsrc.UpdateRespo
return
}
- // this contains the modification time of the most recent
- // file expressed as unix time (int64)
- latestOval, err := strconv.ParseInt(flagValue, 10, 64)
+ // Get the list of vulnerabilities that we have to update.
+ var modifiedCVE map[string]struct{}
+ modifiedCVE, err = collectModifiedVulnerabilities(commit, dbCommit, u.repositoryLocalPath)
if err != nil {
- // something went wrong, force parsing of all files
- latestOval = 0
+ return
}
- // Fetch the update list.
- r, err := http.Get(ovalURI)
+ // Get the list of vulnerabilities.
+ resp.Vulnerabilities, resp.Notes, err = collectVulnerabilitiesAndNotes(u.repositoryLocalPath, modifiedCVE)
if err != nil {
return
}
@@ -138,305 +131,252 @@ func (u *updater) Update(datastore database.Datastore) (resp vulnsrc.UpdateRespo
// We don't want the commit to be considered as managed in that case.
if len(resp.Notes) != 0 {
resp.Flags[updaterFlag] = dbCommit
+
}
- defer r.Body.Close()
- var ovalFiles []string
- var generationTimes []int64
+ return
+}
- scanner := bufio.NewScanner(r.Body)
- for scanner.Scan() {
- line := scanner.Text()
- r := ubuntuOvalFileRegexp.FindStringSubmatch(line)
- if len(r) != 2 {
- continue
- }
- release := r[1]
+func (u *updater) Clean() {
+ if u.repositoryLocalPath != "" {
+ os.RemoveAll(u.repositoryLocalPath)
+ }
+}
- // check if we should ignore this release
- ignored := ubuntuOvalIgnoredRegexp.FindString(release)
- if ignored != "" {
- continue
+func collectModifiedVulnerabilities(commit, dbCommit, repositoryLocalPath string) (map[string]struct{}, error) {
+ modifiedCVE := make(map[string]struct{})
+ for _, dirName := range []string{"active", "retired"} {
+ if err := processDirectory(repositoryLocalPath, dirName, modifiedCVE); err != nil {
+ return nil, err
}
+ }
+ return modifiedCVE, nil
+}
- ovalFile := ovalURI + ubuntuOvalFilePrefix + release + ".cve.oval.xml.bz2"
- log.WithFields(log.Fields{
- "ovalFile": ovalFile,
- "updater": "Ubuntu Linux",
- }).Debug("file to check")
+func processDirectory(repositoryLocalPath, dirName string, modifiedCVE map[string]struct{}) error {
+ // Open the directory.
+ d, err := os.Open(filepath.Join(repositoryLocalPath, dirName))
+ if err != nil {
+ log.WithError(err).Error("could not open Ubuntu vulnerabilities repository's folder")
+ return vulnsrc.ErrFilesystem
+ }
+ defer d.Close()
- // Do not fetch the entire file to get the value of the
- // creation time. Rely on the "latest modified time"
- // value of the file hosted on the remote server.
- timestamp, err := getLatestModifiedTime(ovalFile)
- if err != nil {
- log.WithError(err).WithField("ovalFile", ovalFile).Warning("Ignoring OVAL file")
- }
+ // Get the FileInfo of all the files in the directory.
+ names, err := d.Readdirnames(-1)
+ if err != nil {
+ log.WithError(err).Error("could not read Ubuntu vulnerabilities repository's folder")
+ return vulnsrc.ErrFilesystem
+ }
- if timestamp > latestOval {
- ovalFiles = append(ovalFiles, ovalFile)
+ // Add the vulnerabilities to the list.
+ for _, name := range names {
+ if strings.HasPrefix(name, "CVE-") {
+ modifiedCVE[dirName+"/"+name] = struct{}{}
}
}
- for _, oval := range ovalFiles {
- log.WithFields(log.Fields{
- "ovalFile": oval,
- "updater": "Ubuntu Linux",
- }).Debug("downloading")
- // Download the oval XML file.
- r, err := http.Get(oval)
+ return nil
+}
+
+func collectVulnerabilitiesAndNotes(repositoryLocalPath string, modifiedCVE map[string]struct{}) ([]database.VulnerabilityWithAffected, []string, error) {
+ vulns := make([]database.VulnerabilityWithAffected, 0)
+ noteSet := make(map[string]struct{})
+
+ for cvePath := range modifiedCVE {
+ // Open the CVE file.
+ file, err := os.Open(filepath.Join(repositoryLocalPath, cvePath))
if err != nil {
- log.WithError(err).Error("could not download Ubuntu update list")
- return resp, commonerr.ErrCouldNotDownload
+ // This can happen when a file is modified then moved in another commit.
+ continue
}
- defer r.Body.Close()
- // Parse the XML.
- vs, generationTime, err := parseOval(bzip2.NewReader(r.Body))
+ // Parse the vulnerability.
+ v, unknownReleases, err := parseUbuntuCVE(file)
if err != nil {
- return resp, err
+ file.Close()
+ return nil, nil, err
}
- generationTimes = append(generationTimes, generationTime)
-
- // Collect vulnerabilities.
- resp.Vulnerabilities = append(resp.Vulnerabilities, vs...)
- }
- // Set the flag if we found anything.
- if len(generationTimes) > 0 {
- resp.Flags = make(map[string]string)
- resp.Flags[updaterFlag] = strconv.FormatInt(latest(generationTimes), 10)
- } else {
- log.WithField("package", "Ubuntu Linux").Debug("no update")
- }
+ // Add the vulnerability to the response.
+ vulns = append(vulns, v)
- return resp, nil
-}
-
-// Get the latest modification time of a remote file
-// expressed as unix time
-func getLatestModifiedTime(url string) (int64, error) {
- resp, err := http.Head(url)
- if err != nil {
- return 0, err
- }
- defer resp.Body.Close()
+ // Store any unknown releases as notes.
+ for k := range unknownReleases {
+ noteSet[errUnknownRelease.Error()+": "+k] = struct{}{}
+ }
- last_modified := resp.Header.Get("Last-Modified")
- if len(last_modified) == 0 {
- return 0, fmt.Errorf("last modified header missing")
+ file.Close()
}
- timestamp, err := time.Parse(timeFormatLastModified, last_modified)
- if err != nil {
- return 0, err
+ // Convert the note set into a slice.
+ var notes []string
+ for note := range noteSet {
+ notes = append(notes, note)
}
- return timestamp.Unix(), nil
+ return vulns, notes, nil
}
-func latest(values []int64) (ret int64) {
- for _, element := range values {
- if element > ret {
- ret = element
- }
- }
- return
-}
+func parseUbuntuCVE(fileContent io.Reader) (vulnerability database.VulnerabilityWithAffected, unknownReleases map[string]struct{}, err error) {
+ unknownReleases = make(map[string]struct{})
+ readingDescription := false
+ scanner := bufio.NewScanner(fileContent)
-func (u *updater) Clean() {}
+ // only unique major releases will be considered. All sub releases' (e.g.
+ // precise/esm) features are considered belong to major releases.
+ uniqueRelease := map[string]struct{}{}
-func parseOval(ovalReader io.Reader) (vulnerabilities []database.VulnerabilityWithAffected, generationTime int64, err error) {
- // Decode the XML.
- var ov oval
- err = xml.NewDecoder(ovalReader).Decode(&ov)
- if err != nil {
- log.WithError(err).Error("could not decode XML")
- err = commonerr.ErrCouldNotParse
- return
- }
+ for scanner.Scan() {
+ line := strings.TrimSpace(scanner.Text())
- timestamp, err := time.Parse(timeFormatOVAL, ov.Timestamp)
- if err != nil {
- return
- }
- generationTime = timestamp.Unix()
-
- // Iterate over the definitions and collect any vulnerabilities
- // that affect at least one package.
- for _, definition := range ov.Definitions {
- pkgs := toFeatureVersions(definition.Criteria)
- if len(pkgs) > 0 {
- vulnerability := database.VulnerabilityWithAffected{
- Vulnerability: database.Vulnerability{
- Name: name(definition),
- Link: link(definition),
- Severity: severity(definition),
- Description: description(definition),
- },
- }
- vulnerability.Affected = append(vulnerability.Affected, pkgs...)
- vulnerabilities = append(vulnerabilities, vulnerability)
+ // Skip any comments.
+ if strings.HasPrefix(line, "#") {
+ continue
}
- }
- return
-}
+ // Parse the name.
+ if strings.HasPrefix(line, "Candidate:") {
+ vulnerability.Name = strings.TrimSpace(strings.TrimPrefix(line, "Candidate:"))
+ vulnerability.Link = fmt.Sprintf(cveURL, vulnerability.Name)
+ continue
+ }
-func getCriterions(node criteria) [][]criterion {
- // Filter useless criterions.
- var criterions []criterion
- for _, c := range node.Criterions {
- ignored := false
+ // Parse the priority.
+ if strings.HasPrefix(line, "Priority:") {
+ priority := strings.TrimSpace(strings.TrimPrefix(line, "Priority:"))
- for _, ignoredItem := range ignoredCriterions {
- if strings.Contains(c.Comment, ignoredItem) {
- ignored = true
- break
+ // Handle syntax error: Priority: medium (heap-protector)
+ if strings.Contains(priority, " ") {
+ priority = priority[:strings.Index(priority, " ")]
}
- }
- if !ignored {
- criterions = append(criterions, c)
+ vulnerability.Severity = SeverityFromPriority(priority)
+ continue
}
- }
- // assume AND if not specifically OR
- if node.Operator == "OR" {
- var possibilities [][]criterion
- for _, c := range criterions {
- possibilities = append(possibilities, []criterion{c})
+ // Parse the description.
+ if strings.HasPrefix(line, "Description:") {
+ readingDescription = true
+ vulnerability.Description = strings.TrimSpace(strings.TrimPrefix(line, "Description:")) // In case there is a formatting error and the description starts on the same line
+ continue
}
- return possibilities
- } else {
- return [][]criterion{criterions}
- }
-}
-
-func getPossibilities(node criteria) [][]criterion {
- if len(node.Criterias) == 0 {
- return getCriterions(node)
- }
-
- var possibilitiesToCompose [][][]criterion
- for _, criteria := range node.Criterias {
- possibilitiesToCompose = append(possibilitiesToCompose, getPossibilities(*criteria))
- }
- if len(node.Criterions) > 0 {
- possibilitiesToCompose = append(possibilitiesToCompose, getCriterions(node))
- }
-
- var possibilities [][]criterion
- // assume AND if not OR
- if node.Operator == "OR" {
- for _, possibilityGroup := range possibilitiesToCompose {
- for _, possibility := range possibilityGroup {
- possibilities = append(possibilities, possibility)
+ if readingDescription {
+ if strings.HasPrefix(line, "Ubuntu-Description:") || strings.HasPrefix(line, "Notes:") || strings.HasPrefix(line, "Bugs:") || strings.HasPrefix(line, "Priority:") || strings.HasPrefix(line, "Discovered-by:") || strings.HasPrefix(line, "Assigned-to:") {
+ readingDescription = false
+ } else {
+ vulnerability.Description = vulnerability.Description + " " + line
+ continue
}
}
- } else {
- for _, possibility := range possibilitiesToCompose[0] {
- possibilities = append(possibilities, possibility)
- }
- for _, possibilityGroup := range possibilitiesToCompose[1:] {
- var newPossibilities [][]criterion
+ // Try to parse the package that the vulnerability affects.
+ affectsCaptureArr := affectsCaptureRegexp.FindAllStringSubmatch(line, -1)
+ if len(affectsCaptureArr) > 0 {
+ affectsCapture := affectsCaptureArr[0]
- for _, possibility := range possibilities {
- for _, possibilityInGroup := range possibilityGroup {
- var p []criterion
- p = append(p, possibility...)
- p = append(p, possibilityInGroup...)
- newPossibilities = append(newPossibilities, p)
- }
+ md := map[string]string{}
+ for i, n := range affectsCapture {
+ md[affectsCaptureRegexpNames[i]] = strings.TrimSpace(n)
}
- possibilities = newPossibilities
- }
- }
+ // Ignore Linux kernels.
+ if strings.HasPrefix(md["package"], "linux") {
+ continue
+ }
- return possibilities
-}
+ // Only consider the package if its status is needed, active, deferred, not-affected or
+ // released. Ignore DNE (package does not exist), needs-triage, ignored, pending.
+ if md["status"] == "needed" || md["status"] == "active" || md["status"] == "deferred" || md["status"] == "released" || md["status"] == "not-affected" {
+ md["release"] = strings.Split(md["release"], "/")[0]
+ if _, isReleaseIgnored := ubuntuIgnoredReleases[md["release"]]; isReleaseIgnored {
+ continue
+ }
+ if _, isReleaseKnown := database.UbuntuReleasesMapping[md["release"]]; !isReleaseKnown {
+ unknownReleases[md["release"]] = struct{}{}
+ continue
+ }
-func toFeatureVersions(criteria criteria) []database.AffectedFeature {
- var featureVersionParametersArray []database.AffectedFeature
- possibilities := getPossibilities(criteria)
- for _, criterions := range possibilities {
- var featureVersion database.AffectedFeature
-
- // Attempt to parse package data from trees of criterions.
- for _, c := range criterions {
- if match := ubuntuPackageCommentRegexp.FindStringSubmatch(c.Comment); match != nil {
- var version = versionfmt.MaxVersion
- if len(match[3]) > 0 {
- version = match[3]
- err := versionfmt.Valid(dpkg.ParserName, version)
- if err != nil {
- log.WithError(err).WithField("version", version).Warning("could not parse package version. skipping")
+ var version string
+ if md["status"] == "released" {
+ if md["note"] != "" {
+ var err error
+ err = versionfmt.Valid(dpkg.ParserName, md["note"])
+ if err != nil {
+ log.WithError(err).WithField("version", md["note"]).Warning("could not parse package version. skipping")
+ }
+ version = md["note"]
}
+ } else {
+ version = versionfmt.MaxVersion
}
- featureVersion.FeatureType = database.BinaryPackage
- featureVersion.AffectedVersion = version
- if version != versionfmt.MaxVersion {
- featureVersion.FixedInVersion = version
+ if version == "" {
+ continue
+ }
+
+ releaseName := "ubuntu:" + database.UbuntuReleasesMapping[md["release"]]
+ if _, ok := uniqueRelease[releaseName+"_:_"+md["package"]]; ok {
+ continue
}
- featureVersion.FeatureName = match[1]
- featureVersion.Namespace.Name = fmt.Sprintf("ubuntu:%s", match[2])
- featureVersion.Namespace.VersionFormat = dpkg.ParserName
- }
- }
- if featureVersion.Namespace.Name != "" && featureVersion.FeatureName != "" && featureVersion.AffectedVersion != "" {
- featureVersionParametersArray = append(featureVersionParametersArray, featureVersion)
+ uniqueRelease[releaseName+"_:_"+md["package"]] = struct{}{}
+ var fixedinVersion string
+ if version == versionfmt.MaxVersion {
+ fixedinVersion = ""
+ } else {
+ fixedinVersion = version
+ }
+
+ // Create and add the new package.
+ featureVersion := database.AffectedFeature{
+ FeatureType: affectedType,
+ Namespace: database.Namespace{
+ Name: releaseName,
+ VersionFormat: dpkg.ParserName,
+ },
+ FeatureName: md["package"],
+ AffectedVersion: version,
+ FixedInVersion: fixedinVersion,
+ }
+ vulnerability.Affected = append(vulnerability.Affected, featureVersion)
+ }
}
}
- return featureVersionParametersArray
-}
+ // Trim extra spaces in the description
+ vulnerability.Description = strings.TrimSpace(vulnerability.Description)
-func description(def definition) (desc string) {
- // It is much more faster to proceed like this than using a Replacer.
- desc = strings.Replace(def.Description, "\n\n\n", " ", -1)
- desc = strings.Replace(desc, "\n\n", " ", -1)
- desc = strings.Replace(desc, "\n", " ", -1)
- return
-}
-
-func name(def definition) string {
- // only return the CVE identifier which is the first word
- return strings.Split(def.Title, " ")[0]
-}
+ // If no link has been provided (CVE-2006-NNN0 for instance), add the link to the tracker
+ if vulnerability.Link == "" {
+ vulnerability.Link = trackerURI
+ }
-func link(def definition) (link string) {
- for _, reference := range def.References {
- if reference.Source == "CVE" {
- link = reference.URI
- break
- }
+ // If no priority has been provided (CVE-2007-0667 for instance), set the priority to Unknown
+ if vulnerability.Severity == "" {
+ vulnerability.Severity = database.UnknownSeverity
}
return
}
-func severity(def definition) (severity database.Severity) {
- switch def.Severity {
- case "":
- return database.UnknownSeverity
- case "Untriaged":
+// SeverityFromPriority converts an priority from the Ubuntu CVE Tracker into
+// a database.Severity.
+func SeverityFromPriority(priority string) database.Severity {
+ switch priority {
+ case "untriaged":
return database.UnknownSeverity
- case "Negligible":
+ case "negligible":
return database.NegligibleSeverity
- case "Low":
+ case "low":
return database.LowSeverity
- case "Medium":
+ case "medium":
return database.MediumSeverity
- case "High":
+ case "high":
return database.HighSeverity
- case "Critical":
+ case "critical":
return database.CriticalSeverity
default:
- log.Warningf("could not determine a vulnerability severity from: %s", def.Severity)
+ log.Warningf("could not determine a vulnerability severity from: %s", priority)
return database.UnknownSeverity
-
}
}
diff --git a/ext/vulnsrc/ubuntu/ubuntu_test.go b/ext/vulnsrc/ubuntu/ubuntu_test.go
index a20cd3924e..ba80273010 100644
--- a/ext/vulnsrc/ubuntu/ubuntu_test.go
+++ b/ext/vulnsrc/ubuntu/ubuntu_test.go
@@ -31,26 +31,33 @@ func TestUbuntuParser(t *testing.T) {
_, filename, _, _ := runtime.Caller(0)
path := filepath.Join(filepath.Dir(filename))
- // Test parsing testdata/fetcher_openubuntu_test.1.xml
- testFile, _ := os.Open(path + "/testdata/fetcher_ubuntu_test.xml")
- defer testFile.Close()
-
- vulnerabilities, generationTime, err := parseOval(testFile)
- assert.Nil(t, err)
- assert.Equal(t, int64(1565935750), generationTime)
-
- if assert.Nil(t, err) && assert.Len(t, vulnerabilities, 1) {
- var vulnerability = vulnerabilities[0]
+ // Test parsing testdata/fetcher_ubuntu_test.txt
+ testData, _ := os.Open(filepath.Join(path, "/testdata/fetcher_ubuntu_test.txt"))
+ defer testData.Close()
+ vulnerability, unknownReleases, err := parseUbuntuCVE(testData)
+ if assert.Nil(t, err) {
assert.Equal(t, "CVE-2015-4471", vulnerability.Name)
assert.Equal(t, database.MediumSeverity, vulnerability.Severity)
- assert.Equal(t, "Off-by-one error in the lzxd_decompress function in lzxd.c in libmspack before 0.5 allows remote attackers to cause a denial of service (buffer under-read and application crash) via a crafted CAB archive. It was discovered that cabextract incorrectly handled certain malformed CAB files. A remote attacker could use this issue to cause cabextract to crash, resulting in a denial of service.", vulnerability.Description)
- assert.Equal(t, "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2015-4471", vulnerability.Link)
+ assert.Equal(t, "Off-by-one error in the lzxd_decompress function in lzxd.c in libmspack before 0.5 allows remote attackers to cause a denial of service (buffer under-read and application crash) via a crafted CAB archive.", vulnerability.Description)
+
+ // Unknown release (line 28)
+ _, hasUnknownRelease := unknownReleases["unknown"]
+ assert.True(t, hasUnknownRelease)
expectedFeatures := []database.AffectedFeature{
{
- FeatureType: database.BinaryPackage,
+ FeatureType: affectedType,
Namespace: database.Namespace{
- Name: "ubuntu:xenial",
+ Name: "ubuntu:14.04",
+ VersionFormat: dpkg.ParserName,
+ },
+ FeatureName: "libmspack",
+ AffectedVersion: versionfmt.MaxVersion,
+ },
+ {
+ FeatureType: affectedType,
+ Namespace: database.Namespace{
+ Name: "ubuntu:15.04",
VersionFormat: dpkg.ParserName,
},
FeatureName: "libmspack",
@@ -58,13 +65,14 @@ func TestUbuntuParser(t *testing.T) {
AffectedVersion: "0.4-3",
},
{
- FeatureType: database.BinaryPackage,
+ FeatureType: affectedType,
Namespace: database.Namespace{
- Name: "ubuntu:xenial",
+ Name: "ubuntu:15.10",
VersionFormat: dpkg.ParserName,
},
- FeatureName: "cabextract",
- AffectedVersion: versionfmt.MaxVersion,
+ FeatureName: "libmspack-anotherpkg",
+ FixedInVersion: "0.1",
+ AffectedVersion: "0.1",
},
}