diff --git a/cmd/guacone/cmd/vulnerability.go b/cmd/guacone/cmd/vulnerability.go
index eb8f235a5b..2c8cc89cc5 100644
--- a/cmd/guacone/cmd/vulnerability.go
+++ b/cmd/guacone/cmd/vulnerability.go
@@ -22,6 +22,7 @@ import (
 	"os"
 	"strings"
 
+	"github.com/guacsec/guac/internal/testing/ptrfrom"
 	"github.com/guacsec/guac/pkg/dependencies"
 
 	"github.com/Khan/genqlient/graphql"
@@ -124,12 +125,86 @@ func printVulnInfo(ctx context.Context, gqlclient graphql.Client, t table.Writer
 	var path []string
 	var tableRows []table.Row
 
-	depVulnPath, depVulnTableRows, err := searchPkgViaHasSBOM(ctx, gqlclient, opts.searchString, opts.depth, opts.isPurl)
-	if err != nil {
-		logger.Fatalf("error searching via hasSBOM: %v", err)
+	if opts.isPurl {
+		depVulnPath, depVulnTableRows, err := searchForSBOMViaPkg(ctx, gqlclient, opts.searchString, opts.depth, true)
+		if err != nil {
+			logger.Fatalf("error searching via hasSBOM: %v", err)
+		}
+
+		path = append(path, depVulnPath...)
+		tableRows = append(tableRows, depVulnTableRows...)
+
+		if len(depVulnPath) == 0 {
+			pkgInput, err := helpers.PurlToPkg(opts.searchString)
+			if err != nil {
+				logger.Fatalf("failed to parse PURL: %v", err)
+			}
+
+			pkgQualifierFilter := []model.PackageQualifierSpec{}
+			for _, qualifier := range pkgInput.Qualifiers {
+				// to prevent https://github.com/golang/go/discussions/56010
+				qualifier := qualifier
+				pkgQualifierFilter = append(pkgQualifierFilter, model.PackageQualifierSpec{
+					Key:   qualifier.Key,
+					Value: &qualifier.Value,
+				})
+			}
+
+			pkgFilter := &model.PkgSpec{
+				Type:       &pkgInput.Type,
+				Namespace:  pkgInput.Namespace,
+				Name:       &pkgInput.Name,
+				Version:    pkgInput.Version,
+				Subpath:    pkgInput.Subpath,
+				Qualifiers: pkgQualifierFilter,
+			}
+
+			o, err := model.Occurrences(ctx, gqlclient, model.IsOccurrenceSpec{
+				Subject: &model.PackageOrSourceSpec{
+					Package: pkgFilter,
+				},
+			})
+
+			depVulnPath, depVulnTableRows, err = searchForSBOMViaArtifact(ctx, gqlclient, o.IsOccurrence[0].Artifact.Id, opts.depth, false)
+			if err != nil {
+				logger.Fatalf("error searching for SBOMs via artifact: %v", err)
+			}
+
+			path = append(path, depVulnPath...)
+			tableRows = append(tableRows, depVulnTableRows...)
+		}
+	} else {
+		depVulnPath, depVulnTableRows, err := searchForSBOMViaArtifact(ctx, gqlclient, opts.searchString, opts.depth, true)
+		if err != nil {
+			logger.Fatalf("error searching for SBOMs via artifact: %v", err)
+		}
+
+		path = append(path, depVulnPath...)
+		tableRows = append(tableRows, depVulnTableRows...)
+
+		if len(depVulnPath) == 0 {
+			split := strings.Split(opts.searchString, ":")
+			if len(split) != 2 {
+				logger.Fatalf("failed to parse artifact. Needs to be in algorithm:digest form")
+			}
+			artifactFilter := model.ArtifactSpec{
+				Algorithm: ptrfrom.String(strings.ToLower(split[0])),
+				Digest:    ptrfrom.String(strings.ToLower(split[1])),
+			}
+
+			o, err := model.Occurrences(ctx, gqlclient, model.IsOccurrenceSpec{
+				Artifact: &artifactFilter,
+			})
+
+			depVulnPath, depVulnTableRows, err = searchForSBOMViaPkg(ctx, gqlclient, o.IsOccurrence[0].Subject.(*model.AllIsOccurrencesTreeSubjectPackage).Namespaces[0].Names[0].Versions[0].Id, opts.depth, false)
+			if err != nil {
+				logger.Fatalf("error searching via hasSBOM: %v", err)
+			}
+
+			path = append(path, depVulnPath...)
+			tableRows = append(tableRows, depVulnTableRows...)
+		}
 	}
-	path = append(path, depVulnPath...)
-	tableRows = append(tableRows, depVulnTableRows...)
 
 	if len(path) > 0 {
 		t.AppendRows(tableRows)
@@ -432,7 +507,12 @@ type pkgVersionNeighborQueryResults struct {
 	isDep                      model.AllHasSBOMTreeIncludedDependenciesIsDependency
 }
 
-func getVulnAndVexNeighbors(ctx context.Context, gqlclient graphql.Client, pkgID string, isDep model.AllHasSBOMTreeIncludedDependenciesIsDependency) (*pkgVersionNeighborQueryResults, error) {
+type artifactVersionNeighborQueryResults struct {
+	pkgVersionNeighborResponse *model.NeighborsResponse
+	isArt                      model.AllHasSBOMTreeIncludedOccurrencesIsOccurrence
+}
+
+func getVulnAndVexNeighborsForPackage(ctx context.Context, gqlclient graphql.Client, pkgID string, isDep model.AllHasSBOMTreeIncludedDependenciesIsDependency) (*pkgVersionNeighborQueryResults, error) {
 	pkgVersionNeighborResponse, err := model.Neighbors(ctx, gqlclient, pkgID, []model.Edge{model.EdgePackageCertifyVuln, model.EdgePackageCertifyVexStatement})
 	if err != nil {
 		return nil, fmt.Errorf("failed to get neighbors for pkgID: %s with error %w", pkgID, err)
@@ -440,10 +520,18 @@ func getVulnAndVexNeighbors(ctx context.Context, gqlclient graphql.Client, pkgID
 	return &pkgVersionNeighborQueryResults{pkgVersionNeighborResponse: pkgVersionNeighborResponse, isDep: isDep}, nil
 }
 
-// searchPkgViaHasSBOM takes in either a purl or URI for the initial value to find the hasSBOM node.
+func getVulnAndVexNeighborsForArtifact(ctx context.Context, gqlclient graphql.Client, pkgID string, isArt model.AllHasSBOMTreeIncludedOccurrencesIsOccurrence) (*artifactVersionNeighborQueryResults, error) {
+	pkgVersionNeighborResponse, err := model.Neighbors(ctx, gqlclient, pkgID, []model.Edge{model.EdgeArtifactCertifyVexStatement})
+	if err != nil {
+		return nil, fmt.Errorf("failed to get neighbors for pkgID: %s with error %w", pkgID, err)
+	}
+	return &artifactVersionNeighborQueryResults{pkgVersionNeighborResponse: pkgVersionNeighborResponse, isArt: isArt}, nil
+}
+
+// searchForSBOMViaPkg takes in either a purl (on the primary call) or a package ID for the initial value to find the hasSBOM node.
 // From there is recursively searches through all the dependencies to determine if it contains hasSBOM nodes.
 // It concurrent checks the package version node if it contains vulnerabilities and VEX data.
-func searchPkgViaHasSBOM(ctx context.Context, gqlclient graphql.Client, searchString string, maxLength int, isPurl bool) ([]string, []table.Row, error) {
+func searchForSBOMViaPkg(ctx context.Context, gqlclient graphql.Client, searchString string, maxLength int, primaryCall bool) ([]string, []table.Row, error) {
 	var path []string
 	var tableRows []table.Row
 	checkedPkgIDs := make(map[string]bool)
@@ -473,22 +561,15 @@ func searchPkgViaHasSBOM(ctx context.Context, gqlclient graphql.Client, searchSt
 		var foundHasSBOMPkg *model.HasSBOMsResponse
 		var err error
 
-		// if the initial depth, check if its a purl or an SBOM URI. Otherwise always search by pkgID
-		if nowNode.depth == 0 {
-			if isPurl {
-				pkgResponse, err := getPkgResponseFromPurl(ctx, gqlclient, now)
-				if err != nil {
-					return nil, nil, fmt.Errorf("getPkgResponseFromPurl - error: %v", err)
-				}
-				foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Package: &model.PkgSpec{Id: &pkgResponse.Packages[0].Namespaces[0].Names[0].Versions[0].Id}}})
-				if err != nil {
-					return nil, nil, fmt.Errorf("failed getting hasSBOM via purl: %s with error :%w", now, err)
-				}
-			} else {
-				foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Uri: &now})
-				if err != nil {
-					return nil, nil, fmt.Errorf("failed getting hasSBOM via URI: %s with error: %w", now, err)
-				}
+		// on the initial depth of the primary call the search string is a purl; resolve it to a package version. Otherwise always search by pkgID
+		if nowNode.depth == 0 && primaryCall {
+			pkgResponse, err := getPkgResponseFromPurl(ctx, gqlclient, now)
+			if err != nil {
+				return nil, nil, fmt.Errorf("getPkgResponseFromPurl - error: %v", err)
+			}
+			foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Package: &model.PkgSpec{Id: &pkgResponse.Packages[0].Namespaces[0].Names[0].Versions[0].Id}}})
+			if err != nil {
+				return nil, nil, fmt.Errorf("failed getting hasSBOM via purl: %s with error: %w", now, err)
 			}
 		} else {
 			foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Package: &model.PkgSpec{Id: &now}}})
@@ -542,9 +623,9 @@ func searchPkgViaHasSBOM(ctx context.Context, gqlclient graphql.Client, searchSt
 				if !dfsN.expanded {
 					queue = append(queue, pkgID)
 				}
-				pkgVersionNeighbors, err := getVulnAndVexNeighbors(ctx, gqlclient, pkgID, isDep)
+				pkgVersionNeighbors, err := getVulnAndVexNeighborsForPackage(ctx, gqlclient, pkgID, isDep)
 				if err != nil {
-					return nil, nil, fmt.Errorf("getVulnAndVexNeighbors failed with error: %w", err)
+					return nil, nil, fmt.Errorf("getVulnAndVexNeighborsForPackage failed with error: %w", err)
 				}
 				collectedPkgVersionResults = append(collectedPkgVersionResults, pkgVersionNeighbors)
 				checkedPkgIDs[pkgID] = true
@@ -561,20 +642,153 @@ func searchPkgViaHasSBOM(ctx context.Context, gqlclient graphql.Client, searchSt
 	for _, result := range collectedPkgVersionResults {
 		for _, neighbor := range result.pkgVersionNeighborResponse.Neighbors {
 			if certifyVuln, ok := neighbor.(*model.NeighborsNeighborsCertifyVuln); ok {
-				if !checkedCertifyVulnIDs[certifyVuln.Vulnerability.Id] {
-					if certifyVuln.Vulnerability.Type != noVulnType {
-						checkedCertifyVulnIDs[certifyVuln.Vulnerability.Id] = true
-						for _, vuln := range certifyVuln.Vulnerability.VulnerabilityIDs {
-							tableRows = append(tableRows, table.Row{certifyVulnStr, certifyVuln.Id, "vulnerability ID: " + vuln.VulnerabilityID})
-							path = append(path, []string{vuln.Id, certifyVuln.Id,
-								certifyVuln.Package.Namespaces[0].Names[0].Versions[0].Id,
-								certifyVuln.Package.Namespaces[0].Names[0].Id, certifyVuln.Package.Namespaces[0].Id,
-								certifyVuln.Package.Id}...)
+				if !checkedCertifyVulnIDs[certifyVuln.Id] && certifyVuln.Vulnerability.Type != noVulnType {
+					checkedCertifyVulnIDs[certifyVuln.Id] = true
+					for _, vuln := range certifyVuln.Vulnerability.VulnerabilityIDs {
+						tableRows = append(tableRows, table.Row{certifyVulnStr, certifyVuln.Id, "vulnerability ID: " + vuln.VulnerabilityID})
+						path = append(path, []string{vuln.Id, certifyVuln.Id,
+							certifyVuln.Package.Namespaces[0].Names[0].Versions[0].Id,
+							certifyVuln.Package.Namespaces[0].Names[0].Id, certifyVuln.Package.Namespaces[0].Id,
+							certifyVuln.Package.Id}...)
+					}
+					path = append(path, result.isDep.Id, result.isDep.Package.Namespaces[0].Names[0].Versions[0].Id,
+						result.isDep.Package.Namespaces[0].Names[0].Id, result.isDep.Package.Namespaces[0].Id,
+						result.isDep.Package.Id)
+				}
+			}
+
+			if certifyVex, ok := neighbor.(*model.NeighborsNeighborsCertifyVEXStatement); ok {
+				for _, vuln := range certifyVex.Vulnerability.VulnerabilityIDs {
+					tableRows = append(tableRows, table.Row{vexLinkStr, certifyVex.Id, "vulnerability ID: " + vuln.VulnerabilityID + ", Vex Status: " + string(certifyVex.Status) + ", Subject: " + vexSubjectString(certifyVex.Subject)})
+					path = append(path, certifyVex.Id, vuln.Id)
+				}
+				path = append(path, vexSubjectIds(certifyVex.Subject)...)
+			}
+		}
+	}
+	return path, tableRows, nil
+}
+
+func searchForSBOMViaArtifact(ctx context.Context, gqlclient graphql.Client, searchString string, maxLength int, primaryCall bool) ([]string, []table.Row, error) {
+	var path []string
+	var tableRows []table.Row
+	checkedArtifactIDs := make(map[string]bool)
+	var collectedArtifactResults []*artifactVersionNeighborQueryResults
+
+	queue := make([]string, 0) // the queue of nodes in bfs
+	type dfsNode struct {
+		expanded bool // true once all node neighbors are added to queue
+		parent   string
+		artID    string
+		depth    int
+	}
+	nodeMap := map[string]dfsNode{}
+
+	nodeMap[searchString] = dfsNode{}
+	queue = append(queue, searchString)
+
+	for len(queue) > 0 {
+		now := queue[0]
+		queue = queue[1:]
+		nowNode := nodeMap[now]
+
+		if maxLength != 0 && nowNode.depth >= maxLength {
+			break
+		}
+
+		var foundHasSBOMPkg *model.HasSBOMsResponse
+		var err error
+
+		if nowNode.depth == 0 && primaryCall {
+			split := strings.Split(now, ":")
+			if len(split) != 2 {
+				return nil, nil, fmt.Errorf("error splitting search string %s, search string should be in algorithm:digest form: %v", now, split)
+			}
+			algorithm := strings.ToLower(split[0])
+			digest := strings.ToLower(split[1])
+
+			foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{
+				Artifact: &model.ArtifactSpec{
+					Algorithm: &algorithm,
+					Digest:    &digest,
+				},
+			}})
+			if err != nil {
+				return nil, nil, fmt.Errorf("failed getting hasSBOM via artifact: %s with error: %w", now, err)
+			}
+		} else {
+			foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Artifact: &model.ArtifactSpec{Id: &now}}})
+			if err != nil {
+				return nil, nil, fmt.Errorf("failed getting hasSBOM via artifact: %s with error: %w", now, err)
+			}
+		}
+
+		for _, hasSBOM := range foundHasSBOMPkg.HasSBOM {
+			if pkgResponse, ok := foundHasSBOMPkg.HasSBOM[0].Subject.(*model.AllHasSBOMTreeSubjectPackage); ok {
+				if pkgResponse.Type != guacType {
+					if !checkedArtifactIDs[pkgResponse.Namespaces[0].Names[0].Versions[0].Id] {
+						vulnPath, pkgVulnTableRows, err := queryVulnsViaPackageNeighbors(ctx, gqlclient, pkgResponse.Namespaces[0].Names[0].Versions[0].Id)
+						if err != nil {
+							return nil, nil, fmt.Errorf("error querying neighbor: %v", err)
+						}
+						path = append(path, vulnPath...)
+						tableRows = append(tableRows, pkgVulnTableRows...)
+						path = append([]string{pkgResponse.Namespaces[0].Names[0].Versions[0].Id,
+							pkgResponse.Namespaces[0].Names[0].Id, pkgResponse.Namespaces[0].Id,
+							pkgResponse.Id}, path...)
+						checkedArtifactIDs[pkgResponse.Namespaces[0].Names[0].Versions[0].Id] = true
+					}
+				}
+			}
+
+			for _, isOcc := range hasSBOM.IncludedOccurrences {
+				if *isOcc.Subject.GetTypename() == guacType {
+					continue
+				}
+				var matchingArtifactIDs []string
+				matchingArtifactIDs = append(matchingArtifactIDs, isOcc.Artifact.Id)
+
+				for _, artID := range matchingArtifactIDs {
+					dfsN, seen := nodeMap[artID]
+					if !seen {
+						dfsN = dfsNode{
+							parent: now,
+							artID:  artID,
+							depth:  nowNode.depth + 1,
 						}
-						path = append(path, result.isDep.Id, result.isDep.Package.Namespaces[0].Names[0].Versions[0].Id,
-							result.isDep.Package.Namespaces[0].Names[0].Id, result.isDep.Package.Namespaces[0].Id,
-							result.isDep.Package.Id)
+						nodeMap[artID] = dfsN
+					}
+					if !dfsN.expanded {
+						queue = append(queue, artID)
+					}
+					artifactNeighbors, err := getVulnAndVexNeighborsForArtifact(ctx, gqlclient, artID, isOcc)
+					if err != nil {
+						return nil, nil, fmt.Errorf("getVulnAndVexNeighborsForArtifact failed with error: %w", err)
+					}
+					collectedArtifactResults = append(collectedArtifactResults, artifactNeighbors)
+					checkedArtifactIDs[artID] = true
+				}
+			}
+		}
+		nowNode.expanded = true
+		nodeMap[now] = nowNode
+	}
+
+	checkedCertifyVulnIDs := make(map[string]bool)
+
+	// Collect results from the gathered artifact neighbor queries
+	for _, result := range collectedArtifactResults {
+		for _, neighbor := range result.pkgVersionNeighborResponse.Neighbors {
+			if certifyVuln, ok := neighbor.(*model.NeighborsNeighborsCertifyVuln); ok {
+				if !checkedCertifyVulnIDs[certifyVuln.Id] && certifyVuln.Vulnerability.Type != noVulnType {
+					checkedCertifyVulnIDs[certifyVuln.Id] = true
+					for _, vuln := range certifyVuln.Vulnerability.VulnerabilityIDs {
+						tableRows = append(tableRows, table.Row{certifyVulnStr, certifyVuln.Id, "vulnerability ID: " + vuln.VulnerabilityID})
+						path = append(path, []string{vuln.Id, certifyVuln.Id,
+							certifyVuln.Package.Namespaces[0].Names[0].Versions[0].Id,
+							certifyVuln.Package.Namespaces[0].Names[0].Id, certifyVuln.Package.Namespaces[0].Id,
+							certifyVuln.Package.Id}...)
 					}
+					path = append(path, result.isArt.Id, result.isArt.Artifact.Id)
 				}
 			}