From d3c5a8b2f9be2c38e9e122a27f1c5364e5d1836a Mon Sep 17 00:00:00 2001 From: nathannaveen <42319948+nathannaveen@users.noreply.github.com> Date: Tue, 11 Jun 2024 10:55:31 -0500 Subject: [PATCH 1/7] Using artifacts attached to hasSBOMs Signed-off-by: nathannaveen <42319948+nathannaveen@users.noreply.github.com> --- cmd/guacone/cmd/vulnerability.go | 289 +++++++++++++++++++++++++++---- 1 file changed, 252 insertions(+), 37 deletions(-) diff --git a/cmd/guacone/cmd/vulnerability.go b/cmd/guacone/cmd/vulnerability.go index 00bc56529c..8018478329 100644 --- a/cmd/guacone/cmd/vulnerability.go +++ b/cmd/guacone/cmd/vulnerability.go @@ -22,6 +22,8 @@ import ( "os" "strings" + "github.com/guacsec/guac/internal/testing/ptrfrom" + "github.com/Khan/genqlient/graphql" model "github.com/guacsec/guac/pkg/assembler/clients/generated" "github.com/guacsec/guac/pkg/assembler/helpers" @@ -122,12 +124,86 @@ func printVulnInfo(ctx context.Context, gqlclient graphql.Client, t table.Writer var path []string var tableRows []table.Row - depVulnPath, depVulnTableRows, err := searchPkgViaHasSBOM(ctx, gqlclient, opts.searchString, opts.depth, opts.isPurl) - if err != nil { - logger.Fatalf("error searching via hasSBOM: %v", err) + if opts.isPurl { + depVulnPath, depVulnTableRows, err := searchForSBOMViaPkg(ctx, gqlclient, opts.searchString, opts.depth, true) + if err != nil { + logger.Fatalf("error searching via hasSBOM: %v", err) + } + + path = append(path, depVulnPath...) + tableRows = append(tableRows, depVulnTableRows...) + + if len(depVulnPath) == 0 { + pkgInput, err := helpers.PurlToPkg(opts.searchString) + if err != nil { + logger.Fatalf("failed to parse PURL: %v", err) + } + + pkgQualifierFilter := []model.PackageQualifierSpec{} + for _, qualifier := range pkgInput.Qualifiers { + // to prevent https://github.com/golang/go/discussions/56010 + qualifier := qualifier + pkgQualifierFilter = append(pkgQualifierFilter, model.PackageQualifierSpec{ + Key: qualifier.Key, + Value: &qualifier.Value, + }) + } + + pkgFilter := &model.PkgSpec{ + Type: &pkgInput.Type, + Namespace: pkgInput.Namespace, + Name: &pkgInput.Name, + Version: pkgInput.Version, + Subpath: pkgInput.Subpath, + Qualifiers: pkgQualifierFilter, + } + + o, err := model.Occurrences(ctx, gqlclient, model.IsOccurrenceSpec{ + Subject: &model.PackageOrSourceSpec{ + Package: pkgFilter, + }, + }) + + depVulnPath, depVulnTableRows, err = searchForSBOMViaArtifact(ctx, gqlclient, o.IsOccurrence[0].Artifact.Id, opts.depth, false) + if err != nil { + logger.Fatalf("error searching for SBOMs via artifact: %v", err) + } + + path = append(path, depVulnPath...) + tableRows = append(tableRows, depVulnTableRows...) + } + } else { + depVulnPath, depVulnTableRows, err := searchForSBOMViaArtifact(ctx, gqlclient, opts.searchString, opts.depth, true) + if err != nil { + logger.Fatalf("error searching for SBOMs via artifact: %v", err) + } + + path = append(path, depVulnPath...) + tableRows = append(tableRows, depVulnTableRows...) + + if len(depVulnPath) == 0 { + split := strings.Split(opts.searchString, ":") + if len(split) != 2 { + logger.Fatalf("failed to parse artifact. 
Needs to be in algorithm:digest form") + } + artifactFilter := model.ArtifactSpec{ + Algorithm: ptrfrom.String(strings.ToLower(split[0])), + Digest: ptrfrom.String(strings.ToLower(split[1])), + } + + o, err := model.Occurrences(ctx, gqlclient, model.IsOccurrenceSpec{ + Artifact: &artifactFilter, + }) + + depVulnPath, depVulnTableRows, err = searchForSBOMViaPkg(ctx, gqlclient, o.IsOccurrence[0].Subject.(*model.AllIsOccurrencesTreeSubjectPackage).Namespaces[0].Names[0].Versions[0].Id, opts.depth, false) + if err != nil { + logger.Fatalf("error searching via hasSBOM: %v", err) + } + + path = append(path, depVulnPath...) + tableRows = append(tableRows, depVulnTableRows...) + } } - path = append(path, depVulnPath...) - tableRows = append(tableRows, depVulnTableRows...) if len(path) > 0 { t.AppendRows(tableRows) @@ -430,7 +506,12 @@ type pkgVersionNeighborQueryResults struct { isDep model.AllHasSBOMTreeIncludedDependenciesIsDependency } -func getVulnAndVexNeighbors(ctx context.Context, gqlclient graphql.Client, pkgID string, isDep model.AllHasSBOMTreeIncludedDependenciesIsDependency) (*pkgVersionNeighborQueryResults, error) { +type artifactVersionNeighborQueryResults struct { + pkgVersionNeighborResponse *model.NeighborsResponse + isArt model.AllHasSBOMTreeIncludedOccurrencesIsOccurrence +} + +func getVulnAndVexNeighborsForPackage(ctx context.Context, gqlclient graphql.Client, pkgID string, isDep model.AllHasSBOMTreeIncludedDependenciesIsDependency) (*pkgVersionNeighborQueryResults, error) { pkgVersionNeighborResponse, err := model.Neighbors(ctx, gqlclient, pkgID, []model.Edge{model.EdgePackageCertifyVuln, model.EdgePackageCertifyVexStatement}) if err != nil { return nil, fmt.Errorf("failed to get neighbors for pkgID: %s with error %w", pkgID, err) @@ -438,10 +519,18 @@ func getVulnAndVexNeighbors(ctx context.Context, gqlclient graphql.Client, pkgID return &pkgVersionNeighborQueryResults{pkgVersionNeighborResponse: pkgVersionNeighborResponse, isDep: isDep}, nil } -// searchPkgViaHasSBOM takes in either a purl or URI for the initial value to find the hasSBOM node. +func getVulnAndVexNeighborsForArtifact(ctx context.Context, gqlclient graphql.Client, pkgID string, isArt model.AllHasSBOMTreeIncludedOccurrencesIsOccurrence) (*artifactVersionNeighborQueryResults, error) { + pkgVersionNeighborResponse, err := model.Neighbors(ctx, gqlclient, pkgID, []model.Edge{model.EdgeArtifactCertifyVexStatement}) + if err != nil { + return nil, fmt.Errorf("failed to get neighbors for pkgID: %s with error %w", pkgID, err) + } + return &artifactVersionNeighborQueryResults{pkgVersionNeighborResponse: pkgVersionNeighborResponse, isArt: isArt}, nil +} + +// searchForSBOMViaPkg takes in either a purl or URI for the initial value to find the hasSBOM node. // From there is recursively searches through all the dependencies to determine if it contains hasSBOM nodes. // It concurrent checks the package version node if it contains vulnerabilities and VEX data. 
-func searchPkgViaHasSBOM(ctx context.Context, gqlclient graphql.Client, searchString string, maxLength int, isPurl bool) ([]string, []table.Row, error) { +func searchForSBOMViaPkg(ctx context.Context, gqlclient graphql.Client, searchString string, maxLength int, primaryCall bool) ([]string, []table.Row, error) { var path []string var tableRows []table.Row checkedPkgIDs := make(map[string]bool) @@ -471,22 +560,15 @@ func searchPkgViaHasSBOM(ctx context.Context, gqlclient graphql.Client, searchSt var foundHasSBOMPkg *model.HasSBOMsResponse var err error - // if the initial depth, check if its a purl or an SBOM URI. Otherwise always search by pkgID - if nowNode.depth == 0 { - if isPurl { - pkgResponse, err := getPkgResponseFromPurl(ctx, gqlclient, now) - if err != nil { - return nil, nil, fmt.Errorf("getPkgResponseFromPurl - error: %v", err) - } - foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Package: &model.PkgSpec{Id: &pkgResponse.Packages[0].Namespaces[0].Names[0].Versions[0].Id}}}) - if err != nil { - return nil, nil, fmt.Errorf("failed getting hasSBOM via purl: %s with error :%w", now, err) - } - } else { - foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Uri: &now}) - if err != nil { - return nil, nil, fmt.Errorf("failed getting hasSBOM via URI: %s with error: %w", now, err) - } + // if the initial depth, check if it's a purl or an SBOM URI. Otherwise, always search by pkgID + if nowNode.depth == 0 && primaryCall { + pkgResponse, err := getPkgResponseFromPurl(ctx, gqlclient, now) + if err != nil { + return nil, nil, fmt.Errorf("getPkgResponseFromPurl - error: %v", err) + } + foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Package: &model.PkgSpec{Id: &pkgResponse.Packages[0].Namespaces[0].Names[0].Versions[0].Id}}}) + if err != nil { + return nil, nil, fmt.Errorf("failed getting hasSBOM via purl: %s with error :%w", now, err) } } else { foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Package: &model.PkgSpec{Id: &now}}}) @@ -529,7 +611,7 @@ func searchPkgViaHasSBOM(ctx context.Context, gqlclient graphql.Client, searchSt if !dfsN.expanded { queue = append(queue, depPkgID) } - pkgVersionNeighbors, err := getVulnAndVexNeighbors(ctx, gqlclient, depPkgID, isDep) + pkgVersionNeighbors, err := getVulnAndVexNeighborsForPackage(ctx, gqlclient, depPkgID, isDep) if err != nil { return nil, nil, fmt.Errorf("getVulnAndVexNeighbors failed with error: %w", err) } @@ -548,20 +630,153 @@ func searchPkgViaHasSBOM(ctx context.Context, gqlclient graphql.Client, searchSt for _, result := range collectedPkgVersionResults { for _, neighbor := range result.pkgVersionNeighborResponse.Neighbors { if certifyVuln, ok := neighbor.(*model.NeighborsNeighborsCertifyVuln); ok { - if !checkedCertifyVulnIDs[certifyVuln.Vulnerability.VulnerabilityIDs[0].Id] { - if certifyVuln.Vulnerability.Type != noVulnType { - checkedCertifyVulnIDs[certifyVuln.Vulnerability.VulnerabilityIDs[0].Id] = true - for _, vuln := range certifyVuln.Vulnerability.VulnerabilityIDs { - tableRows = append(tableRows, table.Row{certifyVulnStr, certifyVuln.Id, "vulnerability ID: " + vuln.VulnerabilityID}) - path = append(path, []string{vuln.Id, certifyVuln.Id, - certifyVuln.Package.Namespaces[0].Names[0].Versions[0].Id, - certifyVuln.Package.Namespaces[0].Names[0].Id, certifyVuln.Package.Namespaces[0].Id, - certifyVuln.Package.Id}...) 
+ if !checkedCertifyVulnIDs[certifyVuln.Id] && certifyVuln.Vulnerability.Type != noVulnType { + checkedCertifyVulnIDs[certifyVuln.Id] = true + for _, vuln := range certifyVuln.Vulnerability.VulnerabilityIDs { + tableRows = append(tableRows, table.Row{certifyVulnStr, certifyVuln.Id, "vulnerability ID: " + vuln.VulnerabilityID}) + path = append(path, []string{vuln.Id, certifyVuln.Id, + certifyVuln.Package.Namespaces[0].Names[0].Versions[0].Id, + certifyVuln.Package.Namespaces[0].Names[0].Id, certifyVuln.Package.Namespaces[0].Id, + certifyVuln.Package.Id}...) + } + path = append(path, result.isDep.Id, result.isDep.Package.Namespaces[0].Names[0].Versions[0].Id, + result.isDep.Package.Namespaces[0].Names[0].Id, result.isDep.Package.Namespaces[0].Id, + result.isDep.Package.Id) + } + } + + if certifyVex, ok := neighbor.(*model.NeighborsNeighborsCertifyVEXStatement); ok { + for _, vuln := range certifyVex.Vulnerability.VulnerabilityIDs { + tableRows = append(tableRows, table.Row{vexLinkStr, certifyVex.Id, "vulnerability ID: " + vuln.VulnerabilityID + ", Vex Status: " + string(certifyVex.Status) + ", Subject: " + vexSubjectString(certifyVex.Subject)}) + path = append(path, certifyVex.Id, vuln.Id) + } + path = append(path, vexSubjectIds(certifyVex.Subject)...) + } + } + } + return path, tableRows, nil +} + +func searchForSBOMViaArtifact(ctx context.Context, gqlclient graphql.Client, searchString string, maxLength int, primaryCall bool) ([]string, []table.Row, error) { + var path []string + var tableRows []table.Row + checkedArtifactIDs := make(map[string]bool) + var collectedArtifactResults []*artifactVersionNeighborQueryResults + + queue := make([]string, 0) // the queue of nodes in bfs + type dfsNode struct { + expanded bool // true once all node neighbors are added to queue + parent string + artID string + depth int + } + nodeMap := map[string]dfsNode{} + + nodeMap[searchString] = dfsNode{} + queue = append(queue, searchString) + + for len(queue) > 0 { + now := queue[0] + queue = queue[1:] + nowNode := nodeMap[now] + + if maxLength != 0 && nowNode.depth >= maxLength { + break + } + + var foundHasSBOMPkg *model.HasSBOMsResponse + var err error + + if nowNode.depth == 0 && primaryCall { + split := strings.Split(now, ":") + if len(split) != 2 { + return nil, nil, fmt.Errorf("error splitting search string %s, search string should have two sections algorithm and digest: %v", now, split) + } + algorithm := strings.ToLower(split[0]) + digest := strings.ToLower(split[1]) + + foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{ + Artifact: &model.ArtifactSpec{ + Algorithm: &algorithm, + Digest: &digest, + }, + }}) + if err != nil { + return nil, nil, fmt.Errorf("failed getting hasSBOM via URI: %s with error: %w", now, err) + } + } else { + foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Artifact: &model.ArtifactSpec{Id: &now}}}) + if err != nil { + return nil, nil, fmt.Errorf("failed getting hasSBOM via artifact: %s with error :%w", now, err) + } + } + + for _, hasSBOM := range foundHasSBOMPkg.HasSBOM { + if pkgResponse, ok := foundHasSBOMPkg.HasSBOM[0].Subject.(*model.AllHasSBOMTreeSubjectPackage); ok { + if pkgResponse.Type != guacType { + if !checkedArtifactIDs[pkgResponse.Namespaces[0].Names[0].Versions[0].Id] { + vulnPath, pkgVulnTableRows, err := queryVulnsViaPackageNeighbors(ctx, gqlclient, pkgResponse.Namespaces[0].Names[0].Versions[0].Id) + if err != nil { + return nil, nil, 
fmt.Errorf("error querying neighbor: %v", err) } - path = append(path, result.isDep.Id, result.isDep.Package.Namespaces[0].Names[0].Versions[0].Id, - result.isDep.Package.Namespaces[0].Names[0].Id, result.isDep.Package.Namespaces[0].Id, - result.isDep.Package.Id) + path = append(path, vulnPath...) + tableRows = append(tableRows, pkgVulnTableRows...) + path = append([]string{pkgResponse.Namespaces[0].Names[0].Versions[0].Id, + pkgResponse.Namespaces[0].Names[0].Id, pkgResponse.Namespaces[0].Id, + pkgResponse.Id}, path...) + checkedArtifactIDs[pkgResponse.Namespaces[0].Names[0].Versions[0].Id] = true + } + } + } + for _, isOcc := range hasSBOM.IncludedOccurrences { + if *isOcc.Subject.GetTypename() == guacType { + continue + } + var matchingArtifactIDs []string + matchingArtifactIDs = append(matchingArtifactIDs, isOcc.Artifact.Id) + + for _, artID := range matchingArtifactIDs { + dfsN, seen := nodeMap[artID] + if !seen { + dfsN = dfsNode{ + parent: now, + artID: artID, + depth: nowNode.depth + 1, + } + nodeMap[artID] = dfsN + } + if !dfsN.expanded { + queue = append(queue, artID) + } + artifactNeighbors, err := getVulnAndVexNeighborsForArtifact(ctx, gqlclient, artID, isOcc) + if err != nil { + return nil, nil, fmt.Errorf("getVulnAndVexNeighborsForArtifact failed with error: %w", err) + } + collectedArtifactResults = append(collectedArtifactResults, artifactNeighbors) + checkedArtifactIDs[artID] = true + } + } + } + nowNode.expanded = true + nodeMap[now] = nowNode + } + + checkedCertifyVulnIDs := make(map[string]bool) + + // Collect results from the channel + for _, result := range collectedArtifactResults { + for _, neighbor := range result.pkgVersionNeighborResponse.Neighbors { + if certifyVuln, ok := neighbor.(*model.NeighborsNeighborsCertifyVuln); ok { + if !checkedCertifyVulnIDs[certifyVuln.Id] && certifyVuln.Vulnerability.Type != noVulnType { + checkedCertifyVulnIDs[certifyVuln.Id] = true + for _, vuln := range certifyVuln.Vulnerability.VulnerabilityIDs { + tableRows = append(tableRows, table.Row{certifyVulnStr, certifyVuln.Id, "vulnerability ID: " + vuln.VulnerabilityID}) + path = append(path, []string{vuln.Id, certifyVuln.Id, + certifyVuln.Package.Namespaces[0].Names[0].Versions[0].Id, + certifyVuln.Package.Namespaces[0].Names[0].Id, certifyVuln.Package.Namespaces[0].Id, + certifyVuln.Package.Id}...) 
} + path = append(path, result.isArt.Id, result.isArt.Artifact.Id) } } From c8e7e526719430447bbe73134b846a361402f93f Mon Sep 17 00:00:00 2001 From: nathannaveen <42319948+nathannaveen@users.noreply.github.com> Date: Wed, 12 Jun 2024 15:34:17 -0500 Subject: [PATCH 2/7] Fixed lint issues Signed-off-by: nathannaveen <42319948+nathannaveen@users.noreply.github.com> --- cmd/guacone/cmd/vulnerability.go | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/cmd/guacone/cmd/vulnerability.go b/cmd/guacone/cmd/vulnerability.go index 8018478329..6ee92532ea 100644 --- a/cmd/guacone/cmd/vulnerability.go +++ b/cmd/guacone/cmd/vulnerability.go @@ -163,6 +163,9 @@ func printVulnInfo(ctx context.Context, gqlclient graphql.Client, t table.Writer Package: pkgFilter, }, }) + if err != nil { + logger.Fatalf("error querying for occurrences: %v", err) + } depVulnPath, depVulnTableRows, err = searchForSBOMViaArtifact(ctx, gqlclient, o.IsOccurrence[0].Artifact.Id, opts.depth, false) if err != nil { @@ -194,8 +197,15 @@ func printVulnInfo(ctx context.Context, gqlclient graphql.Client, t table.Writer o, err := model.Occurrences(ctx, gqlclient, model.IsOccurrenceSpec{ Artifact: &artifactFilter, }) + if err != nil { + logger.Fatalf("error querying for occurrences: %v", err) + } - depVulnPath, depVulnTableRows, err = searchForSBOMViaPkg(ctx, gqlclient, o.IsOccurrence[0].Subject.(*model.AllIsOccurrencesTreeSubjectPackage).Namespaces[0].Names[0].Versions[0].Id, opts.depth, false) + subjectPackage, ok := o.IsOccurrence[0].Subject.(*model.AllIsOccurrencesTreeSubjectPackage) + if !ok { + logger.Fatalf("error asserting type for Subject as *model.AllIsOccurrencesTreeSubjectPackage") + } + depVulnPath, depVulnTableRows, err = searchForSBOMViaPkg(ctx, gqlclient, subjectPackage.Namespaces[0].Names[0].Versions[0].Id, opts.depth, false) if err != nil { logger.Fatalf("error searching via hasSBOM: %v", err) } From 52bfb79131743ccbffbe348526aa39f1aa2f11cd Mon Sep 17 00:00:00 2001 From: nathannaveen <42319948+nathannaveen@users.noreply.github.com> Date: Thu, 18 Jul 2024 15:02:48 -0500 Subject: [PATCH 3/7] Updated based on code review Signed-off-by: nathannaveen <42319948+nathannaveen@users.noreply.github.com> --- cmd/guacone/cmd/vulnerability.go | 478 +++++------------------------ pkg/guacanalytics/searchForSBOM.go | 427 ++++++++++++++++++++++++++ 2 files changed, 504 insertions(+), 401 deletions(-) create mode 100644 pkg/guacanalytics/searchForSBOM.go diff --git a/cmd/guacone/cmd/vulnerability.go b/cmd/guacone/cmd/vulnerability.go index 6ee92532ea..aa33c56a49 100644 --- a/cmd/guacone/cmd/vulnerability.go +++ b/cmd/guacone/cmd/vulnerability.go @@ -18,6 +18,8 @@ package cmd import ( "context" "fmt" + "github.com/guacsec/guac/pkg/guacanalytics" + "go.uber.org/zap" "net/http" "os" "strings" @@ -125,7 +127,9 @@ func printVulnInfo(ctx context.Context, gqlclient graphql.Client, t table.Writer var tableRows []table.Row if opts.isPurl { - depVulnPath, depVulnTableRows, err := searchForSBOMViaPkg(ctx, gqlclient, opts.searchString, opts.depth, true) + // The primaryCall parameter in searchForSBOMViaPkg is there for us to know whether + // the searchString is expected to be a PURL, and we are searching via a purl. 
+ depVulnPath, depVulnTableRows, err := guacanalytics.SearchForSBOMViaPkg(ctx, gqlclient, opts.searchString, opts.depth, true) if err != nil { logger.Fatalf("error searching via hasSBOM: %v", err) } @@ -134,40 +138,11 @@ func printVulnInfo(ctx context.Context, gqlclient graphql.Client, t table.Writer tableRows = append(tableRows, depVulnTableRows...) if len(depVulnPath) == 0 { - pkgInput, err := helpers.PurlToPkg(opts.searchString) - if err != nil { - logger.Fatalf("failed to parse PURL: %v", err) - } - - pkgQualifierFilter := []model.PackageQualifierSpec{} - for _, qualifier := range pkgInput.Qualifiers { - // to prevent https://github.com/golang/go/discussions/56010 - qualifier := qualifier - pkgQualifierFilter = append(pkgQualifierFilter, model.PackageQualifierSpec{ - Key: qualifier.Key, - Value: &qualifier.Value, - }) - } + err, o := searchStringToOccurrence(ctx, gqlclient, opts, logger) - pkgFilter := &model.PkgSpec{ - Type: &pkgInput.Type, - Namespace: pkgInput.Namespace, - Name: &pkgInput.Name, - Version: pkgInput.Version, - Subpath: pkgInput.Subpath, - Qualifiers: pkgQualifierFilter, - } - - o, err := model.Occurrences(ctx, gqlclient, model.IsOccurrenceSpec{ - Subject: &model.PackageOrSourceSpec{ - Package: pkgFilter, - }, - }) - if err != nil { - logger.Fatalf("error querying for occurrences: %v", err) - } - - depVulnPath, depVulnTableRows, err = searchForSBOMViaArtifact(ctx, gqlclient, o.IsOccurrence[0].Artifact.Id, opts.depth, false) + // The primaryCall parameter in searchForSBOMViaArtifact is there for us to know that + // the searchString is expected to be a PURL, but isn't, so we have to check via artifacts instead of PURLs. + depVulnPath, depVulnTableRows, err = guacanalytics.SearchForSBOMViaArtifact(ctx, gqlclient, o.IsOccurrence[0].Artifact.Id, opts.depth, false) if err != nil { logger.Fatalf("error searching for SBOMs via artifact: %v", err) } @@ -176,7 +151,9 @@ func printVulnInfo(ctx context.Context, gqlclient graphql.Client, t table.Writer tableRows = append(tableRows, depVulnTableRows...) } } else { - depVulnPath, depVulnTableRows, err := searchForSBOMViaArtifact(ctx, gqlclient, opts.searchString, opts.depth, true) + // The primaryCall parameter in searchForSBOMViaArtifact is there for us to know that + // the searchString isn't expected to be a PURL, so we have to check artifacts. + depVulnPath, depVulnTableRows, err := guacanalytics.SearchForSBOMViaArtifact(ctx, gqlclient, opts.searchString, opts.depth, true) if err != nil { logger.Fatalf("error searching for SBOMs via artifact: %v", err) } @@ -185,27 +162,11 @@ func printVulnInfo(ctx context.Context, gqlclient graphql.Client, t table.Writer tableRows = append(tableRows, depVulnTableRows...) if len(depVulnPath) == 0 { - split := strings.Split(opts.searchString, ":") - if len(split) != 2 { - logger.Fatalf("failed to parse artifact. 
Needs to be in algorithm:digest form") - } - artifactFilter := model.ArtifactSpec{ - Algorithm: ptrfrom.String(strings.ToLower(split[0])), - Digest: ptrfrom.String(strings.ToLower(split[1])), - } - - o, err := model.Occurrences(ctx, gqlclient, model.IsOccurrenceSpec{ - Artifact: &artifactFilter, - }) - if err != nil { - logger.Fatalf("error querying for occurrences: %v", err) - } + err, subjectPackage := searchStringToPkg(ctx, gqlclient, opts, logger) - subjectPackage, ok := o.IsOccurrence[0].Subject.(*model.AllIsOccurrencesTreeSubjectPackage) - if !ok { - logger.Fatalf("error asserting type for Subject as *model.AllIsOccurrencesTreeSubjectPackage") - } - depVulnPath, depVulnTableRows, err = searchForSBOMViaPkg(ctx, gqlclient, subjectPackage.Namespaces[0].Names[0].Versions[0].Id, opts.depth, false) + // The primaryCall parameter in searchForSBOMViaPkg is there for us to know that + // the searchString is expected to be an artifact, but isn't, so we have to check via PURLs instead of artifacts. + depVulnPath, depVulnTableRows, err = guacanalytics.SearchForSBOMViaPkg(ctx, gqlclient, subjectPackage.Namespaces[0].Names[0].Versions[0].Id, opts.depth, false) if err != nil { logger.Fatalf("error searching via hasSBOM: %v", err) } @@ -225,6 +186,66 @@ func printVulnInfo(ctx context.Context, gqlclient graphql.Client, t table.Writer } } +func searchStringToPkg(ctx context.Context, gqlclient graphql.Client, opts queryOptions, logger *zap.SugaredLogger) (error, *model.AllIsOccurrencesTreeSubjectPackage) { + split := strings.Split(opts.searchString, ":") + if len(split) != 2 { + logger.Fatalf("failed to parse artifact. Needs to be in algorithm:digest form") + } + artifactFilter := model.ArtifactSpec{ + Algorithm: ptrfrom.String(strings.ToLower(split[0])), + Digest: ptrfrom.String(strings.ToLower(split[1])), + } + + o, err := model.Occurrences(ctx, gqlclient, model.IsOccurrenceSpec{ + Artifact: &artifactFilter, + }) + if err != nil { + logger.Fatalf("error querying for occurrences: %v", err) + } + + subjectPackage, ok := o.IsOccurrence[0].Subject.(*model.AllIsOccurrencesTreeSubjectPackage) + if !ok { + logger.Fatalf("error asserting type for Subject as *model.AllIsOccurrencesTreeSubjectPackage") + } + return err, subjectPackage +} + +func searchStringToOccurrence(ctx context.Context, gqlclient graphql.Client, opts queryOptions, logger *zap.SugaredLogger) (error, *model.OccurrencesResponse) { + pkgInput, err := helpers.PurlToPkg(opts.searchString) + if err != nil { + logger.Fatalf("failed to parse PURL: %v", err) + } + + pkgQualifierFilter := []model.PackageQualifierSpec{} + for _, qualifier := range pkgInput.Qualifiers { + // to prevent https://github.com/golang/go/discussions/56010 + qualifier := qualifier + pkgQualifierFilter = append(pkgQualifierFilter, model.PackageQualifierSpec{ + Key: qualifier.Key, + Value: &qualifier.Value, + }) + } + + pkgFilter := &model.PkgSpec{ + Type: &pkgInput.Type, + Namespace: pkgInput.Namespace, + Name: &pkgInput.Name, + Version: pkgInput.Version, + Subpath: pkgInput.Subpath, + Qualifiers: pkgQualifierFilter, + } + + o, err := model.Occurrences(ctx, gqlclient, model.IsOccurrenceSpec{ + Subject: &model.PackageOrSourceSpec{ + Package: pkgFilter, + }, + }) + if err != nil { + logger.Fatalf("error querying for occurrences: %v", err) + } + return err, o +} + func printVulnInfoByVulnId(ctx context.Context, gqlclient graphql.Client, t table.Writer, opts queryOptions) { logger := logging.FromContext(ctx) var tableRows []table.Row @@ -275,60 +296,6 @@ func 
printVulnInfoByVulnId(ctx context.Context, gqlclient graphql.Client, t tabl } } -func queryVulnsViaPackageNeighbors(ctx context.Context, gqlclient graphql.Client, pkgVersionID string) ([]string, []table.Row, error) { - var path []string - var tableRows []table.Row - var edgeTypes = []model.Edge{model.EdgePackageCertifyVuln, model.EdgePackageCertifyVexStatement} - - pkgVersionNeighborResponse, err := model.Neighbors(ctx, gqlclient, pkgVersionID, edgeTypes) - if err != nil { - return nil, nil, fmt.Errorf("error querying neighbor for vulnerability: %w", err) - } - certifyVulnFound := false - for _, neighbor := range pkgVersionNeighborResponse.Neighbors { - if certifyVuln, ok := neighbor.(*model.NeighborsNeighborsCertifyVuln); ok { - certifyVulnFound = true - if certifyVuln.Vulnerability.Type != noVulnType { - for _, vuln := range certifyVuln.Vulnerability.VulnerabilityIDs { - tableRows = append(tableRows, table.Row{certifyVulnStr, certifyVuln.Id, "vulnerability ID: " + vuln.VulnerabilityID}) - path = append(path, []string{vuln.Id, certifyVuln.Id, - certifyVuln.Package.Namespaces[0].Names[0].Versions[0].Id, - certifyVuln.Package.Namespaces[0].Names[0].Id, certifyVuln.Package.Namespaces[0].Id, - certifyVuln.Package.Id}...) - } - } - } - - if certifyVex, ok := neighbor.(*model.NeighborsNeighborsCertifyVEXStatement); ok { - for _, vuln := range certifyVex.Vulnerability.VulnerabilityIDs { - tableRows = append(tableRows, table.Row{vexLinkStr, certifyVex.Id, "vulnerability ID: " + vuln.VulnerabilityID + ", Vex Status: " + string(certifyVex.Status) + ", Subject: " + vexSubjectString(certifyVex.Subject)}) - path = append(path, certifyVex.Id, vuln.Id) - } - path = append(path, vexSubjectIds(certifyVex.Subject)...) - } - - } - if !certifyVulnFound { - return nil, nil, fmt.Errorf("error certify vulnerability node not found, incomplete data. Please ensure certifier has run by running guacone certifier osv") - } - return path, tableRows, nil -} - -func vexSubjectString(s model.AllCertifyVEXStatementSubjectPackageOrArtifact) string { - switch v := s.(type) { - case *model.AllCertifyVEXStatementSubjectArtifact: - return fmt.Sprintf("artifact (id:%v) %v:%v", v.Id, v.Algorithm, v.Digest) - case *model.AllCertifyVEXStatementSubjectPackage: - return fmt.Sprintf("package (id:%v) %v:%v/%v@%v", - v.Id, - v.Type, - v.Namespaces[0].Namespace, - v.Namespaces[0].Names[0].Name, - v.Namespaces[0].Names[0].Versions[0].Version) - default: - return "unknown subject" - } -} func vexSubjectIds(s model.AllCertifyVEXStatementSubjectPackageOrArtifact) []string { switch v := s.(type) { case *model.AllCertifyVEXStatementSubjectArtifact: @@ -392,7 +359,7 @@ func queryVulnsViaVulnNodeNeighbors(ctx context.Context, gqlclient graphql.Clien if certifyVex, ok := neighbor.node.(*model.NeighborsNeighborsCertifyVEXStatement); ok { certifyVulnFound = true for _, vuln := range certifyVex.Vulnerability.VulnerabilityIDs { - tableRows = append(tableRows, table.Row{vexLinkStr, certifyVex.Id, "vulnerability ID: " + vuln.VulnerabilityID + ", Vex Status: " + string(certifyVex.Status) + ", Subject: " + vexSubjectString(certifyVex.Subject)}) + tableRows = append(tableRows, table.Row{vexLinkStr, certifyVex.Id, "vulnerability ID: " + vuln.VulnerabilityID + ", Vex Status: " + string(certifyVex.Status) + ", Subject: " + guacanalytics.VexSubjectString(certifyVex.Subject)}) path = append(path, certifyVex.Id, vuln.Id) } path = append(path, vexSubjectIds(certifyVex.Subject)...) 
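Both vexSubjectString (moved to pkg/guacanalytics as VexSubjectString later in this series) and the retained vexSubjectIds rely on the same Go idiom: a type switch over the generated union type AllCertifyVEXStatementSubjectPackageOrArtifact, rendering whichever member is present and falling back for unknown subjects. The stand-alone sketch below illustrates that pattern with simplified stand-in types rather than the generated GraphQL model; vexSubject, artifactSubject, and packageSubject are illustrative names, not types from this patch.

package main

import "fmt"

// Stand-ins for the generated union members; the real code switches on
// *model.AllCertifyVEXStatementSubjectArtifact and *model.AllCertifyVEXStatementSubjectPackage.
type vexSubject interface{ isVexSubject() }

type artifactSubject struct{ ID, Algorithm, Digest string }
type packageSubject struct{ ID, Type, Namespace, Name, Version string }

func (artifactSubject) isVexSubject() {}
func (packageSubject) isVexSubject()  {}

// subjectString mirrors the shape of VexSubjectString: render whichever
// member of the union is present, with a fixed fallback for unknown types.
func subjectString(s vexSubject) string {
	switch v := s.(type) {
	case artifactSubject:
		return fmt.Sprintf("artifact (id:%v) %v:%v", v.ID, v.Algorithm, v.Digest)
	case packageSubject:
		return fmt.Sprintf("package (id:%v) %v:%v/%v@%v", v.ID, v.Type, v.Namespace, v.Name, v.Version)
	default:
		return "unknown subject"
	}
}

func main() {
	fmt.Println(subjectString(artifactSubject{ID: "42", Algorithm: "sha256", Digest: "abc123"}))
	fmt.Println(subjectString(packageSubject{ID: "7", Type: "golang", Namespace: "github.com/guacsec", Name: "guac", Version: "v0.1.0"}))
}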
@@ -511,297 +478,6 @@ func searchDependencyPackagesReverse(ctx context.Context, gqlclient graphql.Clie } } -type pkgVersionNeighborQueryResults struct { - pkgVersionNeighborResponse *model.NeighborsResponse - isDep model.AllHasSBOMTreeIncludedDependenciesIsDependency -} - -type artifactVersionNeighborQueryResults struct { - pkgVersionNeighborResponse *model.NeighborsResponse - isArt model.AllHasSBOMTreeIncludedOccurrencesIsOccurrence -} - -func getVulnAndVexNeighborsForPackage(ctx context.Context, gqlclient graphql.Client, pkgID string, isDep model.AllHasSBOMTreeIncludedDependenciesIsDependency) (*pkgVersionNeighborQueryResults, error) { - pkgVersionNeighborResponse, err := model.Neighbors(ctx, gqlclient, pkgID, []model.Edge{model.EdgePackageCertifyVuln, model.EdgePackageCertifyVexStatement}) - if err != nil { - return nil, fmt.Errorf("failed to get neighbors for pkgID: %s with error %w", pkgID, err) - } - return &pkgVersionNeighborQueryResults{pkgVersionNeighborResponse: pkgVersionNeighborResponse, isDep: isDep}, nil -} - -func getVulnAndVexNeighborsForArtifact(ctx context.Context, gqlclient graphql.Client, pkgID string, isArt model.AllHasSBOMTreeIncludedOccurrencesIsOccurrence) (*artifactVersionNeighborQueryResults, error) { - pkgVersionNeighborResponse, err := model.Neighbors(ctx, gqlclient, pkgID, []model.Edge{model.EdgeArtifactCertifyVexStatement}) - if err != nil { - return nil, fmt.Errorf("failed to get neighbors for pkgID: %s with error %w", pkgID, err) - } - return &artifactVersionNeighborQueryResults{pkgVersionNeighborResponse: pkgVersionNeighborResponse, isArt: isArt}, nil -} - -// searchForSBOMViaPkg takes in either a purl or URI for the initial value to find the hasSBOM node. -// From there is recursively searches through all the dependencies to determine if it contains hasSBOM nodes. -// It concurrent checks the package version node if it contains vulnerabilities and VEX data. -func searchForSBOMViaPkg(ctx context.Context, gqlclient graphql.Client, searchString string, maxLength int, primaryCall bool) ([]string, []table.Row, error) { - var path []string - var tableRows []table.Row - checkedPkgIDs := make(map[string]bool) - var collectedPkgVersionResults []*pkgVersionNeighborQueryResults - - queue := make([]string, 0) // the queue of nodes in bfs - type dfsNode struct { - expanded bool // true once all node neighbors are added to queue - parent string - pkgID string - depth int - } - nodeMap := map[string]dfsNode{} - - nodeMap[searchString] = dfsNode{} - queue = append(queue, searchString) - - for len(queue) > 0 { - now := queue[0] - queue = queue[1:] - nowNode := nodeMap[now] - - if maxLength != 0 && nowNode.depth >= maxLength { - break - } - - var foundHasSBOMPkg *model.HasSBOMsResponse - var err error - - // if the initial depth, check if it's a purl or an SBOM URI. 
Otherwise, always search by pkgID - if nowNode.depth == 0 && primaryCall { - pkgResponse, err := getPkgResponseFromPurl(ctx, gqlclient, now) - if err != nil { - return nil, nil, fmt.Errorf("getPkgResponseFromPurl - error: %v", err) - } - foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Package: &model.PkgSpec{Id: &pkgResponse.Packages[0].Namespaces[0].Names[0].Versions[0].Id}}}) - if err != nil { - return nil, nil, fmt.Errorf("failed getting hasSBOM via purl: %s with error :%w", now, err) - } - } else { - foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Package: &model.PkgSpec{Id: &now}}}) - if err != nil { - return nil, nil, fmt.Errorf("failed getting hasSBOM via purl: %s with error :%w", now, err) - } - } - - for _, hasSBOM := range foundHasSBOMPkg.HasSBOM { - if pkgResponse, ok := foundHasSBOMPkg.HasSBOM[0].Subject.(*model.AllHasSBOMTreeSubjectPackage); ok { - if pkgResponse.Type != guacType { - if !checkedPkgIDs[pkgResponse.Namespaces[0].Names[0].Versions[0].Id] { - vulnPath, pkgVulnTableRows, err := queryVulnsViaPackageNeighbors(ctx, gqlclient, pkgResponse.Namespaces[0].Names[0].Versions[0].Id) - if err != nil { - return nil, nil, fmt.Errorf("error querying neighbor: %v", err) - } - path = append(path, vulnPath...) - tableRows = append(tableRows, pkgVulnTableRows...) - path = append([]string{pkgResponse.Namespaces[0].Names[0].Versions[0].Id, - pkgResponse.Namespaces[0].Names[0].Id, pkgResponse.Namespaces[0].Id, - pkgResponse.Id}, path...) - checkedPkgIDs[pkgResponse.Namespaces[0].Names[0].Versions[0].Id] = true - } - } - } - for _, isDep := range hasSBOM.IncludedDependencies { - if isDep.DependencyPackage.Type == guacType { - continue - } - depPkgID := isDep.DependencyPackage.Namespaces[0].Names[0].Versions[0].Id - dfsN, seen := nodeMap[depPkgID] - if !seen { - dfsN = dfsNode{ - parent: now, - pkgID: depPkgID, - depth: nowNode.depth + 1, - } - nodeMap[depPkgID] = dfsN - } - if !dfsN.expanded { - queue = append(queue, depPkgID) - } - pkgVersionNeighbors, err := getVulnAndVexNeighborsForPackage(ctx, gqlclient, depPkgID, isDep) - if err != nil { - return nil, nil, fmt.Errorf("getVulnAndVexNeighbors failed with error: %w", err) - } - collectedPkgVersionResults = append(collectedPkgVersionResults, pkgVersionNeighbors) - checkedPkgIDs[depPkgID] = true - - } - } - nowNode.expanded = true - nodeMap[now] = nowNode - } - - checkedCertifyVulnIDs := make(map[string]bool) - - // Collect results from the channel - for _, result := range collectedPkgVersionResults { - for _, neighbor := range result.pkgVersionNeighborResponse.Neighbors { - if certifyVuln, ok := neighbor.(*model.NeighborsNeighborsCertifyVuln); ok { - if !checkedCertifyVulnIDs[certifyVuln.Id] && certifyVuln.Vulnerability.Type != noVulnType { - checkedCertifyVulnIDs[certifyVuln.Id] = true - for _, vuln := range certifyVuln.Vulnerability.VulnerabilityIDs { - tableRows = append(tableRows, table.Row{certifyVulnStr, certifyVuln.Id, "vulnerability ID: " + vuln.VulnerabilityID}) - path = append(path, []string{vuln.Id, certifyVuln.Id, - certifyVuln.Package.Namespaces[0].Names[0].Versions[0].Id, - certifyVuln.Package.Namespaces[0].Names[0].Id, certifyVuln.Package.Namespaces[0].Id, - certifyVuln.Package.Id}...) 
- } - path = append(path, result.isDep.Id, result.isDep.Package.Namespaces[0].Names[0].Versions[0].Id, - result.isDep.Package.Namespaces[0].Names[0].Id, result.isDep.Package.Namespaces[0].Id, - result.isDep.Package.Id) - } - } - - if certifyVex, ok := neighbor.(*model.NeighborsNeighborsCertifyVEXStatement); ok { - for _, vuln := range certifyVex.Vulnerability.VulnerabilityIDs { - tableRows = append(tableRows, table.Row{vexLinkStr, certifyVex.Id, "vulnerability ID: " + vuln.VulnerabilityID + ", Vex Status: " + string(certifyVex.Status) + ", Subject: " + vexSubjectString(certifyVex.Subject)}) - path = append(path, certifyVex.Id, vuln.Id) - } - path = append(path, vexSubjectIds(certifyVex.Subject)...) - } - } - } - return path, tableRows, nil -} - -func searchForSBOMViaArtifact(ctx context.Context, gqlclient graphql.Client, searchString string, maxLength int, primaryCall bool) ([]string, []table.Row, error) { - var path []string - var tableRows []table.Row - checkedArtifactIDs := make(map[string]bool) - var collectedArtifactResults []*artifactVersionNeighborQueryResults - - queue := make([]string, 0) // the queue of nodes in bfs - type dfsNode struct { - expanded bool // true once all node neighbors are added to queue - parent string - artID string - depth int - } - nodeMap := map[string]dfsNode{} - - nodeMap[searchString] = dfsNode{} - queue = append(queue, searchString) - - for len(queue) > 0 { - now := queue[0] - queue = queue[1:] - nowNode := nodeMap[now] - - if maxLength != 0 && nowNode.depth >= maxLength { - break - } - - var foundHasSBOMPkg *model.HasSBOMsResponse - var err error - - if nowNode.depth == 0 && primaryCall { - split := strings.Split(now, ":") - if len(split) != 2 { - return nil, nil, fmt.Errorf("error splitting search string %s, search string should have two sections algorithm and digest: %v", now, split) - } - algorithm := strings.ToLower(split[0]) - digest := strings.ToLower(split[1]) - - foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{ - Artifact: &model.ArtifactSpec{ - Algorithm: &algorithm, - Digest: &digest, - }, - }}) - if err != nil { - return nil, nil, fmt.Errorf("failed getting hasSBOM via URI: %s with error: %w", now, err) - } - } else { - foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Artifact: &model.ArtifactSpec{Id: &now}}}) - if err != nil { - return nil, nil, fmt.Errorf("failed getting hasSBOM via artifact: %s with error :%w", now, err) - } - } - - for _, hasSBOM := range foundHasSBOMPkg.HasSBOM { - if pkgResponse, ok := foundHasSBOMPkg.HasSBOM[0].Subject.(*model.AllHasSBOMTreeSubjectPackage); ok { - if pkgResponse.Type != guacType { - if !checkedArtifactIDs[pkgResponse.Namespaces[0].Names[0].Versions[0].Id] { - vulnPath, pkgVulnTableRows, err := queryVulnsViaPackageNeighbors(ctx, gqlclient, pkgResponse.Namespaces[0].Names[0].Versions[0].Id) - if err != nil { - return nil, nil, fmt.Errorf("error querying neighbor: %v", err) - } - path = append(path, vulnPath...) - tableRows = append(tableRows, pkgVulnTableRows...) - path = append([]string{pkgResponse.Namespaces[0].Names[0].Versions[0].Id, - pkgResponse.Namespaces[0].Names[0].Id, pkgResponse.Namespaces[0].Id, - pkgResponse.Id}, path...) 
- checkedArtifactIDs[pkgResponse.Namespaces[0].Names[0].Versions[0].Id] = true - } - } - } - for _, isOcc := range hasSBOM.IncludedOccurrences { - if *isOcc.Subject.GetTypename() == guacType { - continue - } - var matchingArtifactIDs []string - matchingArtifactIDs = append(matchingArtifactIDs, isOcc.Artifact.Id) - - for _, artID := range matchingArtifactIDs { - dfsN, seen := nodeMap[artID] - if !seen { - dfsN = dfsNode{ - parent: now, - artID: artID, - depth: nowNode.depth + 1, - } - nodeMap[artID] = dfsN - } - if !dfsN.expanded { - queue = append(queue, artID) - } - artifactNeighbors, err := getVulnAndVexNeighborsForArtifact(ctx, gqlclient, artID, isOcc) - if err != nil { - return nil, nil, fmt.Errorf("getVulnAndVexNeighborsForArtifact failed with error: %w", err) - } - collectedArtifactResults = append(collectedArtifactResults, artifactNeighbors) - checkedArtifactIDs[artID] = true - } - } - } - nowNode.expanded = true - nodeMap[now] = nowNode - } - - checkedCertifyVulnIDs := make(map[string]bool) - - // Collect results from the channel - for _, result := range collectedArtifactResults { - for _, neighbor := range result.pkgVersionNeighborResponse.Neighbors { - if certifyVuln, ok := neighbor.(*model.NeighborsNeighborsCertifyVuln); ok { - if !checkedCertifyVulnIDs[certifyVuln.Id] && certifyVuln.Vulnerability.Type != noVulnType { - checkedCertifyVulnIDs[certifyVuln.Id] = true - for _, vuln := range certifyVuln.Vulnerability.VulnerabilityIDs { - tableRows = append(tableRows, table.Row{certifyVulnStr, certifyVuln.Id, "vulnerability ID: " + vuln.VulnerabilityID}) - path = append(path, []string{vuln.Id, certifyVuln.Id, - certifyVuln.Package.Namespaces[0].Names[0].Versions[0].Id, - certifyVuln.Package.Namespaces[0].Names[0].Id, certifyVuln.Package.Namespaces[0].Id, - certifyVuln.Package.Id}...) - } - path = append(path, result.isArt.Id, result.isArt.Artifact.Id) - } - } - - if certifyVex, ok := neighbor.(*model.NeighborsNeighborsCertifyVEXStatement); ok { - for _, vuln := range certifyVex.Vulnerability.VulnerabilityIDs { - tableRows = append(tableRows, table.Row{vexLinkStr, certifyVex.Id, "vulnerability ID: " + vuln.VulnerabilityID + ", Vex Status: " + string(certifyVex.Status) + ", Subject: " + vexSubjectString(certifyVex.Subject)}) - path = append(path, certifyVex.Id, vuln.Id) - } - path = append(path, vexSubjectIds(certifyVex.Subject)...) 
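Both search functions in this change share the same traversal skeleton: a FIFO queue of node IDs, a dfsNode map recording parent, depth, and whether a node has already been expanded, and an optional maxLength cut-off on depth. The self-contained sketch below shows just that skeleton over an in-memory adjacency map; neighbors, start, and maxDepth are illustrative stand-ins for the hasSBOM and occurrence queries the real code issues against the GraphQL server.

package main

import "fmt"

type dfsNode struct {
	expanded bool   // true once the node's neighbors have been queued
	parent   string // the node this one was reached from
	depth    int    // distance from the starting node
}

// bfs walks neighbors breadth-first from start, stopping once a node's depth
// reaches maxDepth (0 means unlimited), and returns the visit order.
func bfs(neighbors map[string][]string, start string, maxDepth int) []string {
	nodeMap := map[string]dfsNode{start: {}}
	queue := []string{start}
	var visited []string

	for len(queue) > 0 {
		now := queue[0]
		queue = queue[1:]
		nowNode := nodeMap[now]

		if maxDepth != 0 && nowNode.depth >= maxDepth {
			break
		}
		visited = append(visited, now)

		for _, next := range neighbors[now] {
			dfsN, seen := nodeMap[next]
			if !seen {
				dfsN = dfsNode{parent: now, depth: nowNode.depth + 1}
				nodeMap[next] = dfsN
			}
			if !dfsN.expanded {
				queue = append(queue, next)
			}
		}
		nowNode.expanded = true
		nodeMap[now] = nowNode
	}
	return visited
}

func main() {
	graph := map[string][]string{"sbom-a": {"dep-b", "dep-c"}, "dep-b": {"dep-d"}}
	fmt.Println(bfs(graph, "sbom-a", 2)) // [sbom-a dep-b dep-c]
}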
- } - } - } - return path, tableRows, nil -} - func removeDuplicateValuesFromPath(path []string) []string { keys := make(map[string]bool) var list []string diff --git a/pkg/guacanalytics/searchForSBOM.go b/pkg/guacanalytics/searchForSBOM.go new file mode 100644 index 0000000000..0844340157 --- /dev/null +++ b/pkg/guacanalytics/searchForSBOM.go @@ -0,0 +1,427 @@ +package guacanalytics + +import ( + "context" + "fmt" + "github.com/Khan/genqlient/graphql" + model "github.com/guacsec/guac/pkg/assembler/clients/generated" + "github.com/guacsec/guac/pkg/assembler/helpers" + "github.com/guacsec/guac/pkg/dependencies" + "github.com/jedib0t/go-pretty/v6/table" + "strings" +) + +const ( + guacType string = "guac" + noVulnType string = "novuln" + vexLinkStr string = "vexLink" + certifyVulnStr string = "certifyVuln" +) + +type pkgVersionNeighborQueryResults struct { + pkgVersionNeighborResponse *model.NeighborsResponse + isDep model.AllHasSBOMTreeIncludedDependenciesIsDependency +} + +type artifactVersionNeighborQueryResults struct { + pkgVersionNeighborResponse *model.NeighborsResponse + isArt model.AllHasSBOMTreeIncludedOccurrencesIsOccurrence +} + +func getVulnAndVexNeighborsForPackage(ctx context.Context, gqlclient graphql.Client, pkgID string, isDep model.AllHasSBOMTreeIncludedDependenciesIsDependency) (*pkgVersionNeighborQueryResults, error) { + pkgVersionNeighborResponse, err := model.Neighbors(ctx, gqlclient, pkgID, []model.Edge{model.EdgePackageCertifyVuln, model.EdgePackageCertifyVexStatement}) + if err != nil { + return nil, fmt.Errorf("failed to get neighbors for pkgID: %s with error %w", pkgID, err) + } + return &pkgVersionNeighborQueryResults{pkgVersionNeighborResponse: pkgVersionNeighborResponse, isDep: isDep}, nil +} + +func getVulnAndVexNeighborsForArtifact(ctx context.Context, gqlclient graphql.Client, pkgID string, isArt model.AllHasSBOMTreeIncludedOccurrencesIsOccurrence) (*artifactVersionNeighborQueryResults, error) { + pkgVersionNeighborResponse, err := model.Neighbors(ctx, gqlclient, pkgID, []model.Edge{model.EdgeArtifactCertifyVexStatement}) + if err != nil { + return nil, fmt.Errorf("failed to get neighbors for pkgID: %s with error %w", pkgID, err) + } + return &artifactVersionNeighborQueryResults{pkgVersionNeighborResponse: pkgVersionNeighborResponse, isArt: isArt}, nil +} + +// SearchForSBOMViaPkg takes in either a purl or URI for the initial value to find the hasSBOM node. +// From there is recursively searches through all the dependencies to determine if it contains hasSBOM nodes. +// It concurrent checks the package version node if it contains vulnerabilities and VEX data. +// The primaryCall parameter is used to know whether the searchString is expected to be a PURL. 
+func SearchForSBOMViaPkg(ctx context.Context, gqlclient graphql.Client, searchString string, maxLength int, primaryCall bool) ([]string, []table.Row, error) { + var path []string + var tableRows []table.Row + checkedPkgIDs := make(map[string]bool) + var collectedPkgVersionResults []*pkgVersionNeighborQueryResults + + queue := make([]string, 0) // the queue of nodes in bfs + type dfsNode struct { + expanded bool // true once all node neighbors are added to queue + parent string + pkgID string + depth int + } + nodeMap := map[string]dfsNode{} + + nodeMap[searchString] = dfsNode{} + queue = append(queue, searchString) + + for len(queue) > 0 { + now := queue[0] + queue = queue[1:] + nowNode := nodeMap[now] + + if maxLength != 0 && nowNode.depth >= maxLength { + break + } + + var foundHasSBOMPkg *model.HasSBOMsResponse + var err error + + // if the initial depth, check if it's a purl or an SBOM URI. Otherwise, always search by pkgID + // note that primaryCall will be static throughout the entire function. + if nowNode.depth == 0 && primaryCall { + pkgResponse, err := getPkgResponseFromPurl(ctx, gqlclient, now) + if err != nil { + return nil, nil, fmt.Errorf("getPkgResponseFromPurl - error: %w", err) + } + foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Package: &model.PkgSpec{Id: &pkgResponse.Packages[0].Namespaces[0].Names[0].Versions[0].Id}}}) + if err != nil { + return nil, nil, fmt.Errorf("failed getting hasSBOM via purl: %s with error :%w", now, err) + } + } else { + foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Package: &model.PkgSpec{Id: &now}}}) + if err != nil { + return nil, nil, fmt.Errorf("failed getting hasSBOM via purl: %s with error :%w", now, err) + } + } + + for _, hasSBOM := range foundHasSBOMPkg.HasSBOM { + if pkgResponse, ok := foundHasSBOMPkg.HasSBOM[0].Subject.(*model.AllHasSBOMTreeSubjectPackage); ok { + if pkgResponse.Type != guacType { + if !checkedPkgIDs[pkgResponse.Namespaces[0].Names[0].Versions[0].Id] { + vulnPath, pkgVulnTableRows, err := queryVulnsViaPackageNeighbors(ctx, gqlclient, pkgResponse.Namespaces[0].Names[0].Versions[0].Id) + if err != nil { + return nil, nil, fmt.Errorf("error querying neighbor: %w", err) + } + path = append(path, vulnPath...) + tableRows = append(tableRows, pkgVulnTableRows...) + path = append([]string{pkgResponse.Namespaces[0].Names[0].Versions[0].Id, + pkgResponse.Namespaces[0].Names[0].Id, pkgResponse.Namespaces[0].Id, + pkgResponse.Id}, path...) + checkedPkgIDs[pkgResponse.Namespaces[0].Names[0].Versions[0].Id] = true + } + } + } + for _, isDep := range hasSBOM.IncludedDependencies { + if isDep.DependencyPackage.Type == guacType { + continue + } + var matchingDepPkgVersionIDs []string + if len(isDep.DependencyPackage.Namespaces[0].Names[0].Versions) == 0 { + findMatchingDepPkgVersionIDs, err := dependencies.FindDepPkgVersionIDs(ctx, gqlclient, isDep.DependencyPackage.Type, isDep.DependencyPackage.Namespaces[0].Namespace, + isDep.DependencyPackage.Namespaces[0].Names[0].Name, isDep.VersionRange) + if err != nil { + return nil, nil, fmt.Errorf("error from FindMatchingDepPkgVersionIDs:%w", err) + } + matchingDepPkgVersionIDs = append(matchingDepPkgVersionIDs, findMatchingDepPkgVersionIDs...) 
+ } else { + matchingDepPkgVersionIDs = append(matchingDepPkgVersionIDs, isDep.DependencyPackage.Namespaces[0].Names[0].Versions[0].Id) + } + for _, pkgID := range matchingDepPkgVersionIDs { + dfsN, seen := nodeMap[pkgID] + if !seen { + dfsN = dfsNode{ + parent: now, + pkgID: pkgID, + depth: nowNode.depth + 1, + } + nodeMap[pkgID] = dfsN + } + if !dfsN.expanded { + queue = append(queue, pkgID) + } + pkgVersionNeighbors, err := getVulnAndVexNeighborsForPackage(ctx, gqlclient, pkgID, isDep) + if err != nil { + return nil, nil, fmt.Errorf("getVulnAndVexNeighborsForPackage failed with error: %w", err) + } + collectedPkgVersionResults = append(collectedPkgVersionResults, pkgVersionNeighbors) + checkedPkgIDs[pkgID] = true + } + } + } + nowNode.expanded = true + nodeMap[now] = nowNode + } + + checkedCertifyVulnIDs := make(map[string]bool) + + // Collect results from the channel + for _, result := range collectedPkgVersionResults { + for _, neighbor := range result.pkgVersionNeighborResponse.Neighbors { + if certifyVuln, ok := neighbor.(*model.NeighborsNeighborsCertifyVuln); ok { + if !checkedCertifyVulnIDs[certifyVuln.Id] && certifyVuln.Vulnerability.Type != noVulnType { + checkedCertifyVulnIDs[certifyVuln.Id] = true + for _, vuln := range certifyVuln.Vulnerability.VulnerabilityIDs { + tableRows = append(tableRows, table.Row{certifyVulnStr, certifyVuln.Id, "vulnerability ID: " + vuln.VulnerabilityID}) + path = append(path, []string{vuln.Id, certifyVuln.Id, + certifyVuln.Package.Namespaces[0].Names[0].Versions[0].Id, + certifyVuln.Package.Namespaces[0].Names[0].Id, certifyVuln.Package.Namespaces[0].Id, + certifyVuln.Package.Id}...) + } + path = append(path, result.isDep.Id, result.isDep.Package.Namespaces[0].Names[0].Versions[0].Id, + result.isDep.Package.Namespaces[0].Names[0].Id, result.isDep.Package.Namespaces[0].Id, + result.isDep.Package.Id) + } + } + + if certifyVex, ok := neighbor.(*model.NeighborsNeighborsCertifyVEXStatement); ok { + for _, vuln := range certifyVex.Vulnerability.VulnerabilityIDs { + tableRows = append(tableRows, table.Row{vexLinkStr, certifyVex.Id, "vulnerability ID: " + vuln.VulnerabilityID + ", Vex Status: " + string(certifyVex.Status) + ", Subject: " + VexSubjectString(certifyVex.Subject)}) + path = append(path, certifyVex.Id, vuln.Id) + } + path = append(path, vexSubjectIds(certifyVex.Subject)...) 
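When the collected neighbor results are flattened into path, the same GUAC node ID can appear many times (once per dependency edge that reaches it), so the code deduplicates both the CertifyVuln nodes it reports (checkedCertifyVulnIDs) and, back in the CLI, the final path via removeDuplicateValuesFromPath. A minimal, order-preserving version of that map-based dedup is sketched below; removeDuplicates is an illustrative name, not a function in this patch.

package main

import "fmt"

// removeDuplicates keeps the first occurrence of each ID and preserves order,
// mirroring the map[string]bool "seen" pattern used by checkedCertifyVulnIDs
// and removeDuplicateValuesFromPath.
func removeDuplicates(ids []string) []string {
	seen := make(map[string]bool, len(ids))
	var out []string
	for _, id := range ids {
		if !seen[id] {
			seen[id] = true
			out = append(out, id)
		}
	}
	return out
}

func main() {
	path := []string{"12", "7", "12", "33", "7"}
	fmt.Println(removeDuplicates(path)) // [12 7 33]
}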
+ } + } + } + return path, tableRows, nil +} + +func SearchForSBOMViaArtifact(ctx context.Context, gqlclient graphql.Client, searchString string, maxLength int, primaryCall bool) ([]string, []table.Row, error) { + var path []string + var tableRows []table.Row + checkedArtifactIDs := make(map[string]bool) + var collectedArtifactResults []*artifactVersionNeighborQueryResults + + queue := make([]string, 0) // the queue of nodes in bfs + type dfsNode struct { + expanded bool // true once all node neighbors are added to queue + parent string + artID string + depth int + } + nodeMap := map[string]dfsNode{} + + nodeMap[searchString] = dfsNode{} + queue = append(queue, searchString) + + for len(queue) > 0 { + now := queue[0] + queue = queue[1:] + nowNode := nodeMap[now] + + if maxLength != 0 && nowNode.depth >= maxLength { + break + } + + var foundHasSBOMPkg *model.HasSBOMsResponse + var err error + + if nowNode.depth == 0 && primaryCall { + split := strings.Split(now, ":") + if len(split) != 2 { + return nil, nil, fmt.Errorf("error splitting search string %s, search string should have two sections algorithm and digest: %w", now, split) + } + algorithm := strings.ToLower(split[0]) + digest := strings.ToLower(split[1]) + + foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{ + Artifact: &model.ArtifactSpec{ + Algorithm: &algorithm, + Digest: &digest, + }, + }}) + if err != nil { + return nil, nil, fmt.Errorf("failed getting hasSBOM via URI: %s with error: %w", now, err) + } + } else { + foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Artifact: &model.ArtifactSpec{Id: &now}}}) + if err != nil { + return nil, nil, fmt.Errorf("failed getting hasSBOM via artifact: %s with error :%w", now, err) + } + } + + for _, hasSBOM := range foundHasSBOMPkg.HasSBOM { + if pkgResponse, ok := foundHasSBOMPkg.HasSBOM[0].Subject.(*model.AllHasSBOMTreeSubjectPackage); ok { + if pkgResponse.Type != guacType { + if !checkedArtifactIDs[pkgResponse.Namespaces[0].Names[0].Versions[0].Id] { + vulnPath, pkgVulnTableRows, err := queryVulnsViaPackageNeighbors(ctx, gqlclient, pkgResponse.Namespaces[0].Names[0].Versions[0].Id) + if err != nil { + return nil, nil, fmt.Errorf("error querying neighbor: %w", err) + } + path = append(path, vulnPath...) + tableRows = append(tableRows, pkgVulnTableRows...) + path = append([]string{pkgResponse.Namespaces[0].Names[0].Versions[0].Id, + pkgResponse.Namespaces[0].Names[0].Id, pkgResponse.Namespaces[0].Id, + pkgResponse.Id}, path...) 
+ checkedArtifactIDs[pkgResponse.Namespaces[0].Names[0].Versions[0].Id] = true + } + } + } + for _, isOcc := range hasSBOM.IncludedOccurrences { + if *isOcc.Subject.GetTypename() == guacType { + continue + } + var matchingArtifactIDs []string + matchingArtifactIDs = append(matchingArtifactIDs, isOcc.Artifact.Id) + + for _, artID := range matchingArtifactIDs { + dfsN, seen := nodeMap[artID] + if !seen { + dfsN = dfsNode{ + parent: now, + artID: artID, + depth: nowNode.depth + 1, + } + nodeMap[artID] = dfsN + } + if !dfsN.expanded { + queue = append(queue, artID) + } + artifactNeighbors, err := getVulnAndVexNeighborsForArtifact(ctx, gqlclient, artID, isOcc) + if err != nil { + return nil, nil, fmt.Errorf("getVulnAndVexNeighborsForArtifact failed with error: %w", err) + } + collectedArtifactResults = append(collectedArtifactResults, artifactNeighbors) + checkedArtifactIDs[artID] = true + } + } + } + nowNode.expanded = true + nodeMap[now] = nowNode + } + + checkedCertifyVulnIDs := make(map[string]bool) + + // Collect results from the channel + for _, result := range collectedArtifactResults { + for _, neighbor := range result.pkgVersionNeighborResponse.Neighbors { + if certifyVuln, ok := neighbor.(*model.NeighborsNeighborsCertifyVuln); ok { + if !checkedCertifyVulnIDs[certifyVuln.Id] && certifyVuln.Vulnerability.Type != noVulnType { + checkedCertifyVulnIDs[certifyVuln.Id] = true + for _, vuln := range certifyVuln.Vulnerability.VulnerabilityIDs { + tableRows = append(tableRows, table.Row{certifyVulnStr, certifyVuln.Id, "vulnerability ID: " + vuln.VulnerabilityID}) + path = append(path, []string{vuln.Id, certifyVuln.Id, + certifyVuln.Package.Namespaces[0].Names[0].Versions[0].Id, + certifyVuln.Package.Namespaces[0].Names[0].Id, certifyVuln.Package.Namespaces[0].Id, + certifyVuln.Package.Id}...) + } + path = append(path, result.isArt.Id, result.isArt.Artifact.Id) + } + } + + if certifyVex, ok := neighbor.(*model.NeighborsNeighborsCertifyVEXStatement); ok { + for _, vuln := range certifyVex.Vulnerability.VulnerabilityIDs { + tableRows = append(tableRows, table.Row{vexLinkStr, certifyVex.Id, "vulnerability ID: " + vuln.VulnerabilityID + ", Vex Status: " + string(certifyVex.Status) + ", Subject: " + VexSubjectString(certifyVex.Subject)}) + path = append(path, certifyVex.Id, vuln.Id) + } + path = append(path, vexSubjectIds(certifyVex.Subject)...) 
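On the first hop, SearchForSBOMViaArtifact treats the search string as algorithm:digest and lower-cases both halves before building the ArtifactSpec filter for the hasSBOM query. A small stand-alone version of that parsing and validation step is sketched below; parseArtifactRef is an illustrative helper, not part of this patch.

package main

import (
	"fmt"
	"strings"
)

// parseArtifactRef splits an "algorithm:digest" reference and normalizes both
// parts to lower case, returning an error when the form is wrong — the same
// validation the artifact search path performs before querying hasSBOM.
func parseArtifactRef(ref string) (algorithm, digest string, err error) {
	split := strings.Split(ref, ":")
	if len(split) != 2 {
		return "", "", fmt.Errorf("artifact %q must be in algorithm:digest form", ref)
	}
	return strings.ToLower(split[0]), strings.ToLower(split[1]), nil
}

func main() {
	alg, dig, err := parseArtifactRef("SHA256:0F5A2B")
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println(alg, dig) // sha256 0f5a2b
}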
+ } + } + } + return path, tableRows, nil +} + +func getPkgResponseFromPurl(ctx context.Context, gqlclient graphql.Client, purl string) (*model.PackagesResponse, error) { + pkgInput, err := helpers.PurlToPkg(purl) + if err != nil { + return nil, fmt.Errorf("failed to parse PURL: %v", err) + } + + pkgQualifierFilter := []model.PackageQualifierSpec{} + for _, qualifier := range pkgInput.Qualifiers { + // to prevent https://github.com/golang/go/discussions/56010 + qualifier := qualifier + pkgQualifierFilter = append(pkgQualifierFilter, model.PackageQualifierSpec{ + Key: qualifier.Key, + Value: &qualifier.Value, + }) + } + + pkgFilter := &model.PkgSpec{ + Type: &pkgInput.Type, + Namespace: pkgInput.Namespace, + Name: &pkgInput.Name, + Version: pkgInput.Version, + Subpath: pkgInput.Subpath, + Qualifiers: pkgQualifierFilter, + } + pkgResponse, err := model.Packages(ctx, gqlclient, *pkgFilter) + if err != nil { + return nil, fmt.Errorf("error querying for package: %v", err) + } + if len(pkgResponse.Packages) != 1 { + return nil, fmt.Errorf("failed to located package based on purl") + } + return pkgResponse, nil +} + +func vexSubjectIds(s model.AllCertifyVEXStatementSubjectPackageOrArtifact) []string { + switch v := s.(type) { + case *model.AllCertifyVEXStatementSubjectArtifact: + return []string{v.Id} + case *model.AllCertifyVEXStatementSubjectPackage: + return []string{ + v.Id, + v.Namespaces[0].Id, + v.Namespaces[0].Names[0].Id, + v.Namespaces[0].Names[0].Versions[0].Id} + default: + return []string{} + } +} + +func queryVulnsViaPackageNeighbors(ctx context.Context, gqlclient graphql.Client, pkgVersionID string) ([]string, []table.Row, error) { + var path []string + var tableRows []table.Row + var edgeTypes = []model.Edge{model.EdgePackageCertifyVuln, model.EdgePackageCertifyVexStatement} + + pkgVersionNeighborResponse, err := model.Neighbors(ctx, gqlclient, pkgVersionID, edgeTypes) + if err != nil { + return nil, nil, fmt.Errorf("error querying neighbor for vulnerability: %w", err) + } + certifyVulnFound := false + for _, neighbor := range pkgVersionNeighborResponse.Neighbors { + if certifyVuln, ok := neighbor.(*model.NeighborsNeighborsCertifyVuln); ok { + certifyVulnFound = true + if certifyVuln.Vulnerability.Type != noVulnType { + for _, vuln := range certifyVuln.Vulnerability.VulnerabilityIDs { + tableRows = append(tableRows, table.Row{certifyVulnStr, certifyVuln.Id, "vulnerability ID: " + vuln.VulnerabilityID}) + path = append(path, []string{vuln.Id, certifyVuln.Id, + certifyVuln.Package.Namespaces[0].Names[0].Versions[0].Id, + certifyVuln.Package.Namespaces[0].Names[0].Id, certifyVuln.Package.Namespaces[0].Id, + certifyVuln.Package.Id}...) + } + } + } + + if certifyVex, ok := neighbor.(*model.NeighborsNeighborsCertifyVEXStatement); ok { + for _, vuln := range certifyVex.Vulnerability.VulnerabilityIDs { + tableRows = append(tableRows, table.Row{vexLinkStr, certifyVex.Id, "vulnerability ID: " + vuln.VulnerabilityID + ", Vex Status: " + string(certifyVex.Status) + ", Subject: " + VexSubjectString(certifyVex.Subject)}) + path = append(path, certifyVex.Id, vuln.Id) + } + path = append(path, vexSubjectIds(certifyVex.Subject)...) + } + + } + if !certifyVulnFound { + return nil, nil, fmt.Errorf("error certify vulnerability node not found, incomplete data. 
Please ensure certifier has run by running guacone certifier osv") + } + return path, tableRows, nil +} + +func VexSubjectString(s model.AllCertifyVEXStatementSubjectPackageOrArtifact) string { + switch v := s.(type) { + case *model.AllCertifyVEXStatementSubjectArtifact: + return fmt.Sprintf("artifact (id:%v) %v:%v", v.Id, v.Algorithm, v.Digest) + case *model.AllCertifyVEXStatementSubjectPackage: + return fmt.Sprintf("package (id:%v) %v:%v/%v@%v", + v.Id, + v.Type, + v.Namespaces[0].Namespace, + v.Namespaces[0].Names[0].Name, + v.Namespaces[0].Names[0].Versions[0].Version) + default: + return "unknown subject" + } +} From 9ebefe0b0c8319a043642324974463e0b54e259b Mon Sep 17 00:00:00 2001 From: nathannaveen <42319948+nathannaveen@users.noreply.github.com> Date: Thu, 18 Jul 2024 16:24:31 -0500 Subject: [PATCH 4/7] Fix tests Signed-off-by: nathannaveen <42319948+nathannaveen@users.noreply.github.com> --- cmd/guacone/cmd/vulnerability.go | 6 +++ pkg/guacanalytics/searchForSBOM.go | 62 ++++++++++++++++-------------- 2 files changed, 39 insertions(+), 29 deletions(-) diff --git a/cmd/guacone/cmd/vulnerability.go b/cmd/guacone/cmd/vulnerability.go index aa33c56a49..dc435cc839 100644 --- a/cmd/guacone/cmd/vulnerability.go +++ b/cmd/guacone/cmd/vulnerability.go @@ -139,6 +139,9 @@ func printVulnInfo(ctx context.Context, gqlclient graphql.Client, t table.Writer if len(depVulnPath) == 0 { err, o := searchStringToOccurrence(ctx, gqlclient, opts, logger) + if err != nil { + logger.Fatalf("error searching for occurrence: %v", err) + } // The primaryCall parameter in searchForSBOMViaArtifact is there for us to know that // the searchString is expected to be a PURL, but isn't, so we have to check via artifacts instead of PURLs. @@ -163,6 +166,9 @@ func printVulnInfo(ctx context.Context, gqlclient graphql.Client, t table.Writer if len(depVulnPath) == 0 { err, subjectPackage := searchStringToPkg(ctx, gqlclient, opts, logger) + if err != nil { + logger.Fatalf("error searching for packages: %v", err) + } // The primaryCall parameter in searchForSBOMViaPkg is there for us to know that // the searchString is expected to be an artifact, but isn't, so we have to check via PURLs instead of artifacts. 
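A note on the traversal that the searchForSBOM.go hunks below keep reshaping: SearchForSBOMViaPkg and SearchForSBOMViaArtifact share the same BFS bookkeeping, a FIFO queue plus a nodeMap of dfsNode entries recording parent, depth, and an expanded flag, and expansion stops once a popped node's depth reaches maxLength. The following is a minimal, GUAC-independent sketch of that pattern only; the bfsWithDepthCap name, the neighborsOf callback, the simplified de-duplication rule, and the toy graph in main are illustrative assumptions for this sketch, not GUAC code or APIs.

package main

import "fmt"

// dfsNode mirrors the bookkeeping in searchForSBOM.go: parent and depth are
// recorded when a node is first discovered; expanded flips to true once the
// node's neighbors have been pushed onto the queue.
type dfsNode struct {
	expanded bool
	parent   string
	depth    int
}

// bfsWithDepthCap walks a graph breadth-first from start, asking neighborsOf
// for each node's children, and stops once it pops a node whose depth has
// reached maxDepth (0 means no limit), the same cut-off rule the search
// functions apply via maxLength. It returns node IDs in discovery order.
func bfsWithDepthCap(start string, maxDepth int, neighborsOf func(id string) []string) []string {
	nodeMap := map[string]dfsNode{start: {}}
	queue := []string{start}
	var visited []string

	for len(queue) > 0 {
		now := queue[0]
		queue = queue[1:]
		nowNode := nodeMap[now]
		if nowNode.expanded {
			continue // already processed via another parent
		}
		visited = append(visited, now)

		if maxDepth != 0 && nowNode.depth >= maxDepth {
			break
		}

		for _, next := range neighborsOf(now) {
			if _, seen := nodeMap[next]; !seen {
				nodeMap[next] = dfsNode{parent: now, depth: nowNode.depth + 1}
				queue = append(queue, next)
			}
		}

		nowNode.expanded = true
		nodeMap[now] = nowNode
	}
	return visited
}

func main() {
	// Toy dependency edges standing in for a hasSBOM node's includedDependencies.
	deps := map[string][]string{
		"top-level-pkg": {"libA", "libB"},
		"libA":          {"libC"},
		"libB":          {"libC"},
	}
	fmt.Println(bfsWithDepthCap("top-level-pkg", 2, func(id string) []string { return deps[id] }))
	// Prints: [top-level-pkg libA libB libC]
}

In the real functions, the neighborsOf step corresponds to fetching the hasSBOM node for the current ID and reading its IncludedDependencies (package search) or IncludedOccurrences (artifact search), and each newly discovered node is also passed to getVulnAndVexNeighborsForPackage or getVulnAndVexNeighborsForArtifact before being marked expanded.
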
diff --git a/pkg/guacanalytics/searchForSBOM.go b/pkg/guacanalytics/searchForSBOM.go index 0844340157..70bbd4fb86 100644 --- a/pkg/guacanalytics/searchForSBOM.go +++ b/pkg/guacanalytics/searchForSBOM.go @@ -6,7 +6,6 @@ import ( "github.com/Khan/genqlient/graphql" model "github.com/guacsec/guac/pkg/assembler/clients/generated" "github.com/guacsec/guac/pkg/assembler/helpers" - "github.com/guacsec/guac/pkg/dependencies" "github.com/jedib0t/go-pretty/v6/table" "strings" ) @@ -117,36 +116,41 @@ func SearchForSBOMViaPkg(ctx context.Context, gqlclient graphql.Client, searchSt if isDep.DependencyPackage.Type == guacType { continue } - var matchingDepPkgVersionIDs []string - if len(isDep.DependencyPackage.Namespaces[0].Names[0].Versions) == 0 { - findMatchingDepPkgVersionIDs, err := dependencies.FindDepPkgVersionIDs(ctx, gqlclient, isDep.DependencyPackage.Type, isDep.DependencyPackage.Namespaces[0].Namespace, - isDep.DependencyPackage.Namespaces[0].Names[0].Name, isDep.VersionRange) - if err != nil { - return nil, nil, fmt.Errorf("error from FindMatchingDepPkgVersionIDs:%w", err) + depPkgID := isDep.DependencyPackage.Namespaces[0].Names[0].Versions[0].Id + dfsN, seen := nodeMap[depPkgID] + if !seen { + dfsN = dfsNode{ + parent: now, + pkgID: depPkgID, + depth: nowNode.depth + 1, } - matchingDepPkgVersionIDs = append(matchingDepPkgVersionIDs, findMatchingDepPkgVersionIDs...) - } else { - matchingDepPkgVersionIDs = append(matchingDepPkgVersionIDs, isDep.DependencyPackage.Namespaces[0].Names[0].Versions[0].Id) + nodeMap[depPkgID] = dfsN } - for _, pkgID := range matchingDepPkgVersionIDs { - dfsN, seen := nodeMap[pkgID] - if !seen { - dfsN = dfsNode{ - parent: now, - pkgID: pkgID, - depth: nowNode.depth + 1, - } - nodeMap[pkgID] = dfsN - } - if !dfsN.expanded { - queue = append(queue, pkgID) - } - pkgVersionNeighbors, err := getVulnAndVexNeighborsForPackage(ctx, gqlclient, pkgID, isDep) - if err != nil { - return nil, nil, fmt.Errorf("getVulnAndVexNeighborsForPackage failed with error: %w", err) + if !dfsN.expanded { + queue = append(queue, depPkgID) + } + pkgVersionNeighbors, err := getVulnAndVexNeighborsForPackage(ctx, gqlclient, depPkgID, isDep) + if err != nil { + return nil, nil, fmt.Errorf("getVulnAndVexNeighbors failed with error: %w", err) + } + collectedPkgVersionResults = append(collectedPkgVersionResults, pkgVersionNeighbors) + checkedPkgIDs[depPkgID] = true + + } + + for _, isDep := range hasSBOM.IncludedDependencies { + if isDep.DependencyPackage.Type == guacType { + continue + } + + depPkgID := isDep.DependencyPackage.Namespaces[0].Names[0].Versions[0].Id + if _, seen := nodeMap[depPkgID]; !seen { + dfsN := dfsNode{ + parent: now, + pkgID: depPkgID, + depth: nowNode.depth + 1, } - collectedPkgVersionResults = append(collectedPkgVersionResults, pkgVersionNeighbors) - checkedPkgIDs[pkgID] = true + nodeMap[depPkgID] = dfsN } } } @@ -220,7 +224,7 @@ func SearchForSBOMViaArtifact(ctx context.Context, gqlclient graphql.Client, sea if nowNode.depth == 0 && primaryCall { split := strings.Split(now, ":") if len(split) != 2 { - return nil, nil, fmt.Errorf("error splitting search string %s, search string should have two sections algorithm and digest: %w", now, split) + return nil, nil, fmt.Errorf("error splitting search string %s, search string should have two sections algorithm and digest: %v", now, split) } algorithm := strings.ToLower(split[0]) digest := strings.ToLower(split[1]) From 27d63bd0c604e1bde9602f5a1a774e47b20f46b8 Mon Sep 17 00:00:00 2001 From: nathannaveen 
<42319948+nathannaveen@users.noreply.github.com> Date: Mon, 9 Sep 2024 07:04:27 -0500 Subject: [PATCH 5/7] Updated based on code review Signed-off-by: nathannaveen <42319948+nathannaveen@users.noreply.github.com> --- pkg/guacanalytics/searchForSBOM.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pkg/guacanalytics/searchForSBOM.go b/pkg/guacanalytics/searchForSBOM.go index 70bbd4fb86..167236a365 100644 --- a/pkg/guacanalytics/searchForSBOM.go +++ b/pkg/guacanalytics/searchForSBOM.go @@ -191,6 +191,9 @@ func SearchForSBOMViaPkg(ctx context.Context, gqlclient graphql.Client, searchSt return path, tableRows, nil } +// SearchForSBOMViaArtifact takes in either a URI for the initial value to find the hasSBOM node. +// It concurrently checks the artifact node if it contains vulnerabilities and VEX data. +// The primaryCall parameter is used to know whether the searchString is expected to be an artifact or a package. func SearchForSBOMViaArtifact(ctx context.Context, gqlclient graphql.Client, searchString string, maxLength int, primaryCall bool) ([]string, []table.Row, error) { var path []string var tableRows []table.Row From b9f0318367068599f362c2c8abf48b4d918907e2 Mon Sep 17 00:00:00 2001 From: pxp928 Date: Tue, 17 Sep 2024 16:04:00 -0400 Subject: [PATCH 6/7] update vuln CLI to handle artifact being the subject Signed-off-by: pxp928 --- cmd/guacone/cmd/vulnerability.go | 125 ++++++-------------- pkg/guacanalytics/searchForSBOM.go | 181 +++-------------------------- 2 files changed, 54 insertions(+), 252 deletions(-) diff --git a/cmd/guacone/cmd/vulnerability.go b/cmd/guacone/cmd/vulnerability.go index dc435cc839..79008cfa98 100644 --- a/cmd/guacone/cmd/vulnerability.go +++ b/cmd/guacone/cmd/vulnerability.go @@ -18,12 +18,13 @@ package cmd import ( "context" "fmt" - "github.com/guacsec/guac/pkg/guacanalytics" - "go.uber.org/zap" "net/http" "os" "strings" + "github.com/guacsec/guac/pkg/guacanalytics" + "go.uber.org/zap" + "github.com/guacsec/guac/internal/testing/ptrfrom" "github.com/Khan/genqlient/graphql" @@ -126,50 +127,21 @@ func printVulnInfo(ctx context.Context, gqlclient graphql.Client, t table.Writer var path []string var tableRows []table.Row - if opts.isPurl { - // The primaryCall parameter in searchForSBOMViaPkg is there for us to know whether - // the searchString is expected to be a PURL, and we are searching via a purl. - depVulnPath, depVulnTableRows, err := guacanalytics.SearchForSBOMViaPkg(ctx, gqlclient, opts.searchString, opts.depth, true) - if err != nil { - logger.Fatalf("error searching via hasSBOM: %v", err) - } - - path = append(path, depVulnPath...) - tableRows = append(tableRows, depVulnTableRows...) - - if len(depVulnPath) == 0 { - err, o := searchStringToOccurrence(ctx, gqlclient, opts, logger) - if err != nil { - logger.Fatalf("error searching for occurrence: %v", err) - } - - // The primaryCall parameter in searchForSBOMViaArtifact is there for us to know that - // the searchString is expected to be a PURL, but isn't, so we have to check via artifacts instead of PURLs. - depVulnPath, depVulnTableRows, err = guacanalytics.SearchForSBOMViaArtifact(ctx, gqlclient, o.IsOccurrence[0].Artifact.Id, opts.depth, false) - if err != nil { - logger.Fatalf("error searching for SBOMs via artifact: %v", err) - } - - path = append(path, depVulnPath...) - tableRows = append(tableRows, depVulnTableRows...) 
- } - } else { - // The primaryCall parameter in searchForSBOMViaArtifact is there for us to know that - // the searchString isn't expected to be a PURL, so we have to check artifacts. - depVulnPath, depVulnTableRows, err := guacanalytics.SearchForSBOMViaArtifact(ctx, gqlclient, opts.searchString, opts.depth, true) - if err != nil { - logger.Fatalf("error searching for SBOMs via artifact: %v", err) - } + // The primaryCall parameter in searchForSBOMViaPkg is there for us to know whether + // the searchString is expected to be a PURL, and we are searching via a purl. + depVulnPath, depVulnTableRows, err := guacanalytics.SearchForSBOMViaPkg(ctx, gqlclient, opts.searchString, opts.depth, opts.isPurl) + if err != nil { + logger.Fatalf("error searching via hasSBOM: %v", err) + } - path = append(path, depVulnPath...) - tableRows = append(tableRows, depVulnTableRows...) + path = append(path, depVulnPath...) + tableRows = append(tableRows, depVulnTableRows...) - if len(depVulnPath) == 0 { - err, subjectPackage := searchStringToPkg(ctx, gqlclient, opts, logger) - if err != nil { - logger.Fatalf("error searching for packages: %v", err) - } + if len(depVulnPath) == 0 { + occur := searchArtToPkg(ctx, gqlclient, opts.searchString, logger) + subjectPackage, ok := occur.IsOccurrence[0].Subject.(*model.AllIsOccurrencesTreeSubjectPackage) + if ok { // The primaryCall parameter in searchForSBOMViaPkg is there for us to know that // the searchString is expected to be an artifact, but isn't, so we have to check via PURLs instead of artifacts. depVulnPath, depVulnTableRows, err = guacanalytics.SearchForSBOMViaPkg(ctx, gqlclient, subjectPackage.Namespaces[0].Names[0].Versions[0].Id, opts.depth, false) @@ -192,8 +164,8 @@ func printVulnInfo(ctx context.Context, gqlclient graphql.Client, t table.Writer } } -func searchStringToPkg(ctx context.Context, gqlclient graphql.Client, opts queryOptions, logger *zap.SugaredLogger) (error, *model.AllIsOccurrencesTreeSubjectPackage) { - split := strings.Split(opts.searchString, ":") +func searchArtToPkg(ctx context.Context, gqlclient graphql.Client, searchString string, logger *zap.SugaredLogger) *model.OccurrencesResponse { + split := strings.Split(searchString, ":") if len(split) != 2 { logger.Fatalf("failed to parse artifact. 
Needs to be in algorithm:digest form") } @@ -209,47 +181,7 @@ func searchStringToPkg(ctx context.Context, gqlclient graphql.Client, opts query logger.Fatalf("error querying for occurrences: %v", err) } - subjectPackage, ok := o.IsOccurrence[0].Subject.(*model.AllIsOccurrencesTreeSubjectPackage) - if !ok { - logger.Fatalf("error asserting type for Subject as *model.AllIsOccurrencesTreeSubjectPackage") - } - return err, subjectPackage -} - -func searchStringToOccurrence(ctx context.Context, gqlclient graphql.Client, opts queryOptions, logger *zap.SugaredLogger) (error, *model.OccurrencesResponse) { - pkgInput, err := helpers.PurlToPkg(opts.searchString) - if err != nil { - logger.Fatalf("failed to parse PURL: %v", err) - } - - pkgQualifierFilter := []model.PackageQualifierSpec{} - for _, qualifier := range pkgInput.Qualifiers { - // to prevent https://github.com/golang/go/discussions/56010 - qualifier := qualifier - pkgQualifierFilter = append(pkgQualifierFilter, model.PackageQualifierSpec{ - Key: qualifier.Key, - Value: &qualifier.Value, - }) - } - - pkgFilter := &model.PkgSpec{ - Type: &pkgInput.Type, - Namespace: pkgInput.Namespace, - Name: &pkgInput.Name, - Version: pkgInput.Version, - Subpath: pkgInput.Subpath, - Qualifiers: pkgQualifierFilter, - } - - o, err := model.Occurrences(ctx, gqlclient, model.IsOccurrenceSpec{ - Subject: &model.PackageOrSourceSpec{ - Package: pkgFilter, - }, - }) - if err != nil { - logger.Fatalf("error querying for occurrences: %v", err) - } - return err, o + return o } func printVulnInfoByVulnId(ctx context.Context, gqlclient graphql.Client, t table.Writer, opts queryOptions) { @@ -272,8 +204,9 @@ func printVulnInfoByVulnId(ctx context.Context, gqlclient graphql.Client, t tabl if err != nil { logger.Fatalf("getPkgResponseFromPurl - error: %v", err) } - path, tableRows, err = queryVulnsViaVulnNodeNeighbors(ctx, gqlclient, pkgResponse.Packages[0].Namespaces[0].Names[0].Versions[0].Id, vulnResponse.Vulnerabilities, opts.depth, opts.pathsToReturn) - if err != nil { + var vulnNeighborError error + path, tableRows, vulnNeighborError = queryVulnsViaVulnNodeNeighbors(ctx, gqlclient, pkgResponse.Packages[0].Namespaces[0].Names[0].Versions[0].Id, vulnResponse.Vulnerabilities, opts.depth, opts.pathsToReturn) + if vulnNeighborError != nil { logger.Fatalf("error querying neighbor: %v", err) } } else { @@ -285,10 +218,22 @@ func printVulnInfoByVulnId(ctx context.Context, gqlclient graphql.Client, t tabl logger.Fatalf("failed to located singular hasSBOM based on URI") } if pkgResponse, ok := foundHasSBOMPkg.HasSBOM[0].Subject.(*model.AllHasSBOMTreeSubjectPackage); ok { - path, tableRows, err = queryVulnsViaVulnNodeNeighbors(ctx, gqlclient, pkgResponse.Namespaces[0].Names[0].Versions[0].Id, vulnResponse.Vulnerabilities, opts.depth, opts.pathsToReturn) - if err != nil { + var vulnNeighborError error + path, tableRows, vulnNeighborError = queryVulnsViaVulnNodeNeighbors(ctx, gqlclient, pkgResponse.Namespaces[0].Names[0].Versions[0].Id, vulnResponse.Vulnerabilities, opts.depth, opts.pathsToReturn) + if vulnNeighborError != nil { logger.Fatalf("error querying neighbor: %v", err) } + } else if artResponse, ok := foundHasSBOMPkg.HasSBOM[0].Subject.(*model.AllHasSBOMTreeSubjectArtifact); ok { + occur := searchArtToPkg(ctx, gqlclient, artResponse.Algorithm+":"+artResponse.Digest, logger) + subjectPackage, ok := occur.IsOccurrence[0].Subject.(*model.AllIsOccurrencesTreeSubjectPackage) + if ok { + var vulnNeighborError error + path, tableRows, vulnNeighborError = 
queryVulnsViaVulnNodeNeighbors(ctx, gqlclient, subjectPackage.Namespaces[0].Names[0].Versions[0].Id, vulnResponse.Vulnerabilities, opts.depth, opts.pathsToReturn) + if vulnNeighborError != nil { + logger.Fatalf("error querying neighbor: %v", err) + } + } + } else { logger.Fatalf("located hasSBOM does not have a subject that is a package") } diff --git a/pkg/guacanalytics/searchForSBOM.go b/pkg/guacanalytics/searchForSBOM.go index 167236a365..3391c03eda 100644 --- a/pkg/guacanalytics/searchForSBOM.go +++ b/pkg/guacanalytics/searchForSBOM.go @@ -3,11 +3,11 @@ package guacanalytics import ( "context" "fmt" + "github.com/Khan/genqlient/graphql" model "github.com/guacsec/guac/pkg/assembler/clients/generated" "github.com/guacsec/guac/pkg/assembler/helpers" "github.com/jedib0t/go-pretty/v6/table" - "strings" ) const ( @@ -22,11 +22,6 @@ type pkgVersionNeighborQueryResults struct { isDep model.AllHasSBOMTreeIncludedDependenciesIsDependency } -type artifactVersionNeighborQueryResults struct { - pkgVersionNeighborResponse *model.NeighborsResponse - isArt model.AllHasSBOMTreeIncludedOccurrencesIsOccurrence -} - func getVulnAndVexNeighborsForPackage(ctx context.Context, gqlclient graphql.Client, pkgID string, isDep model.AllHasSBOMTreeIncludedDependenciesIsDependency) (*pkgVersionNeighborQueryResults, error) { pkgVersionNeighborResponse, err := model.Neighbors(ctx, gqlclient, pkgID, []model.Edge{model.EdgePackageCertifyVuln, model.EdgePackageCertifyVexStatement}) if err != nil { @@ -35,19 +30,11 @@ func getVulnAndVexNeighborsForPackage(ctx context.Context, gqlclient graphql.Cli return &pkgVersionNeighborQueryResults{pkgVersionNeighborResponse: pkgVersionNeighborResponse, isDep: isDep}, nil } -func getVulnAndVexNeighborsForArtifact(ctx context.Context, gqlclient graphql.Client, pkgID string, isArt model.AllHasSBOMTreeIncludedOccurrencesIsOccurrence) (*artifactVersionNeighborQueryResults, error) { - pkgVersionNeighborResponse, err := model.Neighbors(ctx, gqlclient, pkgID, []model.Edge{model.EdgeArtifactCertifyVexStatement}) - if err != nil { - return nil, fmt.Errorf("failed to get neighbors for pkgID: %s with error %w", pkgID, err) - } - return &artifactVersionNeighborQueryResults{pkgVersionNeighborResponse: pkgVersionNeighborResponse, isArt: isArt}, nil -} - // SearchForSBOMViaPkg takes in either a purl or URI for the initial value to find the hasSBOM node. // From there is recursively searches through all the dependencies to determine if it contains hasSBOM nodes. // It concurrent checks the package version node if it contains vulnerabilities and VEX data. -// The primaryCall parameter is used to know whether the searchString is expected to be a PURL. -func SearchForSBOMViaPkg(ctx context.Context, gqlclient graphql.Client, searchString string, maxLength int, primaryCall bool) ([]string, []table.Row, error) { +// The isPurl parameter is used to know whether the searchString is expected to be a PURL. +func SearchForSBOMViaPkg(ctx context.Context, gqlclient graphql.Client, searchString string, maxLength int, isPurl bool) ([]string, []table.Row, error) { var path []string var tableRows []table.Row checkedPkgIDs := make(map[string]bool) @@ -79,14 +66,22 @@ func SearchForSBOMViaPkg(ctx context.Context, gqlclient graphql.Client, searchSt // if the initial depth, check if it's a purl or an SBOM URI. Otherwise, always search by pkgID // note that primaryCall will be static throughout the entire function. 
- if nowNode.depth == 0 && primaryCall { - pkgResponse, err := getPkgResponseFromPurl(ctx, gqlclient, now) - if err != nil { - return nil, nil, fmt.Errorf("getPkgResponseFromPurl - error: %w", err) - } - foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Package: &model.PkgSpec{Id: &pkgResponse.Packages[0].Namespaces[0].Names[0].Versions[0].Id}}}) - if err != nil { - return nil, nil, fmt.Errorf("failed getting hasSBOM via purl: %s with error :%w", now, err) + if nowNode.depth == 0 { + + if isPurl { + pkgResponse, err := getPkgResponseFromPurl(ctx, gqlclient, now) + if err != nil { + return nil, nil, fmt.Errorf("getPkgResponseFromPurl - error: %v", err) + } + foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Package: &model.PkgSpec{Id: &pkgResponse.Packages[0].Namespaces[0].Names[0].Versions[0].Id}}}) + if err != nil { + return nil, nil, fmt.Errorf("failed getting hasSBOM via purl: %s with error :%w", now, err) + } + } else { + foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Uri: &now}) + if err != nil { + return nil, nil, fmt.Errorf("failed getting hasSBOM via URI: %s with error: %w", now, err) + } } } else { foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Package: &model.PkgSpec{Id: &now}}}) @@ -191,144 +186,6 @@ func SearchForSBOMViaPkg(ctx context.Context, gqlclient graphql.Client, searchSt return path, tableRows, nil } -// SearchForSBOMViaArtifact takes in either a URI for the initial value to find the hasSBOM node. -// It concurrently checks the artifact node if it contains vulnerabilities and VEX data. -// The primaryCall parameter is used to know whether the searchString is expected to be an artifact or a package. 
-func SearchForSBOMViaArtifact(ctx context.Context, gqlclient graphql.Client, searchString string, maxLength int, primaryCall bool) ([]string, []table.Row, error) { - var path []string - var tableRows []table.Row - checkedArtifactIDs := make(map[string]bool) - var collectedArtifactResults []*artifactVersionNeighborQueryResults - - queue := make([]string, 0) // the queue of nodes in bfs - type dfsNode struct { - expanded bool // true once all node neighbors are added to queue - parent string - artID string - depth int - } - nodeMap := map[string]dfsNode{} - - nodeMap[searchString] = dfsNode{} - queue = append(queue, searchString) - - for len(queue) > 0 { - now := queue[0] - queue = queue[1:] - nowNode := nodeMap[now] - - if maxLength != 0 && nowNode.depth >= maxLength { - break - } - - var foundHasSBOMPkg *model.HasSBOMsResponse - var err error - - if nowNode.depth == 0 && primaryCall { - split := strings.Split(now, ":") - if len(split) != 2 { - return nil, nil, fmt.Errorf("error splitting search string %s, search string should have two sections algorithm and digest: %v", now, split) - } - algorithm := strings.ToLower(split[0]) - digest := strings.ToLower(split[1]) - - foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{ - Artifact: &model.ArtifactSpec{ - Algorithm: &algorithm, - Digest: &digest, - }, - }}) - if err != nil { - return nil, nil, fmt.Errorf("failed getting hasSBOM via URI: %s with error: %w", now, err) - } - } else { - foundHasSBOMPkg, err = model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Subject: &model.PackageOrArtifactSpec{Artifact: &model.ArtifactSpec{Id: &now}}}) - if err != nil { - return nil, nil, fmt.Errorf("failed getting hasSBOM via artifact: %s with error :%w", now, err) - } - } - - for _, hasSBOM := range foundHasSBOMPkg.HasSBOM { - if pkgResponse, ok := foundHasSBOMPkg.HasSBOM[0].Subject.(*model.AllHasSBOMTreeSubjectPackage); ok { - if pkgResponse.Type != guacType { - if !checkedArtifactIDs[pkgResponse.Namespaces[0].Names[0].Versions[0].Id] { - vulnPath, pkgVulnTableRows, err := queryVulnsViaPackageNeighbors(ctx, gqlclient, pkgResponse.Namespaces[0].Names[0].Versions[0].Id) - if err != nil { - return nil, nil, fmt.Errorf("error querying neighbor: %w", err) - } - path = append(path, vulnPath...) - tableRows = append(tableRows, pkgVulnTableRows...) - path = append([]string{pkgResponse.Namespaces[0].Names[0].Versions[0].Id, - pkgResponse.Namespaces[0].Names[0].Id, pkgResponse.Namespaces[0].Id, - pkgResponse.Id}, path...) 
- checkedArtifactIDs[pkgResponse.Namespaces[0].Names[0].Versions[0].Id] = true - } - } - } - for _, isOcc := range hasSBOM.IncludedOccurrences { - if *isOcc.Subject.GetTypename() == guacType { - continue - } - var matchingArtifactIDs []string - matchingArtifactIDs = append(matchingArtifactIDs, isOcc.Artifact.Id) - - for _, artID := range matchingArtifactIDs { - dfsN, seen := nodeMap[artID] - if !seen { - dfsN = dfsNode{ - parent: now, - artID: artID, - depth: nowNode.depth + 1, - } - nodeMap[artID] = dfsN - } - if !dfsN.expanded { - queue = append(queue, artID) - } - artifactNeighbors, err := getVulnAndVexNeighborsForArtifact(ctx, gqlclient, artID, isOcc) - if err != nil { - return nil, nil, fmt.Errorf("getVulnAndVexNeighborsForArtifact failed with error: %w", err) - } - collectedArtifactResults = append(collectedArtifactResults, artifactNeighbors) - checkedArtifactIDs[artID] = true - } - } - } - nowNode.expanded = true - nodeMap[now] = nowNode - } - - checkedCertifyVulnIDs := make(map[string]bool) - - // Collect results from the channel - for _, result := range collectedArtifactResults { - for _, neighbor := range result.pkgVersionNeighborResponse.Neighbors { - if certifyVuln, ok := neighbor.(*model.NeighborsNeighborsCertifyVuln); ok { - if !checkedCertifyVulnIDs[certifyVuln.Id] && certifyVuln.Vulnerability.Type != noVulnType { - checkedCertifyVulnIDs[certifyVuln.Id] = true - for _, vuln := range certifyVuln.Vulnerability.VulnerabilityIDs { - tableRows = append(tableRows, table.Row{certifyVulnStr, certifyVuln.Id, "vulnerability ID: " + vuln.VulnerabilityID}) - path = append(path, []string{vuln.Id, certifyVuln.Id, - certifyVuln.Package.Namespaces[0].Names[0].Versions[0].Id, - certifyVuln.Package.Namespaces[0].Names[0].Id, certifyVuln.Package.Namespaces[0].Id, - certifyVuln.Package.Id}...) - } - path = append(path, result.isArt.Id, result.isArt.Artifact.Id) - } - } - - if certifyVex, ok := neighbor.(*model.NeighborsNeighborsCertifyVEXStatement); ok { - for _, vuln := range certifyVex.Vulnerability.VulnerabilityIDs { - tableRows = append(tableRows, table.Row{vexLinkStr, certifyVex.Id, "vulnerability ID: " + vuln.VulnerabilityID + ", Vex Status: " + string(certifyVex.Status) + ", Subject: " + VexSubjectString(certifyVex.Subject)}) - path = append(path, certifyVex.Id, vuln.Id) - } - path = append(path, vexSubjectIds(certifyVex.Subject)...) 
- } - } - } - return path, tableRows, nil -} - func getPkgResponseFromPurl(ctx context.Context, gqlclient graphql.Client, purl string) (*model.PackagesResponse, error) { pkgInput, err := helpers.PurlToPkg(purl) if err != nil { From 5b698f1418d32570810324959b1d32338e1d0f43 Mon Sep 17 00:00:00 2001 From: pxp928 Date: Tue, 17 Sep 2024 16:29:02 -0400 Subject: [PATCH 7/7] fix bug on searchDependencyPackagesReverse Signed-off-by: pxp928 --- cmd/guacone/cmd/vulnerability.go | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/cmd/guacone/cmd/vulnerability.go b/cmd/guacone/cmd/vulnerability.go index 79008cfa98..032f0b9782 100644 --- a/cmd/guacone/cmd/vulnerability.go +++ b/cmd/guacone/cmd/vulnerability.go @@ -210,20 +210,20 @@ func printVulnInfoByVulnId(ctx context.Context, gqlclient graphql.Client, t tabl logger.Fatalf("error querying neighbor: %v", err) } } else { - foundHasSBOMPkg, err := model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Uri: &opts.searchString}) + foundHasSBOM, err := model.HasSBOMs(ctx, gqlclient, model.HasSBOMSpec{Uri: &opts.searchString}) if err != nil { logger.Fatalf("failed getting hasSBOM via URI: %s with error: %w", opts.searchString, err) } - if len(foundHasSBOMPkg.HasSBOM) != 1 { + if len(foundHasSBOM.HasSBOM) != 1 { logger.Fatalf("failed to located singular hasSBOM based on URI") } - if pkgResponse, ok := foundHasSBOMPkg.HasSBOM[0].Subject.(*model.AllHasSBOMTreeSubjectPackage); ok { + if pkgResponse, ok := foundHasSBOM.HasSBOM[0].Subject.(*model.AllHasSBOMTreeSubjectPackage); ok { var vulnNeighborError error path, tableRows, vulnNeighborError = queryVulnsViaVulnNodeNeighbors(ctx, gqlclient, pkgResponse.Namespaces[0].Names[0].Versions[0].Id, vulnResponse.Vulnerabilities, opts.depth, opts.pathsToReturn) if vulnNeighborError != nil { logger.Fatalf("error querying neighbor: %v", err) } - } else if artResponse, ok := foundHasSBOMPkg.HasSBOM[0].Subject.(*model.AllHasSBOMTreeSubjectArtifact); ok { + } else if artResponse, ok := foundHasSBOM.HasSBOM[0].Subject.(*model.AllHasSBOMTreeSubjectArtifact); ok { occur := searchArtToPkg(ctx, gqlclient, artResponse.Algorithm+":"+artResponse.Digest, logger) subjectPackage, ok := occur.IsOccurrence[0].Subject.(*model.AllIsOccurrencesTreeSubjectPackage) if ok { @@ -233,9 +233,8 @@ func printVulnInfoByVulnId(ctx context.Context, gqlclient graphql.Client, t tabl logger.Fatalf("error querying neighbor: %v", err) } } - } else { - logger.Fatalf("located hasSBOM does not have a subject that is a package") + logger.Fatalf("located hasSBOM does not have a subject that is a package or artifact") } } if len(path) > 0 { @@ -380,8 +379,9 @@ func searchDependencyPackagesReverse(ctx context.Context, gqlclient graphql.Clie nodeMap[now] = nowNode } + // not found so return nil if topPkgID != "" && !found { - return nil, fmt.Errorf("no path found up to specified length") + return nil, nil } var now string