From 379de1e29e76bc18f7ef3dd0ac7bb851f4308b4b Mon Sep 17 00:00:00 2001
From: biennd4
Date: Mon, 11 Mar 2024 18:42:55 +0700
Subject: [PATCH 01/21] draft parser

---
 dojo/tools/checkmarx_cxflow_sast/__init__.py  |   0
 dojo/tools/checkmarx_cxflow_sast/parser.py    |  73 +++++++
 .../checkmarx_cxflow_sast/1-finding.json      | 192 ++++++++++++++++++
 .../checkmarx_cxflow_sast/no_finding.json     |  21 ++
 .../test_checkmarx_cxflow_sast_parser.py      |  37 ++++
 5 files changed, 323 insertions(+)
 create mode 100644 dojo/tools/checkmarx_cxflow_sast/__init__.py
 create mode 100644 dojo/tools/checkmarx_cxflow_sast/parser.py
 create mode 100644 unittests/scans/checkmarx_cxflow_sast/1-finding.json
 create mode 100644 unittests/scans/checkmarx_cxflow_sast/no_finding.json
 create mode 100644 unittests/tools/test_checkmarx_cxflow_sast_parser.py

diff --git a/dojo/tools/checkmarx_cxflow_sast/__init__.py b/dojo/tools/checkmarx_cxflow_sast/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/dojo/tools/checkmarx_cxflow_sast/parser.py b/dojo/tools/checkmarx_cxflow_sast/parser.py
new file mode 100644
index 0000000000..fcc5ef6c3b
--- /dev/null
+++ b/dojo/tools/checkmarx_cxflow_sast/parser.py
@@ -0,0 +1,73 @@
+import json
+import dateutil.parser
+
+from dojo.models import Finding
+
+
+class CheckmarxCXFlowSastParser(object):
+    def __init__(self):
+        pass
+
+    def get_scan_types(self):
+        return ["CheckmarxCxFlow"]
+
+    def get_label_for_scan_types(self, scan_type):
+        return scan_type  # no custom label for now
+
+    def get_description_for_scan_types(self, scan_type):
+        if scan_type == "CheckmarxCxFlow Scan":
+            return "Simple Report. Aggregates vulnerabilities per categories, cwe, name, sinkFilename"
+        else:
+            return "Detailed Report. Import all vulnerabilities from checkmarx without aggregation"
+
+    def get_findings(self, file, test):
+        if file.name.strip().lower().endswith(".json"):
+            return self._get_findings_json(file, test)
+        else:
+            return []
+
+    def _get_findings_json(self, file, test):
+        data = json.load(file)
+        findings = []
+        deepLink = data.get("deepLink")
+        additional_details = data.get("additionalDetails")
+        scan_start_date = additional_details.get("scanStartDate")
+
+        issues = data.get("xissues", [])
+
+        for issue in issues:
+            vulnerability = issue.get("vulnerability")
+            status = issue.get("vulnerabilityStatus")
+            cwe = issue.get("cwe")
+            description = issue.get("description")
+            language = issue.get("language")
+            severity = issue.get("severity")
+            link = issue.get("link")
+            filename = issue.get("filename")
+            similarity_id = issue.get("similarityId")
+
+            finding = Finding(
+                title=vulnerability.replace("_", " "),
+                cwe=int(cwe),
+                file_path=filename,
+                date=dateutil.parser.parse(scan_start_date),
+                static_finding=True,
+                unique_id_from_tool=similarity_id,
+            )
+
+            findings.append(finding)
+
+
+        return findings
+
+    def _get_findings_xml(self):
+        pass
+
+    def is_verify(self, status):
+        pass
+
+    def is_active(self, status):
+        pass
+
+    def is_mitigated(self, status):
+        pass
\ No newline at end of file
diff --git a/unittests/scans/checkmarx_cxflow_sast/1-finding.json b/unittests/scans/checkmarx_cxflow_sast/1-finding.json
new file mode 100644
index 0000000000..dc872a2a66
--- /dev/null
+++ b/unittests/scans/checkmarx_cxflow_sast/1-finding.json
@@ -0,0 +1,192 @@
+{
+    "projectId": "6",
+    "team": "CxServer",
+    "project": "some-example",
+    "link": "http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6",
+    "files": "1",
+    "loc": "268",
+    "scanType": "Full",
+    "version":"8.9.0.210",
+    "additionalDetails": 
{ + "flow-summary": { + "High": 1 + }, + "scanId": "1000026", + "scanStartDate": "Sunday, January 19, 2020 2:40:11 AM" + }, + "xissues": [ + { + "vulnerability": "Reflected_XSS_All_Clients", + "vulnerabilityStatus": "TO VERIFY", + "similarityId": "14660819", + "cwe": "79", + "description": "", + "language": "Java", + "severity": "High", + "link": "http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6&pathid=2", + "filename": "DOS_Login.java", + "falsePositiveCount": 0, + "details": { + "88": { + "falsePositive": false, + "codeSnippet": "username = s.getParser().getRawParameter(USERNAME);", + "comment": "" + } + }, + "additionalDetails": { + "recommendedFix": "http://CX-FLOW-CLEAN/CxWebClient/ScanQueryDescription.aspx?queryID=591&queryVersionCode=56110529&queryTitle=Reflected_XSS_All_Clients", + "categories": "PCI DSS v3.2;PCI DSS (3.2) - 6.5.7 - Cross-site scripting (XSS),OWASP Top 10 2013;A3-Cross-Site Scripting (XSS),FISMA 2014;System And Information Integrity,NIST SP 800-53;SI-15 Information Output Filtering (P0),OWASP Top 10 2017;A7-Cross-Site Scripting (XSS)", + "results": [ + { + "sink": { + "file": "AnotherFile.java", + "line": "107", + "column": "9", + "object": "username", + "length" : "8", + "snippet" : "+ username + \"' and password = '\" + password + \"'\";" + }, + "state": "0", + "source": { + "file": "DOS_Login.java", + "line": "88", + "column": "46", + "object": "getRawParameter", + "length" : "1", + "snippet" : "username = s.getParser().getRawParameter(USERNAME);" + }, + "1" : { + "snippet" : "username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + }, + "2" : { + "snippet" : "username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "6", + "length" : "8", + "object" : "username" + }, + "3" : { + "snippet" : "if (username.equals(\"jeff\") || username.equals(\"dave\"))", + "file" : "DOS_Login.java", + "line" : "92", + "column" : "37", + "length" : "8", + "object" : "username" + }, + "4" : { + "snippet" : "if (username.equals(\"jeff\") || username.equals(\"dave\"))", + "file" : "DOS_Login.java", + "line" : "92", + "column" : "10", + "length" : "8", + "object" : "username" + }, + "5" : { + "snippet" : "+ username + \"' and password = '\" + password + \"'\";", + "file" : "AnotherFile.java", + "line" : "107", + "column" : "9", + "length" : "8", + "object" : "username" + } + } + ], + "CodeBashingLesson" : "https://cxa.codebashing.com/courses/" + }, + "allFalsePositive": false + } + ], + "unFilteredIssues": [ { + "vulnerability" : "Reflected_XSS_All_Clients", + "vulnerabilityStatus" : "TO VERIFY", + "similarityId" : "14660819", + "cwe" : "79", + "description" : "", + "language" : "Java", + "severity" : "High", + "link" : "http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6&pathid=2", + "filename" : "DOS_Login.java", + "gitUrl" : "", + "falsePositiveCount" : 0, + "details" : { + "88" : { + "falsePositive" : false, + "comment" : "" + } + }, + "additionalDetails" : { + "recommendedFix" : "http://CX-FLOW-CLEAN/CxWebClient/ScanQueryDescription.aspx?queryID=591&queryVersionCode=56110529&queryTitle=Reflected_XSS_All_Clients", + "categories" : "PCI DSS v3.2;PCI DSS (3.2) - 6.5.7 - Cross-site scripting (XSS),OWASP Top 10 2013;A3-Cross-Site Scripting (XSS),FISMA 2014;System And Information Integrity,NIST SP 800-53;SI-15 Information Output Filtering (P0),OWASP Top 10 
2017;A7-Cross-Site Scripting (XSS)", + "results" : [ { + "1" : { + "snippet" : "username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + }, + "2" : { + "snippet" : "username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "6", + "length" : "8", + "object" : "username" + }, + "3" : { + "snippet" : "if (username.equals(\"jeff\") || username.equals(\"dave\"))", + "file" : "DOS_Login.java", + "line" : "92", + "column" : "37", + "length" : "8", + "object" : "username" + }, + "4" : { + "snippet" : "if (username.equals(\"jeff\") || username.equals(\"dave\"))", + "file" : "DOS_Login.java", + "line" : "92", + "column" : "10", + "length" : "8", + "object" : "username" + }, + "5" : { + "snippet" : "+ username + \"' and password = '\" + password + \"'\";", + "file" : "AnotherFile.java", + "line" : "107", + "column" : "9", + "length" : "8", + "object" : "username" + }, + "sink" : { + "snippet" : "+ username + \"' and password = '\" + password + \"'\";", + "file" : "AnotherFile.java", + "line" : "107", + "column" : "9", + "length" : "8", + "object" : "username" + }, + "state" : "0", + "source" : { + "snippet" : "username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + } + } ] + }, + "allFalsePositive" : false + } ], + "reportCreationTime":"Sunday, January 19, 2020 2:41:53 AM", + "deepLink":"http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6", + "scanTime":"00h:01m:30s", + "sastResults": false +} \ No newline at end of file diff --git a/unittests/scans/checkmarx_cxflow_sast/no_finding.json b/unittests/scans/checkmarx_cxflow_sast/no_finding.json new file mode 100644 index 0000000000..ba73c156ab --- /dev/null +++ b/unittests/scans/checkmarx_cxflow_sast/no_finding.json @@ -0,0 +1,21 @@ +{ + "projectId": "5", + "team": "CxServer", + "project": "EmptyClass", + "link": "http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000024&projectid=5", + "files": "1", + "loc": "6", + "scanType": "Full", + "version":"8.9.0.210", + "additionalDetails": { + "flow-summary": {}, + "scanId": "1000024", + "scanStartDate": "Wednesday, January 15, 2020 1:31:13 PM" + }, + "xissues": [], + "unFilteredIssues": [], + "reportCreationTime":"Wednesday, January 15, 2020 1:32:47 PM", + "deepLink":"http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000024&projectid=5", + "scanTime":"00h:01m:24s", + "sastResults": false +} \ No newline at end of file diff --git a/unittests/tools/test_checkmarx_cxflow_sast_parser.py b/unittests/tools/test_checkmarx_cxflow_sast_parser.py new file mode 100644 index 0000000000..6d9f2fca80 --- /dev/null +++ b/unittests/tools/test_checkmarx_cxflow_sast_parser.py @@ -0,0 +1,37 @@ +from dojo.models import Product, Engagement, Test +from dojo.tools.checkmarx_cxflow_sast.parser import CheckmarxCXFlowSastParser +from ..dojo_test_case import DojoTestCase, get_unit_tests_path + +import dateutil.parser + + +class TestCheckmarxCxflowSast(DojoTestCase): + + def init(self, reportFilename): + my_file_handle = open(reportFilename) + product = Product() + engagement = Engagement() + test = Test() + engagement.product = product + test.engagement = engagement + return my_file_handle, product, engagement, test + + def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_no_findings(self): + my_file_handle, 
product, engagement, test = self.init(
+            get_unit_tests_path() + "/scans/checkmarx_cxflow_sast/no_finding.json"
+        )
+        parser = CheckmarxCXFlowSastParser()
+        findings = parser.get_findings(my_file_handle, test)
+        self.assertEqual(0, len(findings))
+
+    def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_1_finding(self):
+        my_file_handle, product, engagement, test = self.init(
+            get_unit_tests_path() + "/scans/checkmarx_cxflow_sast/1-finding.json"
+        )
+        parser = CheckmarxCXFlowSastParser()
+        findings = parser.get_findings(my_file_handle, test)
+        self.assertEqual(1, len(findings))
+        finding = findings[0]
+        self.assertEqual("Reflected XSS All Clients", finding.title)
+        self.assertEqual(79, finding.cwe1)
+        self.assertEqual(dateutil.parser.parse("Sunday, January 19, 2020 2:40:11 AM"), finding.)

From 02f1dcc84e181a8f04c61e6255aff982966a3575 Mon Sep 17 00:00:00 2001
From: biennd4
Date: Mon, 11 Mar 2024 18:43:58 +0700
Subject: [PATCH 02/21] fix typo

---
 unittests/tools/test_checkmarx_cxflow_sast_parser.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/unittests/tools/test_checkmarx_cxflow_sast_parser.py b/unittests/tools/test_checkmarx_cxflow_sast_parser.py
index 6d9f2fca80..9ed62d3557 100644
--- a/unittests/tools/test_checkmarx_cxflow_sast_parser.py
+++ b/unittests/tools/test_checkmarx_cxflow_sast_parser.py
@@ -33,5 +33,5 @@ def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_1_finding(s
         self.assertEqual(1, len(findings))
         finding = findings[0]
         self.assertEqual("Reflected XSS All Clients", finding.title)
-        self.assertEqual(79, finding.cwe1)
-        self.assertEqual(dateutil.parser.parse("Sunday, January 19, 2020 2:40:11 AM"), finding.)
+        self.assertEqual(79, finding.cwe)
+        self.assertEqual(dateutil.parser.parse("Sunday, January 19, 2020 2:40:11 AM"), finding.date)

From cf9732821ea57b11e59b1b69d889317e4f2b0352 Mon Sep 17 00:00:00 2001
From: d3s34
Date: Tue, 12 Mar 2024 02:04:42 +0700
Subject: [PATCH 03/21] draft parser path node

---
 dojo/tools/checkmarx_cxflow_sast/parser.py | 70 ++++++++++++++++++----
 1 file changed, 60 insertions(+), 10 deletions(-)

diff --git a/dojo/tools/checkmarx_cxflow_sast/parser.py b/dojo/tools/checkmarx_cxflow_sast/parser.py
index fcc5ef6c3b..4067363e03 100644
--- a/dojo/tools/checkmarx_cxflow_sast/parser.py
+++ b/dojo/tools/checkmarx_cxflow_sast/parser.py
@@ -4,6 +4,35 @@
 from dojo.models import Finding
 
 
+class _PathNode(object):
+    def __init__(self, file: str, line: str, column: str, _object: str, length: str, snippet: str):
+        self.file = file
+        self.line = line
+        self.column = int(column)
+        self._object = _object
+        self.length = int(length)
+        self.snippet = snippet
+
+    @classmethod
+    def from_json_object(cls, data):
+        return _PathNode(
+            data.get("file"),
+            data.get("line"),
+            data.get("column"),
+            data.get("object"),
+            data.get("length"),
+            data.get("snippet")
+        )
+
+
+class _Path(object):
+    def __init__(self, sink: _PathNode, source: _PathNode, state: int, paths: [_PathNode]):
+        self.sink = sink
+        self.source = source
+        self.state = int(state)
+        self.paths = paths
+
+
 class CheckmarxCXFlowSastParser(object):
     def __init__(self):
         pass
@@ -46,17 +75,38 @@ def _get_findings_json(self, file, test):
             filename = issue.get("filename")
             similarity_id = issue.get("similarityId")
 
-            finding = Finding(
-                title=vulnerability.replace("_", " "),
-                cwe=int(cwe),
-                file_path=filename,
-                date=dateutil.parser.parse(scan_start_date),
-                static_finding=True,
-                unique_id_from_tool=similarity_id,
-            )
+            issue_additional_details = issue.get("additionalDetails")
+            categories = issue_additional_details.get("categories")
+            results = issue_additional_details.get("results")
 
-            findings.append(finding)
+            map_paths = {}
+
+            for result in results:
+                # all path nodes exclude sink, source, state
+                path_keys = sorted(filter(lambda k: isinstance(k, str) and k.isnumeric(), result.keys()))
+
+                path = _Path(
+                    sink=_PathNode.from_json_object(result.get("sink")),
+                    source=_PathNode.from_json_object(result.get("source")),
+                    state=result.get("state"),
+                    paths=list([result[k] for k in path_keys])
+                )
+
+                map_paths[path.source.line] = path
+
+            for detail_key in issue.get("details").keys():
+                pass
+
+            finding = Finding(
+                title=vulnerability.replace("_", " "),
+                cwe=int(cwe),
+                file_path=filename,
+                date=dateutil.parser.parse(scan_start_date),
+                static_finding=True,
+                unique_id_from_tool=similarity_id,
+            )
+
+            findings.append(finding)
 
         return findings
 
@@ -70,4 +120,4 @@ def is_active(self, status):
         pass
 
     def is_mitigated(self, status):
-        pass
\ No newline at end of file
+        pass

From a61b9198a7a092126459d7ce4e46d2f56bdfed50 Mon Sep 17 00:00:00 2001
From: biennd4
Date: Tue, 12 Mar 2024 11:23:50 +0700
Subject: [PATCH 04/21] add parser

---
 dojo/tools/checkmarx_cxflow_sast/parser.py    |   82 +-
 .../checkmarx_cxflow_sast/4-findings.json     | 1220 +++++++++++++++++
 .../test_checkmarx_cxflow_sast_parser.py      |   41 +-
 3 files changed, 1315 insertions(+), 28 deletions(-)
 create mode 100644 unittests/scans/checkmarx_cxflow_sast/4-findings.json

diff --git a/dojo/tools/checkmarx_cxflow_sast/parser.py b/dojo/tools/checkmarx_cxflow_sast/parser.py
index 4067363e03..9c6842bff8 100644
--- a/dojo/tools/checkmarx_cxflow_sast/parser.py
+++ b/dojo/tools/checkmarx_cxflow_sast/parser.py
@@ -1,15 +1,18 @@
 import json
 import dateutil.parser
+import logging
 
 from dojo.models import Finding
 
+logger = logging.getLogger(__name__)
+
 
 class _PathNode(object):
-    def __init__(self, file: str, line: str, column: str, _object: str, length: str, snippet: str):
+    def __init__(self, file: str, line: str, column: str, node_object: str, length: str, snippet: str):
         self.file = file
         self.line = line
         self.column = int(column)
-        self._object = _object
+        self.node_object = node_object
         self.length = int(length)
         self.snippet = snippet
 
@@ -26,10 +29,10 @@ def from_json_object(cls, data):
 
 
 class _Path(object):
-    def __init__(self, sink: _PathNode, source: _PathNode, state: int, paths: [_PathNode]):
+    def __init__(self, sink: _PathNode, source: _PathNode, state: str, paths: [_PathNode]):
         self.sink = sink
         self.source = source
-        self.state = int(state)
+        self.state = state
         self.paths = paths
 
 
@@ -38,16 +41,13 @@ def __init__(self):
         pass
 
     def get_scan_types(self):
-        return ["CheckmarxCxFlow"]
+        return ["Checkmarx CxFlow SAST"]
 
     def get_label_for_scan_types(self, scan_type):
         return scan_type  # no custom label for now
 
     def get_description_for_scan_types(self, scan_type):
-        if scan_type == "CheckmarxCxFlow Scan":
-            return "Simple Report. Aggregates vulnerabilities per categories, cwe, name, sinkFilename"
-        else:
-            return "Detailed Report. Import all vulnerabilities from checkmarx without aggregation"
+        return "Detailed Report. 
Import all vulnerabilities from checkmarx without aggregation" def get_findings(self, file, test): if file.name.strip().lower().endswith(".json"): @@ -92,32 +92,60 @@ def _get_findings_json(self, file, test): paths=list([result[k] for k in path_keys]) ) - map_paths[path.source.line] = path + map_paths[str(path.source.line)] = path for detail_key in issue.get("details").keys(): - pass - - finding = Finding( - title=vulnerability.replace("_", " "), - cwe=int(cwe), - file_path=filename, - date=dateutil.parser.parse(scan_start_date), - static_finding=True, - unique_id_from_tool=similarity_id, - ) - - findings.append(finding) + if detail_key not in map_paths: + logger.warning(f"{detail_key} not found in path, ignore") + else: + detail = map_paths[detail_key] + + finding_detail = f"**Category:** {categories}\n" + finding_detail += f"**Language:** {language}\n" + finding_detail += f"**Status:** {status}\n" + finding_detail += f"**Finding link:** [{link}]({link})\n" + finding_detail += f"**Description:** {description}\n" + finding_detail += f"**Source snippet:** `{detail.source.snippet if detail.source is not None else ''}`\n" + finding_detail += f"**Sink snippet:** `{detail.sink.snippet if detail.sink is not None else ''}`\n" + + finding = Finding( + title=vulnerability.replace("_", " ") + " " + detail.sink.file.split("/")[ + -1] if detail.sink is not None else "", + cwe=int(cwe), + date=dateutil.parser.parse(scan_start_date), + static_finding=True, + unique_id_from_tool=str(similarity_id) + str(detail_key), + test=test, + sast_source_object=detail.source.node_object if detail.source is not None else None, + sast_sink_object=detail.sink.node_object if detail.sink is not None else None, + sast_source_file_path=detail.source.file if detail.source is not None else None, + sast_source_line=detail.source.line if detail.source is not None else None, + vuln_id_from_tool=similarity_id, + severity=severity, + file_path=filename, + line=detail.sink.line, + false_p=issue.get("details")[detail_key].get("falsePositive"), + description=finding_detail, + verified=self.is_verify(detail.state), + active=self.is_active(detail.state) + ) + + findings.append(finding) return findings def _get_findings_xml(self): pass - def is_verify(self, status): - pass + def is_verify(self, state): + # Confirmed, urgent + verifiedStates = ["2", "3"] + return state in verifiedStates - def is_active(self, status): - pass + def is_active(self, state): + # To verify, Confirmed, Urgent, Proposed not exploitable + activeStates = ["0", "2", "3", "4"] + return state in activeStates - def is_mitigated(self, status): + def is_mitigated(self, state): pass diff --git a/unittests/scans/checkmarx_cxflow_sast/4-findings.json b/unittests/scans/checkmarx_cxflow_sast/4-findings.json new file mode 100644 index 0000000000..f8008d2968 --- /dev/null +++ b/unittests/scans/checkmarx_cxflow_sast/4-findings.json @@ -0,0 +1,1220 @@ +{ + "projectId": "6", + "team": "CxServer", + "project": "some-example", + "link": "http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6", + "files": "1", + "loc": "268", + "scanType": "Full", + "version":"8.9.0.210", + "additionalDetails": { + "flow-summary": { + "High": 4 + }, + "scanId": "1000026", + "scanStartDate": "Sunday, January 19, 2020 2:40:11 AM" + }, + "xissues": [ + { + "vulnerability": "Reflected_XSS_All_Clients", + "vulnerabilityStatus": "TO VERIFY", + "similarityId": "14660819", + "cwe": "79", + "description": "", + "language": "Java", + "severity": "High", + "link": 
"http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6&pathid=2", + "filename": "DOS_Login.java", + "gitUrl": "", + "falsePositiveCount": 0, + "details": { + "88": { + "falsePositive": false, + "codeSnippet": "\t username = s.getParser().getRawParameter(USERNAME);", + "comment": "" + }, + "89": { + "falsePositive": false, + "codeSnippet": "\t password = s.getParser().getRawParameter(PASSWORD);", + "comment": "" + } + }, + "additionalDetails": { + "recommendedFix": "http://CX-FLOW-CLEAN/CxWebClient/ScanQueryDescription.aspx?queryID=591&queryVersionCode=56110529&queryTitle=Reflected_XSS_All_Clients", + "categories": "PCI DSS v3.2;PCI DSS (3.2) - 6.5.7 - Cross-site scripting (XSS),OWASP Top 10 2013;A3-Cross-Site Scripting (XSS),FISMA 2014;System And Information Integrity,NIST SP 800-53;SI-15 Information Output Filtering (P0),OWASP Top 10 2017;A7-Cross-Site Scripting (XSS)", + "results": [ + { + "sink": { + "file": "DOS_Login.java", + "line": "108", + "column": "20", + "object": "StringElement", + "length" : "3", + "snippet" : "\t ec.addElement(new StringElement(query));" + }, + "state": "0", + "source": { + "file": "DOS_Login.java", + "line": "88", + "column": "46", + "object": "getRawParameter", + "length" : "1", + "snippet" : "\t username = s.getParser().getRawParameter(USERNAME);" + }, + "1" : { + "snippet" : "\t username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + }, + "2" : { + "snippet" : "\t username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "6", + "length" : "8", + "object" : "username" + }, + "3" : { + "snippet" : "\t if (username.equals(\"jeff\") || username.equals(\"dave\"))", + "file" : "DOS_Login.java", + "line" : "92", + "column" : "37", + "length" : "8", + "object" : "username" + }, + "4" : { + "snippet" : "\t if (username.equals(\"jeff\") || username.equals(\"dave\"))", + "file" : "DOS_Login.java", + "line" : "92", + "column" : "10", + "length" : "8", + "object" : "username" + }, + "5" : { + "snippet" : "\t\t + username + \"' and password = '\" + password + \"'\";", + "file" : "DOS_Login.java", + "line" : "107", + "column" : "9", + "length" : "8", + "object" : "username" + }, + "6" : { + "snippet" : "\t String query = \"SELECT * FROM user_system_data WHERE user_name = '\"", + "file" : "DOS_Login.java", + "line" : "106", + "column" : "13", + "length" : "5", + "object" : "query" + }, + "7" : { + "snippet" : "\t ec.addElement(new StringElement(query));", + "file" : "DOS_Login.java", + "line" : "108", + "column" : "38", + "length" : "5", + "object" : "query" + }, + "8" : { + "snippet" : "\t ec.addElement(new StringElement(query));", + "file" : "DOS_Login.java", + "line" : "108", + "column" : "20", + "length" : "3", + "object" : "StringElement" + } + }, + { + "sink": { + "file": "DOS_Login.java", + "line": "108", + "column": "20", + "object": "StringElement", + "length" : "3", + "snippet" : "\t ec.addElement(new StringElement(query));" + }, + "state": "0", + "source": { + "file": "DOS_Login.java", + "line": "89", + "column": "46", + "object": "getRawParameter", + "length" : "1", + "snippet" : "\t password = s.getParser().getRawParameter(PASSWORD);" + }, + "1" : { + "snippet" : "\t password = s.getParser().getRawParameter(PASSWORD);", + "file" : "DOS_Login.java", + "line" : "89", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + }, + "2" : { + "snippet" : "\t 
password = s.getParser().getRawParameter(PASSWORD);", + "file" : "DOS_Login.java", + "line" : "89", + "column" : "6", + "length" : "8", + "object" : "password" + }, + "3" : { + "snippet" : "\t\t + username + \"' and password = '\" + password + \"'\";", + "file" : "DOS_Login.java", + "line" : "107", + "column" : "43", + "length" : "8", + "object" : "password" + }, + "4" : { + "snippet" : "\t String query = \"SELECT * FROM user_system_data WHERE user_name = '\"", + "file" : "DOS_Login.java", + "line" : "106", + "column" : "13", + "length" : "5", + "object" : "query" + }, + "5" : { + "snippet" : "\t ec.addElement(new StringElement(query));", + "file" : "DOS_Login.java", + "line" : "108", + "column" : "38", + "length" : "5", + "object" : "query" + }, + "6" : { + "snippet" : "\t ec.addElement(new StringElement(query));", + "file" : "DOS_Login.java", + "line" : "108", + "column" : "20", + "length" : "3", + "object" : "StringElement" + } + } + ] + }, + "allFalsePositive": false + }, + { + "vulnerability": "SQL_Injection", + "vulnerabilityStatus": "TO VERIFY", + "similarityId": "-1987639889", + "cwe": "89", + "description": "", + "language": "Java", + "severity": "High", + "link": "http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6&pathid=4", + "filename": "DOS_Login.java", + "falsePositiveCount": 0, + "details": { + "88": { + "falsePositive": false, + "codeSnippet": "\t username = s.getParser().getRawParameter(USERNAME);", + "comment": "" + }, + "89": { + "falsePositive": false, + "codeSnippet": "\t password = s.getParser().getRawParameter(PASSWORD);", + "comment": "" + } + }, + "additionalDetails": { + "recommendedFix": "http://CX-FLOW-CLEAN/CxWebClient/ScanQueryDescription.aspx?queryID=594&queryVersionCode=56142311&queryTitle=SQL_Injection", + "categories": "PCI DSS v3.2;PCI DSS (3.2) - 6.5.1 - Injection flaws - particularly SQL injection,OWASP Top 10 2013;A1-Injection,FISMA 2014;System And Information Integrity,NIST SP 800-53;SI-10 Information Input Validation (P1),OWASP Top 10 2017;A1-Injection,OWASP Mobile Top 10 2016;M7-Client Code Quality", + "results": [ + { + "sink": { + "file": "DOS_Login.java", + "line": "114", + "column": "45", + "object": "executeQuery", + "length" : "1", + "snippet" : "\t\tResultSet results = statement.executeQuery(query);" + }, + "state": "0", + "source": { + "file": "DOS_Login.java", + "line": "88", + "column": "46", + "object": "getRawParameter", + "length" : "1", + "snippet" : "\t username = s.getParser().getRawParameter(USERNAME);" + }, + "1" : { + "snippet" : "\t username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + }, + "2" : { + "snippet" : "\t username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "6", + "length" : "8", + "object" : "username" + }, + "3" : { + "snippet" : "\t if (username.equals(\"jeff\") || username.equals(\"dave\"))", + "file" : "DOS_Login.java", + "line" : "92", + "column" : "37", + "length" : "8", + "object" : "username" + }, + "4" : { + "snippet" : "\t if (username.equals(\"jeff\") || username.equals(\"dave\"))", + "file" : "DOS_Login.java", + "line" : "92", + "column" : "10", + "length" : "8", + "object" : "username" + }, + "5" : { + "snippet" : "\t\t + username + \"' and password = '\" + password + \"'\";", + "file" : "DOS_Login.java", + "line" : "107", + "column" : "9", + "length" : "8", + "object" : "username" + }, + "6" : { + "snippet" 
: "\t String query = \"SELECT * FROM user_system_data WHERE user_name = '\"", + "file" : "DOS_Login.java", + "line" : "106", + "column" : "13", + "length" : "5", + "object" : "query" + }, + "7" : { + "snippet" : "\t\tResultSet results = statement.executeQuery(query);", + "file" : "DOS_Login.java", + "line" : "114", + "column" : "46", + "length" : "5", + "object" : "query" + }, + "8" : { + "snippet" : "\t\tResultSet results = statement.executeQuery(query);", + "file" : "DOS_Login.java", + "line" : "114", + "column" : "45", + "length" : "1", + "object" : "executeQuery" + } + }, + { + "sink": { + "file": "DOS_Login.java", + "line": "114", + "column": "45", + "object": "executeQuery", + "length" : "1", + "snippet" : "\t\tResultSet results = statement.executeQuery(query);" + }, + "state": "0", + "source": { + "file": "DOS_Login.java", + "line": "89", + "column": "46", + "object": "getRawParameter", + "length" : "1", + "snippet" : "\t password = s.getParser().getRawParameter(PASSWORD);" + }, + "1" : { + "snippet" : "\t password = s.getParser().getRawParameter(PASSWORD);", + "file" : "DOS_Login.java", + "line" : "89", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + }, + "2" : { + "snippet" : "\t password = s.getParser().getRawParameter(PASSWORD);", + "file" : "DOS_Login.java", + "line" : "89", + "column" : "6", + "length" : "8", + "object" : "password" + }, + "3" : { + "snippet" : "\t\t + username + \"' and password = '\" + password + \"'\";", + "file" : "DOS_Login.java", + "line" : "107", + "column" : "43", + "length" : "8", + "object" : "password" + }, + "4" : { + "snippet" : "\t String query = \"SELECT * FROM user_system_data WHERE user_name = '\"", + "file" : "DOS_Login.java", + "line" : "106", + "column" : "13", + "length" : "5", + "object" : "query" + }, + "5" : { + "snippet" : "\t\tResultSet results = statement.executeQuery(query);", + "file" : "DOS_Login.java", + "line" : "114", + "column" : "46", + "length" : "5", + "object" : "query" + }, + "6" : { + "snippet" : "\t\tResultSet results = statement.executeQuery(query);", + "file" : "DOS_Login.java", + "line" : "114", + "column" : "45", + "length" : "1", + "object" : "executeQuery" + } + } + ], + "CodeBashingLesson" : "https://cxa.codebashing.com/courses/" + }, + "allFalsePositive": false + } + ], + "unFilteredIssues": [ { + "vulnerability" : "Reflected_XSS_All_Clients", + "vulnerabilityStatus" : "TO VERIFY", + "similarityId" : "14660819", + "cwe" : "79", + "description" : "", + "language" : "Java", + "severity" : "High", + "link" : "http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6&pathid=2", + "filename" : "DOS_Login.java", + "gitUrl" : "", + "falsePositiveCount" : 0, + "details" : { + "88" : { + "falsePositive" : false, + "comment" : "" + }, + "89" : { + "falsePositive" : false, + "comment" : "" + } + }, + "additionalDetails" : { + "recommendedFix" : "http://CX-FLOW-CLEAN/CxWebClient/ScanQueryDescription.aspx?queryID=591&queryVersionCode=56110529&queryTitle=Reflected_XSS_All_Clients", + "categories" : "PCI DSS v3.2;PCI DSS (3.2) - 6.5.7 - Cross-site scripting (XSS),OWASP Top 10 2013;A3-Cross-Site Scripting (XSS),FISMA 2014;System And Information Integrity,NIST SP 800-53;SI-15 Information Output Filtering (P0),OWASP Top 10 2017;A7-Cross-Site Scripting (XSS)", + "results" : [ { + "1" : { + "snippet" : "\t username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + }, + "2" : { + 
"snippet" : "\t username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "6", + "length" : "8", + "object" : "username" + }, + "3" : { + "snippet" : "\t if (username.equals(\"jeff\") || username.equals(\"dave\"))", + "file" : "DOS_Login.java", + "line" : "92", + "column" : "37", + "length" : "8", + "object" : "username" + }, + "4" : { + "snippet" : "\t if (username.equals(\"jeff\") || username.equals(\"dave\"))", + "file" : "DOS_Login.java", + "line" : "92", + "column" : "10", + "length" : "8", + "object" : "username" + }, + "5" : { + "snippet" : "\t\t + username + \"' and password = '\" + password + \"'\";", + "file" : "DOS_Login.java", + "line" : "107", + "column" : "9", + "length" : "8", + "object" : "username" + }, + "sink" : { + "snippet" : "\t ec.addElement(new StringElement(query));", + "file" : "DOS_Login.java", + "line" : "108", + "column" : "20", + "length" : "3", + "object" : "StringElement" + }, + "6" : { + "snippet" : "\t String query = \"SELECT * FROM user_system_data WHERE user_name = '\"", + "file" : "DOS_Login.java", + "line" : "106", + "column" : "13", + "length" : "5", + "object" : "query" + }, + "7" : { + "snippet" : "\t ec.addElement(new StringElement(query));", + "file" : "DOS_Login.java", + "line" : "108", + "column" : "38", + "length" : "5", + "object" : "query" + }, + "8" : { + "snippet" : "\t ec.addElement(new StringElement(query));", + "file" : "DOS_Login.java", + "line" : "108", + "column" : "20", + "length" : "3", + "object" : "StringElement" + }, + "state" : "0", + "source" : { + "snippet" : "\t username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + } + }, { + "1" : { + "snippet" : "\t password = s.getParser().getRawParameter(PASSWORD);", + "file" : "DOS_Login.java", + "line" : "89", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + }, + "2" : { + "snippet" : "\t password = s.getParser().getRawParameter(PASSWORD);", + "file" : "DOS_Login.java", + "line" : "89", + "column" : "6", + "length" : "8", + "object" : "password" + }, + "3" : { + "snippet" : "\t\t + username + \"' and password = '\" + password + \"'\";", + "file" : "DOS_Login.java", + "line" : "107", + "column" : "43", + "length" : "8", + "object" : "password" + }, + "4" : { + "snippet" : "\t String query = \"SELECT * FROM user_system_data WHERE user_name = '\"", + "file" : "DOS_Login.java", + "line" : "106", + "column" : "13", + "length" : "5", + "object" : "query" + }, + "5" : { + "snippet" : "\t ec.addElement(new StringElement(query));", + "file" : "DOS_Login.java", + "line" : "108", + "column" : "38", + "length" : "5", + "object" : "query" + }, + "sink" : { + "snippet" : "\t ec.addElement(new StringElement(query));", + "file" : "DOS_Login.java", + "line" : "108", + "column" : "20", + "length" : "3", + "object" : "StringElement" + }, + "6" : { + "snippet" : "\t ec.addElement(new StringElement(query));", + "file" : "DOS_Login.java", + "line" : "108", + "column" : "20", + "length" : "3", + "object" : "StringElement" + }, + "state" : "0", + "source" : { + "snippet" : "\t password = s.getParser().getRawParameter(PASSWORD);", + "file" : "DOS_Login.java", + "line" : "89", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + } + } ] + }, + "allFalsePositive" : false + }, { + "vulnerability" : "SQL_Injection", + "vulnerabilityStatus" : "TO VERIFY", + "similarityId" : "-1987639889", + "cwe" : "89", + 
"description" : "", + "language" : "Java", + "severity" : "High", + "link" : "http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6&pathid=4", + "filename" : "DOS_Login.java", + "gitUrl" : "", + "falsePositiveCount" : 0, + "details" : { + "88" : { + "falsePositive" : false, + "comment" : "" + }, + "89" : { + "falsePositive" : false, + "comment" : "" + } + }, + "additionalDetails" : { + "recommendedFix" : "http://CX-FLOW-CLEAN/CxWebClient/ScanQueryDescription.aspx?queryID=594&queryVersionCode=56142311&queryTitle=SQL_Injection", + "categories" : "PCI DSS v3.2;PCI DSS (3.2) - 6.5.1 - Injection flaws - particularly SQL injection,OWASP Top 10 2013;A1-Injection,FISMA 2014;System And Information Integrity,NIST SP 800-53;SI-10 Information Input Validation (P1),OWASP Top 10 2017;A1-Injection,OWASP Mobile Top 10 2016;M7-Client Code Quality", + "results" : [ { + "1" : { + "snippet" : "\t username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + }, + "2" : { + "snippet" : "\t username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "6", + "length" : "8", + "object" : "username" + }, + "3" : { + "snippet" : "\t if (username.equals(\"jeff\") || username.equals(\"dave\"))", + "file" : "DOS_Login.java", + "line" : "92", + "column" : "37", + "length" : "8", + "object" : "username" + }, + "4" : { + "snippet" : "\t if (username.equals(\"jeff\") || username.equals(\"dave\"))", + "file" : "DOS_Login.java", + "line" : "92", + "column" : "10", + "length" : "8", + "object" : "username" + }, + "5" : { + "snippet" : "\t\t + username + \"' and password = '\" + password + \"'\";", + "file" : "DOS_Login.java", + "line" : "107", + "column" : "9", + "length" : "8", + "object" : "username" + }, + "sink" : { + "snippet" : "\t\tResultSet results = statement.executeQuery(query);", + "file" : "DOS_Login.java", + "line" : "114", + "column" : "45", + "length" : "1", + "object" : "executeQuery" + }, + "6" : { + "snippet" : "\t String query = \"SELECT * FROM user_system_data WHERE user_name = '\"", + "file" : "DOS_Login.java", + "line" : "106", + "column" : "13", + "length" : "5", + "object" : "query" + }, + "7" : { + "snippet" : "\t\tResultSet results = statement.executeQuery(query);", + "file" : "DOS_Login.java", + "line" : "114", + "column" : "46", + "length" : "5", + "object" : "query" + }, + "8" : { + "snippet" : "\t\tResultSet results = statement.executeQuery(query);", + "file" : "DOS_Login.java", + "line" : "114", + "column" : "45", + "length" : "1", + "object" : "executeQuery" + }, + "state" : "0", + "source" : { + "snippet" : "\t username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + } + }, { + "1" : { + "snippet" : "\t password = s.getParser().getRawParameter(PASSWORD);", + "file" : "DOS_Login.java", + "line" : "89", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + }, + "2" : { + "snippet" : "\t password = s.getParser().getRawParameter(PASSWORD);", + "file" : "DOS_Login.java", + "line" : "89", + "column" : "6", + "length" : "8", + "object" : "password" + }, + "3" : { + "snippet" : "\t\t + username + \"' and password = '\" + password + \"'\";", + "file" : "DOS_Login.java", + "line" : "107", + "column" : "43", + "length" : "8", + "object" : "password" + }, + "4" : { + "snippet" : "\t String query = 
\"SELECT * FROM user_system_data WHERE user_name = '\"", + "file" : "DOS_Login.java", + "line" : "106", + "column" : "13", + "length" : "5", + "object" : "query" + }, + "5" : { + "snippet" : "\t\tResultSet results = statement.executeQuery(query);", + "file" : "DOS_Login.java", + "line" : "114", + "column" : "46", + "length" : "5", + "object" : "query" + }, + "sink" : { + "snippet" : "\t\tResultSet results = statement.executeQuery(query);", + "file" : "DOS_Login.java", + "line" : "114", + "column" : "45", + "length" : "1", + "object" : "executeQuery" + }, + "6" : { + "snippet" : "\t\tResultSet results = statement.executeQuery(query);", + "file" : "DOS_Login.java", + "line" : "114", + "column" : "45", + "length" : "1", + "object" : "executeQuery" + }, + "state" : "0", + "source" : { + "snippet" : "\t password = s.getParser().getRawParameter(PASSWORD);", + "file" : "DOS_Login.java", + "line" : "89", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + } + } ] + }, + "allFalsePositive" : false + }, { + "vulnerability" : "Heap_Inspection", + "vulnerabilityStatus" : "TO VERIFY", + "cwe" : "244", + "description" : "", + "language" : "Java", + "severity" : "Medium", + "link" : "http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6&pathid=1", + "filename" : "DOS_Login.java", + "gitUrl" : "", + "falsePositiveCount" : 0, + "details" : { + "87" : { + "falsePositive" : false, + "comment" : "" + } + }, + "additionalDetails" : { + "recommendedFix" : "http://CX-FLOW-CLEAN/CxWebClient/ScanQueryDescription.aspx?queryID=3771&queryVersionCode=94850879&queryTitle=Heap_Inspection", + "categories" : "OWASP Top 10 2013;A6-Sensitive Data Exposure,FISMA 2014;Media Protection,NIST SP 800-53;SC-4 Information in Shared Resources (P1),OWASP Top 10 2017;A3-Sensitive Data Exposure", + "results" : [ { + "1" : { + "snippet" : "\t String password = \"\";", + "file" : "DOS_Login.java", + "line" : "87", + "column" : "13", + "length" : "8", + "object" : "password" + }, + "sink" : { + "snippet" : "\t String password = \"\";", + "file" : "DOS_Login.java", + "line" : "87", + "column" : "13", + "length" : "8", + "object" : "password" + }, + "state" : "0", + "source" : { + "snippet" : "\t String password = \"\";", + "file" : "DOS_Login.java", + "line" : "87", + "column" : "13", + "length" : "8", + "object" : "password" + } + } ] + }, + "allFalsePositive" : false + }, { + "vulnerability" : "Privacy_Violation", + "vulnerabilityStatus" : "TO VERIFY", + "cwe" : "359", + "description" : "", + "language" : "Java", + "severity" : "Medium", + "link" : "http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6&pathid=10", + "filename" : "DOS_Login.java", + "gitUrl" : "", + "falsePositiveCount" : 0, + "details" : { + "89" : { + "falsePositive" : false, + "comment" : "" + } + }, + "additionalDetails" : { + "recommendedFix" : "http://CX-FLOW-CLEAN/CxWebClient/ScanQueryDescription.aspx?queryID=639&queryVersionCode=56620121&queryTitle=Privacy_Violation", + "categories" : "PCI DSS v3.2;PCI DSS (3.2) - 6.5.1 - Injection flaws - particularly SQL injection,OWASP Top 10 2013;A6-Sensitive Data Exposure,FISMA 2014;Identification And Authentication,NIST SP 800-53;SC-4 Information in Shared Resources (P1),OWASP Top 10 2017;A3-Sensitive Data Exposure", + "results" : [ { + "1" : { + "snippet" : "\t password = s.getParser().getRawParameter(PASSWORD);", + "file" : "DOS_Login.java", + "line" : "89", + "column" : "6", + "length" : "8", + "object" : "password" + }, + "2" : { + "snippet" : "\t\t + username 
+ \"' and password = '\" + password + \"'\";", + "file" : "DOS_Login.java", + "line" : "107", + "column" : "43", + "length" : "8", + "object" : "password" + }, + "3" : { + "snippet" : "\t String query = \"SELECT * FROM user_system_data WHERE user_name = '\"", + "file" : "DOS_Login.java", + "line" : "106", + "column" : "13", + "length" : "5", + "object" : "query" + }, + "4" : { + "snippet" : "\t ec.addElement(new StringElement(query));", + "file" : "DOS_Login.java", + "line" : "108", + "column" : "38", + "length" : "5", + "object" : "query" + }, + "5" : { + "snippet" : "\t ec.addElement(new StringElement(query));", + "file" : "DOS_Login.java", + "line" : "108", + "column" : "20", + "length" : "3", + "object" : "StringElement" + }, + "sink" : { + "snippet" : "\t ec.addElement(new StringElement(query));", + "file" : "DOS_Login.java", + "line" : "108", + "column" : "20", + "length" : "3", + "object" : "StringElement" + }, + "state" : "0", + "source" : { + "snippet" : "\t password = s.getParser().getRawParameter(PASSWORD);", + "file" : "DOS_Login.java", + "line" : "89", + "column" : "6", + "length" : "8", + "object" : "password" + } + } ] + }, + "allFalsePositive" : false + }, { + "vulnerability" : "XSRF", + "vulnerabilityStatus" : "TO VERIFY", + "cwe" : "352", + "description" : "", + "language" : "Java", + "severity" : "Medium", + "link" : "http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6&pathid=11", + "filename" : "DOS_Login.java", + "gitUrl" : "", + "falsePositiveCount" : 0, + "details" : { + "88" : { + "falsePositive" : false, + "comment" : "" + } + }, + "additionalDetails" : { + "recommendedFix" : "http://CX-FLOW-CLEAN/CxWebClient/ScanQueryDescription.aspx?queryID=648&queryVersionCode=56715926&queryTitle=XSRF", + "categories" : "PCI DSS v3.2;PCI DSS (3.2) - 6.5.9 - Cross-site request forgery,OWASP Top 10 2013;A8-Cross-Site Request Forgery (CSRF),NIST SP 800-53;SC-23 Session Authenticity (P1)", + "results" : [ { + "1" : { + "snippet" : "\t username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + }, + "2" : { + "snippet" : "\t username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "6", + "length" : "8", + "object" : "username" + }, + "3" : { + "snippet" : "\t if (username.equals(\"jeff\") || username.equals(\"dave\"))", + "file" : "DOS_Login.java", + "line" : "92", + "column" : "37", + "length" : "8", + "object" : "username" + }, + "4" : { + "snippet" : "\t if (username.equals(\"jeff\") || username.equals(\"dave\"))", + "file" : "DOS_Login.java", + "line" : "92", + "column" : "10", + "length" : "8", + "object" : "username" + }, + "5" : { + "snippet" : "\t\t\t\t + username", + "file" : "DOS_Login.java", + "line" : "130", + "column" : "11", + "length" : "8", + "object" : "username" + }, + "sink" : { + "snippet" : "\t\t\t statement.executeUpdate(insertData1);", + "file" : "DOS_Login.java", + "line" : "134", + "column" : "31", + "length" : "1", + "object" : "executeUpdate" + }, + "6" : { + "snippet" : "\t\t\t String insertData1 = \"INSERT INTO user_login VALUES ( '\"", + "file" : "DOS_Login.java", + "line" : "129", + "column" : "15", + "length" : "11", + "object" : "insertData1" + }, + "7" : { + "snippet" : "\t\t\t statement.executeUpdate(insertData1);", + "file" : "DOS_Login.java", + "line" : "134", + "column" : "32", + "length" : "11", + "object" : "insertData1" + }, + "8" : { + "snippet" : "\t\t\t 
statement.executeUpdate(insertData1);", + "file" : "DOS_Login.java", + "line" : "134", + "column" : "31", + "length" : "1", + "object" : "executeUpdate" + }, + "state" : "0", + "source" : { + "snippet" : "\t username = s.getParser().getRawParameter(USERNAME);", + "file" : "DOS_Login.java", + "line" : "88", + "column" : "46", + "length" : "1", + "object" : "getRawParameter" + } + } ] + }, + "allFalsePositive" : false + }, { + "vulnerability" : "Information_Exposure_Through_an_Error_Message", + "vulnerabilityStatus" : "TO VERIFY", + "cwe" : "209", + "description" : "", + "language" : "Java", + "severity" : "Low", + "link" : "http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6&pathid=8", + "filename" : "DOS_Login.java", + "gitUrl" : "", + "falsePositiveCount" : 0, + "details" : { + "169" : { + "falsePositive" : false, + "comment" : "" + } + }, + "additionalDetails" : { + "recommendedFix" : "http://CX-FLOW-CLEAN/CxWebClient/ScanQueryDescription.aspx?queryID=622&queryVersionCode=56439377&queryTitle=Information_Exposure_Through_an_Error_Message", + "categories" : "PCI DSS v3.2;PCI DSS (3.2) - 6.5.5 - Improper error handling,OWASP Top 10 2013;A5-Security Misconfiguration,FISMA 2014;Configuration Management,NIST SP 800-53;SI-11 Error Handling (P2),OWASP Top 10 2017;A6-Security Misconfiguration", + "results" : [ { + "1" : { + "snippet" : "\t catch (SQLException sqle)", + "file" : "DOS_Login.java", + "line" : "169", + "column" : "26", + "length" : "4", + "object" : "sqle" + }, + "2" : { + "snippet" : "\t\tec.addElement(new P().addElement(sqle.getMessage()));", + "file" : "DOS_Login.java", + "line" : "171", + "column" : "36", + "length" : "4", + "object" : "sqle" + }, + "3" : { + "snippet" : "\t\tsqle.printStackTrace();", + "file" : "DOS_Login.java", + "line" : "172", + "column" : "3", + "length" : "4", + "object" : "sqle" + }, + "4" : { + "snippet" : "\t\tsqle.printStackTrace();", + "file" : "DOS_Login.java", + "line" : "172", + "column" : "23", + "length" : "1", + "object" : "printStackTrace" + }, + "sink" : { + "snippet" : "\t\tsqle.printStackTrace();", + "file" : "DOS_Login.java", + "line" : "172", + "column" : "23", + "length" : "1", + "object" : "printStackTrace" + }, + "state" : "0", + "source" : { + "snippet" : "\t catch (SQLException sqle)", + "file" : "DOS_Login.java", + "line" : "169", + "column" : "26", + "length" : "4", + "object" : "sqle" + } + }, { + "1" : { + "snippet" : "\t catch (SQLException sqle)", + "file" : "DOS_Login.java", + "line" : "169", + "column" : "26", + "length" : "4", + "object" : "sqle" + }, + "2" : { + "snippet" : "\t\tec.addElement(new P().addElement(sqle.getMessage()));", + "file" : "DOS_Login.java", + "line" : "171", + "column" : "36", + "length" : "4", + "object" : "sqle" + }, + "3" : { + "snippet" : "\t\tec.addElement(new P().addElement(sqle.getMessage()));", + "file" : "DOS_Login.java", + "line" : "171", + "column" : "51", + "length" : "1", + "object" : "getMessage" + }, + "4" : { + "snippet" : "\t\tec.addElement(new P().addElement(sqle.getMessage()));", + "file" : "DOS_Login.java", + "line" : "171", + "column" : "35", + "length" : "1", + "object" : "addElement" + }, + "sink" : { + "snippet" : "\t\tec.addElement(new P().addElement(sqle.getMessage()));", + "file" : "DOS_Login.java", + "line" : "171", + "column" : "35", + "length" : "1", + "object" : "addElement" + }, + "state" : "0", + "source" : { + "snippet" : "\t catch (SQLException sqle)", + "file" : "DOS_Login.java", + "line" : "169", + "column" : "26", + "length" : "4", + "object" 
: "sqle" + } + } ] + }, + "allFalsePositive" : false + }, { + "vulnerability" : "Improper_Resource_Shutdown_or_Release", + "vulnerabilityStatus" : "TO VERIFY", + "cwe" : "404", + "description" : "", + "language" : "Java", + "severity" : "Low", + "link" : "http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6&pathid=6", + "filename" : "DOS_Login.java", + "gitUrl" : "", + "falsePositiveCount" : 0, + "details" : { + "103" : { + "falsePositive" : false, + "comment" : "" + } + }, + "additionalDetails" : { + "recommendedFix" : "http://CX-FLOW-CLEAN/CxWebClient/ScanQueryDescription.aspx?queryID=600&queryVersionCode=56205902&queryTitle=Improper_Resource_Shutdown_or_Release", + "categories" : "NIST SP 800-53;SC-5 Denial of Service Protection (P1)", + "results" : [ { + "1" : { + "snippet" : "\t\tconnection = DatabaseUtilities.makeConnection(s);", + "file" : "DOS_Login.java", + "line" : "103", + "column" : "48", + "length" : "1", + "object" : "makeConnection" + }, + "2" : { + "snippet" : "\t\tconnection = DatabaseUtilities.makeConnection(s);", + "file" : "DOS_Login.java", + "line" : "103", + "column" : "3", + "length" : "10", + "object" : "connection" + }, + "3" : { + "snippet" : "\t\tStatement statement = connection.createStatement(", + "file" : "DOS_Login.java", + "line" : "111", + "column" : "25", + "length" : "10", + "object" : "connection" + }, + "4" : { + "snippet" : "\t\tStatement statement = connection.createStatement(", + "file" : "DOS_Login.java", + "line" : "111", + "column" : "51", + "length" : "1", + "object" : "createStatement" + }, + "sink" : { + "snippet" : "\t\tStatement statement = connection.createStatement(", + "file" : "DOS_Login.java", + "line" : "111", + "column" : "51", + "length" : "1", + "object" : "createStatement" + }, + "state" : "0", + "source" : { + "snippet" : "\t\tconnection = DatabaseUtilities.makeConnection(s);", + "file" : "DOS_Login.java", + "line" : "103", + "column" : "48", + "length" : "1", + "object" : "makeConnection" + } + } ] + }, + "allFalsePositive" : false + }, { + "vulnerability" : "Use_Of_Hardcoded_Password", + "vulnerabilityStatus" : "TO VERIFY", + "cwe" : "259", + "description" : "", + "language" : "Java", + "severity" : "Low", + "link" : "http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6&pathid=7", + "filename" : "DOS_Login.java", + "gitUrl" : "", + "falsePositiveCount" : 0, + "details" : { + "64" : { + "falsePositive" : false, + "comment" : "" + } + }, + "additionalDetails" : { + "recommendedFix" : "http://CX-FLOW-CLEAN/CxWebClient/ScanQueryDescription.aspx?queryID=604&queryVersionCode=56248316&queryTitle=Use_Of_Hardcoded_Password", + "categories" : "PCI DSS v3.2;PCI DSS (3.2) - 6.5.10 - Broken authentication and session management,OWASP Top 10 2013;A2-Broken Authentication and Session Management,FISMA 2014;Identification And Authentication,NIST SP 800-53;SC-28 Protection of Information at Rest (P1),OWASP Top 10 2017;A2-Broken Authentication,OWASP Mobile Top 10 2016;M9-Reverse Engineering", + "results" : [ { + "1" : { + "snippet" : " protected final static String PASSWORD = \"Password\";", + "file" : "DOS_Login.java", + "line" : "64", + "column" : "35", + "length" : "8", + "object" : "PASSWORD" + }, + "sink" : { + "snippet" : " protected final static String PASSWORD = \"Password\";", + "file" : "DOS_Login.java", + "line" : "64", + "column" : "35", + "length" : "8", + "object" : "PASSWORD" + }, + "state" : "0", + "source" : { + "snippet" : " protected final static String PASSWORD = \"Password\";", + 
"file" : "DOS_Login.java", + "line" : "64", + "column" : "35", + "length" : "8", + "object" : "PASSWORD" + } + } ] + }, + "allFalsePositive" : false + } ], + "reportCreationTime":"Sunday, January 19, 2020 2:41:53 AM", + "deepLink":"http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6", + "scanTime":"00h:01m:30s", + "sastResults": false +} diff --git a/unittests/tools/test_checkmarx_cxflow_sast_parser.py b/unittests/tools/test_checkmarx_cxflow_sast_parser.py index 9ed62d3557..2d0d56bdaa 100644 --- a/unittests/tools/test_checkmarx_cxflow_sast_parser.py +++ b/unittests/tools/test_checkmarx_cxflow_sast_parser.py @@ -32,6 +32,45 @@ def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_1_finding(s findings = parser.get_findings(my_file_handle, test) self.assertEqual(1, len(findings)) finding = findings[0] - self.assertEqual("Reflected XSS All Clients", finding.title) + self.assertIn("Reflected XSS All Clients", finding.title) self.assertEqual(79, finding.cwe) self.assertEqual(dateutil.parser.parse("Sunday, January 19, 2020 2:40:11 AM"), finding.date) + self.assertEqual("14660819" + "88", finding.unique_id_from_tool) + self.assertEqual("getRawParameter", finding.sast_source_object) + self.assertEqual("username", finding.sast_sink_object) + self.assertEqual("DOS_Login.java", finding.sast_source_file_path) + self.assertEqual("88", finding.sast_source_line) + self.assertEqual("14660819", finding.vuln_id_from_tool) + self.assertEqual("High", finding.severity) + self.assertEqual("107", finding.line) + self.assertEqual(False, finding.false_p) + self.assertIn("Java", finding.description) + self.assertIn("http://CX-FLOW-CLEAN/CxWebClient/ViewerMain.aspx?scanid=1000026&projectid=6&pathid=2", + finding.description) + self.assertIn("PCI DSS v3.2;PCI DSS (3.2) - 6.5.7 - Cross-site scripting (XSS),OWASP Top 10 " + "2013;A3-Cross-Site Scripting (XSS),FISMA 2014;System And Information Integrity," + "NIST SP 800-53;SI-15 Information Output Filtering (P0),OWASP Top 10 2017;A7-Cross-Site " + "Scripting (XSS)", finding.description) + self.assertEqual(True, finding.active) + self.assertEqual(False, finding.verified) + + def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_11_finding(self): + my_file_handle, product, engagement, test = self.init( + get_unit_tests_path() + "/scans/checkmarx_cxflow_sast/4-findings.json" + ) + parser = CheckmarxCXFlowSastParser() + findings = parser.get_findings(my_file_handle, test) + self.assertEqual(4, len(findings)) + for finding in findings: + self.assertIsNotNone(finding.title) + self.assertIsNotNone(finding.date) + self.assertIsNotNone(finding.sast_source_object) + self.assertIsNotNone(finding.unique_id_from_tool) + self.assertIsNotNone(finding.sast_sink_object) + self.assertIsNotNone(finding.sast_source_file_path) + self.assertIsNotNone(finding.sast_source_line) + self.assertIsNotNone(finding.vuln_id_from_tool) + self.assertIsNotNone(finding.severity) + self.assertIsNotNone(finding.line) + self.assertIsNotNone(finding.false_p) + self.assertIsNotNone(finding.description) From ea3f6d4895f2444751b25385e5ce5fb41f226c62 Mon Sep 17 00:00:00 2001 From: biennd4 Date: Tue, 12 Mar 2024 11:48:11 +0700 Subject: [PATCH 05/21] add dedup aglo --- dojo/settings/settings.dist.py | 1 + dojo/tools/checkmarx_cxflow_sast/parser.py | 10 ++++++---- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 8d131b88c6..54e83542eb 100644 --- a/dojo/settings/settings.dist.py +++ 
b/dojo/settings/settings.dist.py @@ -1463,6 +1463,7 @@ def saml2_attrib_map_format(dict): 'MobSF Scan': DEDUPE_ALGO_HASH_CODE, 'OSV Scan': DEDUPE_ALGO_HASH_CODE, 'Nosey Parker Scan': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE, + 'Checkmarx CxFlow SAST': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, } # Override the hardcoded settings here via the env var diff --git a/dojo/tools/checkmarx_cxflow_sast/parser.py b/dojo/tools/checkmarx_cxflow_sast/parser.py index 9c6842bff8..33eca17090 100644 --- a/dojo/tools/checkmarx_cxflow_sast/parser.py +++ b/dojo/tools/checkmarx_cxflow_sast/parser.py @@ -53,7 +53,8 @@ def get_findings(self, file, test): if file.name.strip().lower().endswith(".json"): return self._get_findings_json(file, test) else: - return [] + logger.warning(f"Not supported file format ${file}") + return list() def _get_findings_json(self, file, test): data = json.load(file) @@ -124,7 +125,7 @@ def _get_findings_json(self, file, test): severity=severity, file_path=filename, line=detail.sink.line, - false_p=issue.get("details")[detail_key].get("falsePositive"), + false_p=issue.get("details")[detail_key].get("falsePositive") or self.is_not_exploitable(detail.state), description=finding_detail, verified=self.is_verify(detail.state), active=self.is_active(detail.state) @@ -135,6 +136,7 @@ def _get_findings_json(self, file, test): return findings def _get_findings_xml(self): + # TODO: move logic from checkmarx to here pass def is_verify(self, state): @@ -147,5 +149,5 @@ def is_active(self, state): activeStates = ["0", "2", "3", "4"] return state in activeStates - def is_mitigated(self, state): - pass + def is_not_exploitable(self, state): + return state == "1" From ced3444a3517fb5bec7fe88b75025f43c0a45b93 Mon Sep 17 00:00:00 2001 From: biennd4 Date: Tue, 12 Mar 2024 11:53:47 +0700 Subject: [PATCH 06/21] integration docs --- .../en/integrations/parsers/file/checkmarx_cxflow_sast.md | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 docs/content/en/integrations/parsers/file/checkmarx_cxflow_sast.md diff --git a/docs/content/en/integrations/parsers/file/checkmarx_cxflow_sast.md b/docs/content/en/integrations/parsers/file/checkmarx_cxflow_sast.md new file mode 100644 index 0000000000..d8eb9f7b3f --- /dev/null +++ b/docs/content/en/integrations/parsers/file/checkmarx_cxflow_sast.md @@ -0,0 +1,8 @@ +--- +title: "Checkmarx CxFlow SAST" +toc_hide: true +--- +- `Checkmarx CxFlow SAST`: JSON report from Checkmarx Cxflow. + +### Sample Scan Data +Sample Checkmarx CxFlow SAST scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/checkmarx_cxflow_sast). 
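The triage-state handling that patches 04 and 05 settle on works like this: CxFlow reports the Checkmarx state as a numeric string, and the parser maps it onto the finding flags per its own comments ("0" To Verify is active; "1" Not Exploitable becomes a false positive; "2" Confirmed and "3" Urgent are verified and active; "4" Proposed Not Exploitable stays active). The sketch below drives the finished parser directly, mirroring init() from the unit tests above. It is a minimal sketch, assuming a configured DefectDojo Django environment; the path "cxflow-report.json" is a hypothetical stand-in for a real CxFlow JSON report.

    from dojo.models import Engagement, Product, Test
    from dojo.tools.checkmarx_cxflow_sast.parser import CheckmarxCXFlowSastParser

    # Minimal object graph the parser expects, built the same way as in the tests.
    product = Product()
    engagement = Engagement()
    engagement.product = product
    test = Test()
    test.engagement = engagement

    parser = CheckmarxCXFlowSastParser()
    with open("cxflow-report.json") as report:  # hypothetical CxFlow JSON report
        findings = parser.get_findings(report, test)

    for finding in findings:
        # For state "0" (To Verify) the parser yields active=True and verified=False;
        # false_p additionally reflects the report's per-line falsePositive flag.
        print(finding.title, finding.severity, finding.active, finding.verified)

Because deduplication for this scan type is configured as DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL in patch 05, the unique_id_from_tool built from similarityId plus the source line (for example "14660819" + "88" in the tests) is what keeps re-imported findings from duplicating.
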
From 31c74037201f7fc6846926ff63e2f38d0aa0b27e Mon Sep 17 00:00:00 2001 From: biennd4 Date: Tue, 12 Mar 2024 11:56:57 +0700 Subject: [PATCH 07/21] commented unused var --- dojo/tools/checkmarx_cxflow_sast/parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dojo/tools/checkmarx_cxflow_sast/parser.py b/dojo/tools/checkmarx_cxflow_sast/parser.py index 33eca17090..8a13a6961b 100644 --- a/dojo/tools/checkmarx_cxflow_sast/parser.py +++ b/dojo/tools/checkmarx_cxflow_sast/parser.py @@ -59,7 +59,7 @@ def get_findings(self, file, test): def _get_findings_json(self, file, test): data = json.load(file) findings = [] - deepLink = data.get("deepLink") + # deepLink = data.get("deepLink") additional_details = data.get("additionalDetails") scan_start_date = additional_details.get("scanStartDate") From f9cdafb72881454741e8fbb3dd2358dfb2c79fd5 Mon Sep 17 00:00:00 2001 From: biennd4 Date: Tue, 12 Mar 2024 11:58:22 +0700 Subject: [PATCH 08/21] Revert "Merge remote-tracking branch 'upstream/dev' into feature-checkmarx-cxflow-sast" This reverts commit b167f2b5205b427ac0b26ae7fd3f6b4667a01cde, reversing changes made to 5257a25204dbc9e6603b3b64bc1d78eddb824140. --- .github/workflows/k8s-tests.yml | 12 + Dockerfile.integration-tests-debian | 2 +- components/yarn.lock | 4 + .../parsers/file/checkmarx_one.md | 8 - .../en/integrations/parsers/file/crunch42.md | 8 - .../parsers/file/npm_audit_7_plus.md | 26 -- .../en/integrations/parsers/file/pip_audit.md | 38 +- dojo/endpoint/views.py | 18 +- dojo/jira_link/helper.py | 190 +++----- dojo/locale/en/LC_MESSAGES/django.po | 2 +- dojo/models.py | 2 +- dojo/product/views.py | 158 +++---- dojo/settings/settings.dist.py | 5 - dojo/static/dojo/js/metrics.js | 6 + dojo/templates/base.html | 6 +- dojo/templates/dojo/endpoints.html | 15 +- dojo/templates/dojo/product.html | 4 + dojo/templates/dojo/view_endpoint.html | 8 +- dojo/tools/checkmarx/parser.py | 6 +- dojo/tools/checkmarx_one/__init__.py | 0 dojo/tools/checkmarx_one/parser.py | 110 ----- dojo/tools/crunch42/__init__.py | 0 dojo/tools/crunch42/parser.py | 88 ---- dojo/tools/npm_audit_7_plus/__init__.py | 0 dojo/tools/npm_audit_7_plus/parser.py | 225 --------- dojo/tools/pip_audit/parser.py | 136 ++---- dojo/utils.py | 7 +- helm/defectdojo/Chart.yaml | 2 +- .../scans/checkmarx_one/checkmarx_one.json | 284 ----------- .../scans/checkmarx_one/many_findings.json | 258 ---------- .../scans/checkmarx_one/no_findings.json | 6 - .../crunch42/crunch42_many_findings.json | 251 ---------- .../crunch42/crunch42_many_findings2.json | 442 ------------------ .../scans/npm_audit_7_plus/many_vulns.json | 188 -------- unittests/scans/npm_audit_7_plus/no_vuln.json | 23 - .../scans/npm_audit_7_plus/one_vuln.json | 75 --- unittests/scans/pip_audit/empty_new.json | 3 - unittests/scans/pip_audit/many_vulns_new.json | 91 ---- unittests/scans/pip_audit/zero_vulns_new.json | 18 - unittests/tools/test_checkmarx_one_parser.py | 47 -- unittests/tools/test_checkmarx_parser.py | 54 +-- unittests/tools/test_crunch42_parser.py | 32 -- .../tools/test_npm_audit_7_plus_parser.py | 41 -- unittests/tools/test_pip_audit_parser.py | 135 +++--- 44 files changed, 348 insertions(+), 2686 deletions(-) delete mode 100644 docs/content/en/integrations/parsers/file/checkmarx_one.md delete mode 100644 docs/content/en/integrations/parsers/file/crunch42.md delete mode 100644 docs/content/en/integrations/parsers/file/npm_audit_7_plus.md delete mode 100644 dojo/tools/checkmarx_one/__init__.py delete mode 100644 dojo/tools/checkmarx_one/parser.py 
delete mode 100644 dojo/tools/crunch42/__init__.py delete mode 100644 dojo/tools/crunch42/parser.py delete mode 100644 dojo/tools/npm_audit_7_plus/__init__.py delete mode 100644 dojo/tools/npm_audit_7_plus/parser.py delete mode 100644 unittests/scans/checkmarx_one/checkmarx_one.json delete mode 100644 unittests/scans/checkmarx_one/many_findings.json delete mode 100644 unittests/scans/checkmarx_one/no_findings.json delete mode 100644 unittests/scans/crunch42/crunch42_many_findings.json delete mode 100644 unittests/scans/crunch42/crunch42_many_findings2.json delete mode 100644 unittests/scans/npm_audit_7_plus/many_vulns.json delete mode 100644 unittests/scans/npm_audit_7_plus/no_vuln.json delete mode 100644 unittests/scans/npm_audit_7_plus/one_vuln.json delete mode 100644 unittests/scans/pip_audit/empty_new.json delete mode 100644 unittests/scans/pip_audit/many_vulns_new.json delete mode 100644 unittests/scans/pip_audit/zero_vulns_new.json delete mode 100644 unittests/tools/test_checkmarx_one_parser.py delete mode 100644 unittests/tools/test_crunch42_parser.py delete mode 100644 unittests/tools/test_npm_audit_7_plus_parser.py diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml index dd34b88d76..f5ec107d83 100644 --- a/.github/workflows/k8s-tests.yml +++ b/.github/workflows/k8s-tests.yml @@ -29,6 +29,14 @@ env: --set mysql.enabled=false \ --set createPostgresqlSecret=true \ " + HELM_PGHA_DATABASE_SETTINGS: " \ + --set database=postgresqlha \ + --set postgresql.enabled=false \ + --set mysql.enabled=false \ + --set postgresqlha.enabled=true \ + --set createPostgresqlHaSecret=true \ + --set createPostgresqlHaPgpoolSecret=true \ + " jobs: setting_minikube_cluster: name: Kubernetes Deployment @@ -56,6 +64,10 @@ jobs: brokers: redis k8s: 'v1.23.9' os: debian + - databases: pgsqlha + brokers: rabbit + k8s: 'v1.23.9' + os: debian - databases: pgsql brokers: rabbit k8s: 'v1.23.9' diff --git a/Dockerfile.integration-tests-debian b/Dockerfile.integration-tests-debian index 545e4e3ef7..c7db1f1fee 100644 --- a/Dockerfile.integration-tests-debian +++ b/Dockerfile.integration-tests-debian @@ -1,7 +1,7 @@ # code: language=Dockerfile -FROM openapitools/openapi-generator-cli:v7.4.0@sha256:579832bed49ea6c275ce2fb5f2d515f5b03d2b6243f3c80fa8430e4f5a770e9a as openapitools +FROM openapitools/openapi-generator-cli:v7.3.0@sha256:74b9992692c836e42a02980db4b76bee94e17075e4487cd80f5c540dd57126b9 as openapitools FROM python:3.11.4-slim-bullseye@sha256:40319d0a897896e746edf877783ef39685d44e90e1e6de8d964d0382df0d4952 as build WORKDIR /app RUN \ diff --git a/components/yarn.lock b/components/yarn.lock index d3d65c363f..ffe72a3aaf 100644 --- a/components/yarn.lock +++ b/components/yarn.lock @@ -538,6 +538,10 @@ fast-levenshtein@~2.0.6: resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== +flot-axis@markrcote/flot-axislabels#*: + version "0.0.0" + resolved "https://codeload.github.com/markrcote/flot-axislabels/tar.gz/a181e09d04d120d05e5bc2baaa8738b5b3670428" + flot@flot/flot#~0.8.3: version "0.8.3" resolved "https://codeload.github.com/flot/flot/tar.gz/453b017cc5acfd75e252b93e8635f57f4196d45d" diff --git a/docs/content/en/integrations/parsers/file/checkmarx_one.md b/docs/content/en/integrations/parsers/file/checkmarx_one.md deleted file mode 100644 index 1d5a07f0ca..0000000000 --- 
a/docs/content/en/integrations/parsers/file/checkmarx_one.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -title: "Checkmarx One Scan" -toc_hide: true ---- -Import JSON Checkmarx One scanner reports - -### Sample Scan Data -Sample Checkmarx One scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/checkmarx_one). \ No newline at end of file diff --git a/docs/content/en/integrations/parsers/file/crunch42.md b/docs/content/en/integrations/parsers/file/crunch42.md deleted file mode 100644 index e8aa1b1e55..0000000000 --- a/docs/content/en/integrations/parsers/file/crunch42.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -title: "Crunch42 Scan" -toc_hide: true ---- -Import JSON findings from Crunch42 vulnerability scan tool. - -### Sample Scan Data -Sample Crunch42 Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/crunch42). \ No newline at end of file diff --git a/docs/content/en/integrations/parsers/file/npm_audit_7_plus.md b/docs/content/en/integrations/parsers/file/npm_audit_7_plus.md deleted file mode 100644 index a4b4a090b0..0000000000 --- a/docs/content/en/integrations/parsers/file/npm_audit_7_plus.md +++ /dev/null @@ -1,26 +0,0 @@ ---- -title: "NPM Audit Version 7+" -toc_hide: true ---- - -**Note: This parser only supports import from NPM Audit v7 or newer.** - -Node Package Manager (NPM) Audit plugin output file can be imported in -JSON format. Only imports the \'vulnerabilities\' subtree. - -### File Types -This parser expects a JSON file. Can only import NPM Audit files from NPM Audit v7 or newer. It aims to provide the same -information as the non-JSON formatted output. - -Attempting to import a file from a version less than 7 of NPM Audit will raise an error message. - -### Command Used To Generate Output -Either of these commands will work: -- \`npm audit --json\` -- \`npm audit fix --dry-run --json\` - -### Sample Scan Data -Sample NPM Audit scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/npm_audit_7_plus). - -### Link To Tool -See NPM-Audit-Report on GitHub: https://github.com/npm/npm-audit-report/ diff --git a/docs/content/en/integrations/parsers/file/pip_audit.md b/docs/content/en/integrations/parsers/file/pip_audit.md index 96b9b250d5..df24cdbe7a 100644 --- a/docs/content/en/integrations/parsers/file/pip_audit.md +++ b/docs/content/en/integrations/parsers/file/pip_audit.md @@ -2,41 +2,7 @@ title: "pip-audit Scan" toc_hide: true --- - -Import pip-audit JSON scan report. - -### File Types -This parser expects a JSON file. - -The parser can handle legacy and current JSON format. - -The current format has added a `dependencies` element: - - { - "dependencies": [ - { - "name": "pyopenssl", - "version": "23.1.0", - "vulns": [] - }, - ... - ] - ... - } - -The legacy format does not include the `dependencies` key: - - [ - { - "name": "adal", - "version": "1.2.2", - "vulns": [] - }, - ... - ] +Import pip-audit JSON scan report ### Sample Scan Data -Sample pip-audit Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/pip_audit). - -### Link To Tool -[pip-audit](https://pypi.org/project/pip-audit/) +Sample pip-audit Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/pip_audit). 
\ No newline at end of file diff --git a/dojo/endpoint/views.py b/dojo/endpoint/views.py index 0f5b7676c7..c2b491eb1a 100644 --- a/dojo/endpoint/views.py +++ b/dojo/endpoint/views.py @@ -33,6 +33,12 @@ def process_endpoints_view(request, host_view=False, vulnerable=False): if vulnerable: endpoints = Endpoint.objects.filter( + finding__active=True, + finding__verified=True, + finding__out_of_scope=False, + finding__mitigated__isnull=True, + finding__false_p=False, + finding__duplicate=False, status_endpoint__mitigated=False, status_endpoint__false_positive=False, status_endpoint__out_of_scope=False, @@ -118,12 +124,12 @@ def process_endpoint_view(request, eid, host_view=False): endpoints = endpoint.host_endpoints() endpoint_metadata = None all_findings = endpoint.host_findings() - active_findings = endpoint.host_active_findings() + active_verified_findings = endpoint.host_active_verified_findings() else: endpoints = None endpoint_metadata = dict(endpoint.endpoint_meta.values_list('name', 'value')) all_findings = endpoint.findings.all() - active_findings = endpoint.active_findings() + active_verified_findings = endpoint.active_verified_findings() if all_findings: start_date = timezone.make_aware(datetime.combine(all_findings.last().date, datetime.min.time())) @@ -142,8 +148,12 @@ def process_endpoint_view(request, eid, host_view=False): monthly_counts = get_period_counts(all_findings, closed_findings, None, months_between, start_date, relative_delta='months') - paged_findings = get_page_items(request, active_findings, 25) - vulnerable = active_findings.count() != 0 + paged_findings = get_page_items(request, active_verified_findings, 25) + + vulnerable = False + + if active_verified_findings.count() != 0: + vulnerable = True product_tab = Product_Tab(endpoint.product, "Host" if host_view else "Endpoint", tab="endpoints") return render(request, diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py index 7b20a8cc10..4f7360fc46 100644 --- a/dojo/jira_link/helper.py +++ b/dojo/jira_link/helper.py @@ -1,5 +1,4 @@ import logging -from typing import Any from dojo.utils import add_error_message_to_response, get_system_setting, to_str_typed import os import io @@ -696,13 +695,6 @@ def prepare_jira_issue_fields( def add_jira_issue(obj, *args, **kwargs): - def failure_to_add_message(message: str, exception: Exception, object: Any) -> bool: - if exception: - logger.exception(exception) - logger.error(message) - log_jira_alert(message, obj) - return False - logger.info('trying to create a new jira issue for %d:%s', obj.id, to_str_typed(obj)) if not is_jira_enabled(): @@ -710,7 +702,9 @@ def failure_to_add_message(message: str, exception: Exception, object: Any) -> b if not is_jira_configured_and_enabled(obj): message = 'Object %s cannot be pushed to JIRA as there is no JIRA configuration for %s.' 
% (obj.id, to_str_typed(obj)) - return failure_to_add_message(message, None, obj) + logger.error(message) + log_jira_alert(message, obj) + return False jira_project = get_jira_project(obj) jira_instance = get_jira_instance(obj) @@ -725,23 +719,19 @@ def failure_to_add_message(message: str, exception: Exception, object: Any) -> b logger.warning("The JIRA issue will NOT be created.") return False logger.debug('Trying to create a new JIRA issue for %s...', to_str_typed(obj)) - # Attempt to get the jira connection + meta = None try: JIRAError.log_to_tempfile = False jira = get_jira_connection(jira_instance) - except Exception as e: - message = f"The following jira instance could not be connected: {jira_instance} - {e.text}" - return failure_to_add_message(message, e, obj) - # Set the list of labels to set on the jira issue - labels = get_labels(obj) + get_tags(obj) - if labels: - labels = list(dict.fromkeys(labels)) # de-dup - # Determine what due date to set on the jira issue - duedate = None - if System_Settings.objects.get().enable_finding_sla: - duedate = obj.sla_deadline() - # Set the fields that will compose the jira issue - try: + + labels = get_labels(obj) + get_tags(obj) + if labels: + labels = list(dict.fromkeys(labels)) # de-dup + + duedate = None + if System_Settings.objects.get().enable_finding_sla: + duedate = obj.sla_deadline() + issuetype_fields = get_issuetype_fields(jira, jira_project.project_key, jira_instance.default_issue_type) fields = prepare_jira_issue_fields( project_key=jira_project.project_key, @@ -757,40 +747,16 @@ def failure_to_add_message(message: str, exception: Exception, object: Any) -> b duedate=duedate, issuetype_fields=issuetype_fields, default_assignee=jira_project.default_assignee) - except TemplateDoesNotExist as e: - message = f"Failed to find a jira issue template to be used - {e}" - return failure_to_add_message(message, e, obj) - except Exception as e: - message = f"Failed to fetch fields for {jira_instance.default_issue_type} under project {jira_project.project_key} - {e}" - return failure_to_add_message(message, e, obj) - # Create a new issue in Jira with the fields set in the last step - try: + logger.debug('sending fields to JIRA: %s', fields) new_issue = jira.create_issue(fields) - logger.debug('saving JIRA_Issue for %s finding %s', new_issue.key, obj.id) - j_issue = JIRA_Issue(jira_id=new_issue.id, jira_key=new_issue.key, jira_project=jira_project) - j_issue.set_obj(obj) - j_issue.jira_creation = timezone.now() - j_issue.jira_change = timezone.now() - j_issue.save() - jira.issue(new_issue.id) - logger.info('Created the following jira issue for %d:%s', obj.id, to_str_typed(obj)) - except Exception as e: - message = f"Failed to create jira issue with the following payload: {fields} - {e}" - return failure_to_add_message(message, e, obj) - # Attempt to set a default assignee - try: if jira_project.default_assignee: created_assignee = str(new_issue.get_field('assignee')) logger.debug("new issue created with assignee %s", created_assignee) if created_assignee != jira_project.default_assignee: jira.assign_issue(new_issue.key, jira_project.default_assignee) - except Exception as e: - message = f"Failed to assign the default user: {jira_project.default_assignee} - {e}" - # Do not return here as this should be a soft failure that should be logged - failure_to_add_message(message, e, obj) - # Upload dojo finding screenshots to Jira - try: + + # Upload dojo finding screenshots to Jira findings = [obj] if isinstance(obj, Finding_Group): findings = 
obj.findings.all() @@ -805,22 +771,7 @@ def failure_to_add_message(message: str, exception: Exception, object: Any) -> b settings.MEDIA_ROOT + '/' + pic) except FileNotFoundError as e: logger.info(e) - except Exception as e: - message = f"Failed to attach attachments to the jira issue: {e}" - # Do not return here as this should be a soft failure that should be logged - failure_to_add_message(message, e, obj) - # Add any notes that already exist in the finding to the JIRA - try: - for find in findings: - if find.notes.all(): - for note in find.notes.all().reverse(): - add_comment(obj, note) - except Exception as e: - message = f"Failed to add notes to the jira ticket: {e}" - # Do not return here as this should be a soft failure that should be logged - failure_to_add_message(message, e, obj) - # Determine whether to assign this new jira issue to a mapped epic - try: + if jira_project.enable_engagement_epic_mapping: eng = obj.test.engagement logger.debug('Adding to EPIC Map: %s', eng.name) @@ -829,11 +780,36 @@ def failure_to_add_message(message: str, exception: Exception, object: Any) -> b add_issues_to_epic(jira, obj, epic_id=epic.jira_id, issue_keys=[str(new_issue.id)], ignore_epics=True) else: logger.info('The following EPIC does not exist: %s', eng.name) - except Exception as e: - message = f"Failed to assign jira issue to existing epic: {e}" - return failure_to_add_message(message, e, obj) - return True + # only link the new issue if it was successfully created, incl attachments and epic link + logger.debug('saving JIRA_Issue for %s finding %s', new_issue.key, obj.id) + j_issue = JIRA_Issue( + jira_id=new_issue.id, jira_key=new_issue.key, jira_project=jira_project) + j_issue.set_obj(obj) + + j_issue.jira_creation = timezone.now() + j_issue.jira_change = timezone.now() + j_issue.save() + jira.issue(new_issue.id) + + logger.info('Created the following jira issue for %d:%s', obj.id, to_str_typed(obj)) + + # Add any notes that already exist in the finding to the JIRA + for find in findings: + if find.notes.all(): + for note in find.notes.all().reverse(): + add_comment(obj, note) + + return True + except TemplateDoesNotExist as e: + logger.exception(e) + log_jira_alert(str(e), obj) + return False + except JIRAError as e: + logger.exception(e) + logger.error("jira_meta for project: %s and url: %s meta: %s", jira_project.project_key, jira_project.jira_instance.url, json.dumps(meta, indent=4)) # this is None safe + log_jira_alert(e.text, obj) + return False # we need two separate celery tasks due to the decorators we're using to map to/from ids @@ -855,13 +831,6 @@ def update_jira_issue_for_finding_group(finding_group, *args, **kwargs): def update_jira_issue(obj, *args, **kwargs): - def failure_to_update_message(message: str, exception: Exception, obj: Any) -> bool: - if exception: - logger.exception(exception) - logger.error(message) - log_jira_alert(message, obj) - return False - logger.debug('trying to update a linked jira issue for %d:%s', obj.id, to_str_typed(obj)) if not is_jira_enabled(): @@ -872,22 +841,21 @@ def failure_to_update_message(message: str, exception: Exception, obj: Any) -> b if not is_jira_configured_and_enabled(obj): message = 'Object %s cannot be pushed to JIRA as there is no JIRA configuration for %s.' 
% (obj.id, to_str_typed(obj)) - return failure_to_update_message(message, None, obj) + logger.error(message) + log_jira_alert(message, obj) + return False j_issue = obj.jira_issue + meta = None try: JIRAError.log_to_tempfile = False jira = get_jira_connection(jira_instance) issue = jira.issue(j_issue.jira_id) - except Exception as e: - message = f"The following jira instance could not be connected: {jira_instance} - {e}" - return failure_to_update_message(message, e, obj) - # Set the list of labels to set on the jira issue - labels = get_labels(obj) + get_tags(obj) - if labels: - labels = list(dict.fromkeys(labels)) # de-dup - # Set the fields that will compose the jira issue - try: + + labels = get_labels(obj) + get_tags(obj) + if labels: + labels = list(dict.fromkeys(labels)) # de-dup + issuetype_fields = get_issuetype_fields(jira, jira_project.project_key, jira_instance.default_issue_type) fields = prepare_jira_issue_fields( project_key=jira_project.project_key, @@ -900,38 +868,26 @@ def failure_to_update_message(message: str, exception: Exception, obj: Any) -> b # Do not update the priority in jira after creation as this could have changed in jira, but should not change in dojo # priority_name=jira_priority(obj), issuetype_fields=issuetype_fields) - except Exception as e: - message = f"Failed to fetch fields for {jira_instance.default_issue_type} under project {jira_project.project_key} - {e}" - return failure_to_update_message(message, e, obj) - # Update the issue in jira - try: + logger.debug('sending fields to JIRA: %s', fields) + issue.update( summary=fields['summary'], description=fields['description'], # Do not update the priority in jira after creation as this could have changed in jira, but should not change in dojo # priority=fields['priority'], fields=fields) - j_issue.jira_change = timezone.now() - j_issue.save() - except Exception as e: - message = f"Failed to update the jira issue with the following payload: {fields} - {e}" - return failure_to_update_message(message, e, obj) - # Update the status in jira - try: + push_status_to_jira(obj, jira_instance, jira, issue) - except Exception as e: - message = f"Failed to update the jira issue status - {e}" - return failure_to_update_message(message, e, obj) - # Upload dojo finding screenshots to Jira - try: + + # Upload dojo finding screenshots to Jira findings = [obj] if isinstance(obj, Finding_Group): findings = obj.findings.all() for find in findings: for pic in get_file_images(find): - # It doesn't look like the celery container has anything in the media + # It doesn't look like the celery cotainer has anything in the media # folder. Has this feature ever worked? 
try: jira_attachment( @@ -939,12 +895,7 @@ def failure_to_update_message(message: str, exception: Exception, obj: Any) -> b settings.MEDIA_ROOT + '/' + pic) except FileNotFoundError as e: logger.info(e) - except Exception as e: - message = f"Failed to attach attachments to the jira issue: {e}" - # Do not return here as this should be a soft failure that should be logged - failure_to_update_message(message, e, obj) - # Determine whether to assign this new jira issue to a mapped epic - try: + if jira_project.enable_engagement_epic_mapping: eng = find.test.engagement logger.debug('Adding to EPIC Map: %s', eng.name) @@ -953,11 +904,20 @@ def failure_to_update_message(message: str, exception: Exception, obj: Any) -> b add_issues_to_epic(jira, obj, epic_id=epic.jira_id, issue_keys=[str(j_issue.jira_id)], ignore_epics=True) else: logger.info('The following EPIC does not exist: %s', eng.name) - except Exception as e: - message = f"Failed to assign jira issue to existing epic: {e}" - return failure_to_update_message(message, e, obj) - return True + j_issue.jira_change = timezone.now() + j_issue.save() + + logger.debug('Updated the following linked jira issue for %d:%s', find.id, find.title) + return True + + except JIRAError as e: + logger.exception(e) + logger.error("jira_meta for project: %s and url: %s meta: %s", jira_project.project_key, jira_project.jira_instance.url, json.dumps(meta, indent=4)) # this is None safe + if issue_from_jira_is_active(issue): + # Only alert if the upstream JIRA is active, we don't care about closed issues + log_jira_alert(e.text, obj) + return False def get_jira_issue_from_jira(find): diff --git a/dojo/locale/en/LC_MESSAGES/django.po b/dojo/locale/en/LC_MESSAGES/django.po index 92e365e334..ab26c8cbdb 100644 --- a/dojo/locale/en/LC_MESSAGES/django.po +++ b/dojo/locale/en/LC_MESSAGES/django.po @@ -3748,7 +3748,7 @@ msgid "" "tags, references, languages or technologies contain the search query and " "products whose\n" " name, tags or description contain the " -"search query.
Advanced search operators: (Restrict results to a certain " +"search query.
Advanced search operators: (Restrict results to a certain " "type) product:,\n" " engagement:, finding:, endpoint:, tag:, " "language:, technology: or vulnerability_id:.\n" diff --git a/dojo/models.py b/dojo/models.py index 36a7d2e520..362ec399b6 100755 --- a/dojo/models.py +++ b/dojo/models.py @@ -1124,7 +1124,7 @@ def endpoint_count(self): endpoints = getattr(self, 'active_endpoints', None) if endpoints: return len(self.active_endpoints) - return 0 + return None def open_findings(self, start_date=None, end_date=None): if start_date is None or end_date is None: diff --git a/dojo/product/views.py b/dojo/product/views.py index 6291540342..ee7c3b35e8 100755 --- a/dojo/product/views.py +++ b/dojo/product/views.py @@ -112,11 +112,8 @@ def prefetch_for_product(prods): prefetched_prods = prefetched_prods.prefetch_related('members') prefetched_prods = prefetched_prods.prefetch_related('prod_type__members') active_endpoint_query = Endpoint.objects.filter( - status_endpoint__mitigated=False, - status_endpoint__false_positive=False, - status_endpoint__out_of_scope=False, - status_endpoint__risk_accepted=False, - ).distinct() + finding__active=True, + finding__mitigated__isnull=True).distinct() prefetched_prods = prefetched_prods.prefetch_related( Prefetch('endpoint_set', queryset=active_endpoint_query, to_attr='active_endpoints')) prefetched_prods = prefetched_prods.prefetch_related('tags') @@ -326,15 +323,15 @@ def finding_querys(request, prod): end_date = timezone.now() week = end_date - timedelta(days=7) # seven days and /newer are considered "new" - filters['accepted'] = findings_qs.filter(finding_helper.ACCEPTED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") + filters['accepted'] = findings_qs.filter(finding_helper.ACCEPTED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) filters['verified'] = findings_qs.filter(finding_helper.VERIFIED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") filters['new_verified'] = findings_qs.filter(finding_helper.VERIFIED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") - filters['open'] = findings_qs.filter(finding_helper.OPEN_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") - filters['inactive'] = findings_qs.filter(finding_helper.INACTIVE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") - filters['closed'] = findings_qs.filter(finding_helper.CLOSED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") - filters['false_positive'] = findings_qs.filter(finding_helper.FALSE_POSITIVE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") - filters['out_of_scope'] = findings_qs.filter(finding_helper.OUT_OF_SCOPE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") - filters['all'] = findings_qs.order_by("date") + filters['open'] = findings_qs.filter(finding_helper.OPEN_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) + filters['inactive'] = findings_qs.filter(finding_helper.INACTIVE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) + filters['closed'] = findings_qs.filter(finding_helper.CLOSED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) + filters['false_positive'] = findings_qs.filter(finding_helper.FALSE_POSITIVE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) + filters['out_of_scope'] = findings_qs.filter(finding_helper.OUT_OF_SCOPE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) + filters['all'] = 
findings_qs filters['open_vulns'] = findings_qs.filter(finding_helper.OPEN_FINDINGS_QUERY).filter( cwe__isnull=False, ).order_by('cwe').values( @@ -479,7 +476,6 @@ def view_product_metrics(request, pid): add_breadcrumb(parent=prod, top_level=False, request=request) - # An ordered dict does not make sense here. open_close_weekly = OrderedDict() severity_weekly = OrderedDict() critical_weekly = OrderedDict() @@ -487,83 +483,81 @@ def view_product_metrics(request, pid): medium_weekly = OrderedDict() open_objs_by_severity = get_zero_severity_level() - closed_objs_by_severity = get_zero_severity_level() accepted_objs_by_severity = get_zero_severity_level() - for finding in filters.get("all", []): - iso_cal = finding.date.isocalendar() - date = iso_to_gregorian(iso_cal[0], iso_cal[1], 1) - html_date = date.strftime("%m/%d
<br/>%Y
") - unix_timestamp = (tcalendar.timegm(date.timetuple()) * 1000) - - # Open findings - if finding in filters.get("open", []): - if unix_timestamp not in critical_weekly: - critical_weekly[unix_timestamp] = {'count': 0, 'week': html_date} - if unix_timestamp not in high_weekly: - high_weekly[unix_timestamp] = {'count': 0, 'week': html_date} - if unix_timestamp not in medium_weekly: - medium_weekly[unix_timestamp] = {'count': 0, 'week': html_date} - - if unix_timestamp in open_close_weekly: - open_close_weekly[unix_timestamp]['open'] += 1 + for v in filters.get('open', None): + iso_cal = v.date.isocalendar() + x = iso_to_gregorian(iso_cal[0], iso_cal[1], 1) + y = x.strftime("%m/%d
<br/>%Y
") + x = (tcalendar.timegm(x.timetuple()) * 1000) + if x not in critical_weekly: + critical_weekly[x] = {'count': 0, 'week': y} + if x not in high_weekly: + high_weekly[x] = {'count': 0, 'week': y} + if x not in medium_weekly: + medium_weekly[x] = {'count': 0, 'week': y} + + if x in open_close_weekly: + if v.mitigated: + open_close_weekly[x]['closed'] += 1 else: - open_close_weekly[unix_timestamp] = {'closed': 0, 'open': 1, 'accepted': 0} - open_close_weekly[unix_timestamp]['week'] = html_date + open_close_weekly[x]['open'] += 1 + else: + if v.mitigated: + open_close_weekly[x] = {'closed': 1, 'open': 0, 'accepted': 0} + else: + open_close_weekly[x] = {'closed': 0, 'open': 1, 'accepted': 0} + open_close_weekly[x]['week'] = y - if view == 'Finding': - severity = finding.severity - elif view == 'Endpoint': - severity = finding.finding.severity + if view == 'Finding': + severity = v.severity + elif view == 'Endpoint': + severity = v.finding.severity - if unix_timestamp in severity_weekly: - if severity in severity_weekly[unix_timestamp]: - severity_weekly[unix_timestamp][severity] += 1 - else: - severity_weekly[unix_timestamp][severity] = 1 + if x in severity_weekly: + if severity in severity_weekly[x]: + severity_weekly[x][severity] += 1 else: - severity_weekly[unix_timestamp] = get_zero_severity_level() - severity_weekly[unix_timestamp][severity] = 1 - severity_weekly[unix_timestamp]['week'] = html_date + severity_weekly[x][severity] = 1 + else: + severity_weekly[x] = get_zero_severity_level() + severity_weekly[x][severity] = 1 + severity_weekly[x]['week'] = y - if severity == 'Critical': - if unix_timestamp in critical_weekly: - critical_weekly[unix_timestamp]['count'] += 1 - else: - critical_weekly[unix_timestamp] = {'count': 1, 'week': html_date} - elif severity == 'High': - if unix_timestamp in high_weekly: - high_weekly[unix_timestamp]['count'] += 1 - else: - high_weekly[unix_timestamp] = {'count': 1, 'week': html_date} - elif severity == 'Medium': - if unix_timestamp in medium_weekly: - medium_weekly[unix_timestamp]['count'] += 1 - else: - medium_weekly[unix_timestamp] = {'count': 1, 'week': html_date} - # Optimization: count severity level on server side - if open_objs_by_severity.get(finding.severity) is not None: - open_objs_by_severity[finding.severity] += 1 - # Close findings - if finding in filters.get("closed", []): - if unix_timestamp in open_close_weekly: - open_close_weekly[unix_timestamp]['closed'] += 1 + if severity == 'Critical': + if x in critical_weekly: + critical_weekly[x]['count'] += 1 else: - open_close_weekly[unix_timestamp] = {'closed': 1, 'open': 0, 'accepted': 0} - open_close_weekly[unix_timestamp]['week'] = html_date - # Optimization: count severity level on server side - if closed_objs_by_severity.get(finding.severity) is not None: - closed_objs_by_severity[finding.severity] += 1 - # Risk Accepted findings - if finding in filters.get("accepted", []): - if unix_timestamp in open_close_weekly: - open_close_weekly[unix_timestamp]['accepted'] += 1 + critical_weekly[x] = {'count': 1, 'week': y} + elif severity == 'High': + if x in high_weekly: + high_weekly[x]['count'] += 1 else: - open_close_weekly[unix_timestamp] = {'closed': 0, 'open': 0, 'accepted': 1} - open_close_weekly[unix_timestamp]['week'] = html_date - # Optimization: count severity level on server side - if accepted_objs_by_severity.get(finding.severity) is not None: - accepted_objs_by_severity[finding.severity] += 1 + high_weekly[x] = {'count': 1, 'week': y} + elif severity == 'Medium': + if x in 
medium_weekly: + medium_weekly[x]['count'] += 1 + else: + medium_weekly[x] = {'count': 1, 'week': y} + + # Optimization: count severity level on server side + if open_objs_by_severity.get(v.severity) is not None: + open_objs_by_severity[v.severity] += 1 + + for a in filters.get('accepted', None): + iso_cal = a.date.isocalendar() + x = iso_to_gregorian(iso_cal[0], iso_cal[1], 1) + y = x.strftime("%m/%d
<br/>%Y
") + x = (tcalendar.timegm(x.timetuple()) * 1000) + + if x in open_close_weekly: + open_close_weekly[x]['accepted'] += 1 + else: + open_close_weekly[x] = {'closed': 0, 'open': 0, 'accepted': 1} + open_close_weekly[x]['week'] = y + + if accepted_objs_by_severity.get(a.severity) is not None: + accepted_objs_by_severity[a.severity] += 1 test_data = {} for t in tests: @@ -590,7 +584,7 @@ def view_product_metrics(request, pid): 'inactive_objs': filters.get('inactive', None), 'inactive_objs_by_severity': sum_by_severity_level(filters.get('inactive')), 'closed_objs': filters.get('closed', None), - 'closed_objs_by_severity': closed_objs_by_severity, + 'closed_objs_by_severity': sum_by_severity_level(filters.get('closed')), 'false_positive_objs': filters.get('false_positive', None), 'false_positive_objs_by_severity': sum_by_severity_level(filters.get('false_positive')), 'out_of_scope_objs': filters.get('out_of_scope', None), diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 54e83542eb..c2d85ec397 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1187,7 +1187,6 @@ def saml2_attrib_map_format(dict): 'Nexpose Scan': ['title', 'severity', 'vulnerability_ids', 'cwe'], # possible improvement: in the scanner put the library name into file_path, then dedup on cwe + file_path + severity 'NPM Audit Scan': ['title', 'severity', 'file_path', 'vulnerability_ids', 'cwe'], - 'NPM Audit v7+ Scan': ['title', 'severity', 'cwe', 'vuln_id_from_tool'], # possible improvement: in the scanner put the library name into file_path, then dedup on cwe + file_path + severity 'Yarn Audit Scan': ['title', 'severity', 'file_path', 'vulnerability_ids', 'cwe'], # possible improvement: in the scanner put the library name into file_path, then dedup on vulnerability_ids + file_path + severity @@ -1281,7 +1280,6 @@ def saml2_attrib_map_format(dict): 'Tenable Scan': True, 'Nexpose Scan': True, 'NPM Audit Scan': True, - 'NPM Audit v7+ Scan': True, 'Yarn Audit Scan': True, 'Mend Scan': True, 'ZAP Scan': False, @@ -1364,12 +1362,10 @@ def saml2_attrib_map_format(dict): 'CargoAudit Scan': DEDUPE_ALGO_HASH_CODE, 'Checkmarx Scan detailed': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, 'Checkmarx Scan': DEDUPE_ALGO_HASH_CODE, - 'Checkmarx One Scan': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, 'Checkmarx OSA': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE, 'Codechecker Report native': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, 'Coverity API': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, 'Cobalt.io API': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, - 'Crunch42 Scan': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, 'Dependency Track Finding Packaging Format (FPF) Export': DEDUPE_ALGO_HASH_CODE, 'Mobsfscan Scan': DEDUPE_ALGO_HASH_CODE, 'SonarQube Scan detailed': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, @@ -1381,7 +1377,6 @@ def saml2_attrib_map_format(dict): 'Tenable Scan': DEDUPE_ALGO_HASH_CODE, 'Nexpose Scan': DEDUPE_ALGO_HASH_CODE, 'NPM Audit Scan': DEDUPE_ALGO_HASH_CODE, - 'NPM Audit v7+ Scan': DEDUPE_ALGO_HASH_CODE, 'Yarn Audit Scan': DEDUPE_ALGO_HASH_CODE, 'Mend Scan': DEDUPE_ALGO_HASH_CODE, 'ZAP Scan': DEDUPE_ALGO_HASH_CODE, diff --git a/dojo/static/dojo/js/metrics.js b/dojo/static/dojo/js/metrics.js index 2e95555d37..392ad2ac6f 100644 --- a/dojo/static/dojo/js/metrics.js +++ b/dojo/static/dojo/js/metrics.js @@ -1618,6 +1618,8 @@ function open_close_weekly(opened, closed, accepted, ticks) { var options = { xaxes: [{ ticks: ticks, + transform: function(v) { return -v; }, + inverseTransform: function(v) { return -v; } }], yaxes: [{ min: 0 @@ -1659,6 
+1661,8 @@ function severity_weekly(critical, high, medium, low, info, ticks) { var options = { xaxes: [{ ticks: ticks, + transform: function(v) { return -v; }, + inverseTransform: function(v) { return -v; } }], yaxes: [{ min: 0 @@ -1709,6 +1713,8 @@ function severity_counts_weekly(critical, high, medium, ticks) { var options = { xaxes: [{ ticks: ticks, + transform: function(v) { return -v; }, + inverseTransform: function(v) { return -v; } }], yaxes: [{ min: 0 diff --git a/dojo/templates/base.html b/dojo/templates/base.html index 2f1cace966..f4043d42e3 100644 --- a/dojo/templates/base.html +++ b/dojo/templates/base.html @@ -765,8 +765,10 @@

diff --git a/dojo/templates/dojo/endpoints.html b/dojo/templates/dojo/endpoints.html index 6597e1f747..ecaaef6d52 100644 --- a/dojo/templates/dojo/endpoints.html +++ b/dojo/templates/dojo/endpoints.html @@ -87,7 +87,7 @@

{% comment %} The display field is translated in the function. No need to translate here as well{% endcomment %} {% dojo_sort request 'Product' 'product' 'asc' %} {% endif %} - Active (Verified) Findings + Active Verified Findings Status @@ -117,10 +117,13 @@

{% endif %} {% if host_view %} - {{ e.host_active_findings_count }} ({{ e.host_active_verified_findings_count }}) + {{ e.host_active_verified_findings_count }} {% else %} - {{ e.active_findings_count }} - ({{ e.active_verified_findings_count }}) + {% if e.active_verified_findings_count > 0 %} + {{ e.active_verified_findings_count }} + {% else %} + 0 + {% endif %} {% endif %} @@ -130,10 +133,10 @@

{% if e.mitigated %} Mitigated {% else %} - {% if e.active_findings_count > 0 %} + {% if e.active_verified_findings_count > 0 %} Vulnerable {% else %} - No active findings + No active verified findings {% endif %} {% endif %} {% endif %} diff --git a/dojo/templates/dojo/product.html b/dojo/templates/dojo/product.html index d022812de8..e328557c87 100644 --- a/dojo/templates/dojo/product.html +++ b/dojo/templates/dojo/product.html @@ -248,8 +248,12 @@

{% endif %} + {% if prod.endpoint_count %} {{ prod.endpoint_host_count }} / {{ prod.endpoint_count }} + {% else %} + 0 + {% endif %} {% if prod.product_manager %} diff --git a/dojo/templates/dojo/view_endpoint.html b/dojo/templates/dojo/view_endpoint.html index d09261e5ec..30d974b8a6 100644 --- a/dojo/templates/dojo/view_endpoint.html +++ b/dojo/templates/dojo/view_endpoint.html @@ -103,7 +103,7 @@

  - Finding Age ({{ all_findings|length|apnumber }} + Finding Age ({{ all_findings|length|apnumber }} verified finding{{ all_findings|length|pluralize }})
@@ -178,9 +178,9 @@

{% if item %} {% if item.vulnerable %} - + {% else %} - + {% endif %}  {{ item|url_shortner }}{% if endpoint.is_broken %} 🚩{% endif %} {% endif %} @@ -248,7 +248,7 @@

Additional Information
-

Open Findings

+

Active Verified Findings

{% if findings %}
diff --git a/dojo/tools/checkmarx/parser.py b/dojo/tools/checkmarx/parser.py index 4f1f07d725..d8be5b8b68 100755 --- a/dojo/tools/checkmarx/parser.py +++ b/dojo/tools/checkmarx/parser.py @@ -58,7 +58,7 @@ def _get_findings_xml(self, filename, test): language = "" findingdetail = "" group = "" - find_date = parser.parse(root.get("ScanStart")).date() + find_date = parser.parse(root.get("ScanStart")) if query.get("Language") is not None: language = query.get("Language") @@ -389,9 +389,9 @@ def get_findings(self, file, test): def _parse_date(self, value): if isinstance(value, str): - return parser.parse(value).date() + return parser.parse(value) elif isinstance(value, dict) and isinstance(value.get("seconds"), int): - return datetime.datetime.utcfromtimestamp(value.get("seconds")).date() + return datetime.datetime.utcfromtimestamp(value.get("seconds")) else: return None diff --git a/dojo/tools/checkmarx_one/__init__.py b/dojo/tools/checkmarx_one/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/dojo/tools/checkmarx_one/parser.py b/dojo/tools/checkmarx_one/parser.py deleted file mode 100644 index 699ac64e42..0000000000 --- a/dojo/tools/checkmarx_one/parser.py +++ /dev/null @@ -1,110 +0,0 @@ -import datetime -import json -from dateutil import parser -from dojo.models import Finding - - -class CheckmarxOneParser(object): - def get_scan_types(self): - return ["Checkmarx One Scan"] - - def get_label_for_scan_types(self, scan_type): - return scan_type - - def get_description_for_scan_types(self, scan_type): - return "Checkmarx One Scan" - - def _parse_date(self, value): - if isinstance(value, str): - return parser.parse(value) - elif isinstance(value, dict) and isinstance(value.get("seconds"), int): - return datetime.datetime.utcfromtimestamp(value.get("seconds")) - else: - return None - - def get_findings(self, file, test): - data = json.load(file) - findings = [] - if "vulnerabilities" in data: - results = data.get("vulnerabilities", []) - for result in results: - id = result.get("identifiers")[0].get("value") - cwe = None - if 'vulnerabilityDetails' in result: - cwe = result.get("vulnerabilites").get("cweId") - severity = result.get("severity") - locations_uri = result.get("location").get("file") - locations_startLine = result.get("location").get("start_line") - locations_endLine = result.get("location").get("end_line") - finding = Finding( - unique_id_from_tool=id, - file_path=locations_uri, - title=id + "_" + locations_uri, - test=test, - cwe=cwe, - severity=severity, - description="**id**: " + str(id) + "\n" - + "**uri**: " + locations_uri + "\n" - + "**startLine**: " + str(locations_startLine) + "\n" - + "**endLine**: " + str(locations_endLine) + "\n", - false_p=False, - duplicate=False, - out_of_scope=False, - static_finding=True, - dynamic_finding=False, - ) - findings.append(finding) - elif "results" in data: - results = data.get("results", []) - for vulnerability in results: - result_type = vulnerability.get("type") - date = self._parse_date(vulnerability.get("firstFoundAt")) - cwe = None - if 'vulnerabilityDetails' in vulnerability: - cwe = vulnerability.get("vulnerabilites", {}).get("cweId") - if result_type == "sast": - descriptionDetails = vulnerability.get("description") - file_path = vulnerability.get("data").get("nodes")[0].get("fileName") - finding = Finding( - description=descriptionDetails, - title=descriptionDetails, - file_path=file_path, - date=date, - cwe=cwe, - severity=vulnerability.get("severity").title(), - test=test, - static_finding=True, - 
) - if vulnerability.get("id"): - finding.unique_id_from_tool = ( - vulnerability.get("id") - ) - else: - finding.unique_id_from_tool = str( - vulnerability.get("similarityId") - ) - findings.append(finding) - if result_type == "kics": - description = vulnerability.get("description") - file_path = vulnerability.get("data").get("filename") - finding = Finding( - title=f'{description}', - description=description, - date=date, - cwe=cwe, - severity=vulnerability.get("severity").title(), - verified=vulnerability.get("state") != "TO_VERIFY", - file_path=file_path, - test=test, - static_finding=True, - ) - if vulnerability.get("id"): - finding.unique_id_from_tool = vulnerability.get( - "id" - ) - else: - finding.unique_id_from_tool = str( - vulnerability.get("similarityId") - ) - findings.append(finding) - return findings diff --git a/dojo/tools/crunch42/__init__.py b/dojo/tools/crunch42/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/dojo/tools/crunch42/parser.py b/dojo/tools/crunch42/parser.py deleted file mode 100644 index e1a841e29a..0000000000 --- a/dojo/tools/crunch42/parser.py +++ /dev/null @@ -1,88 +0,0 @@ -import json -from dojo.models import Finding - - -class Crunch42Parser(object): - - def get_scan_types(self): - return ["Crunch42 Scan"] - - def get_label_for_scan_types(self, scan_type): - return "Crunch42 Scan" - - def get_description_for_scan_types(self, scan_type): - return "Import JSON output of Crunch42 scan report." - - def parse_json(self, json_output): - try: - data = json_output.read() - try: - tree = json.loads(str(data, "utf-8")) - except Exception: - tree = json.loads(data) - except Exception: - raise ValueError("Invalid format") - - return tree - - def process_tree(self, tree, test): - return list(self.get_items(tree, test)) if tree else [] - - def get_findings(self, filename, test): - reportTree = self.parse_json(filename) - - if isinstance(reportTree, list): - temp = [] - for moduleTree in reportTree: - temp += self.process_tree(moduleTree, test) - return temp - else: - return self.process_tree(reportTree, test) - - def get_items(self, tree, test): - items = {} - iterator = 0 - if "report" in tree and tree["report"].get("security"): - results = tree["report"].get("security").get("issues") - for key, node in results.items(): - for issue in node["issues"]: - item = self.get_item( - issue, key, test - ) - items[iterator] = item - iterator += 1 - return list(items.values()) - - def get_item(self, issue, title, test): - fingerprint = issue["fingerprint"] - pointer = issue["pointer"] - message = issue["specificDescription"] if 'specificDescription' in issue else title - score = issue["score"] - criticality = issue["criticality"] - if criticality == 1: - severity = "Info" - elif criticality == 2: - severity = "Low" - elif criticality == 3: - severity = "Medium" - elif criticality <= 4: - severity = "High" - else: - severity = "Critical" - # create the finding object - finding = Finding( - unique_id_from_tool=fingerprint, - title=title, - test=test, - severity=severity, - description="**fingerprint**: " + str(fingerprint) + "\n" - + "**pointer**: " + str(pointer) + "\n" - + "**message**: " + str(message) + "\n" - + "**score**: " + str(score) + "\n", - false_p=False, - duplicate=False, - out_of_scope=False, - static_finding=True, - dynamic_finding=False, - ) - return finding diff --git a/dojo/tools/npm_audit_7_plus/__init__.py b/dojo/tools/npm_audit_7_plus/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git 
a/dojo/tools/npm_audit_7_plus/parser.py b/dojo/tools/npm_audit_7_plus/parser.py deleted file mode 100644 index c72c01cfad..0000000000 --- a/dojo/tools/npm_audit_7_plus/parser.py +++ /dev/null @@ -1,225 +0,0 @@ -"""Parser for NPM Audit v7+ Scan.""" -import json -import logging -from dojo.models import Finding - -logger = logging.getLogger(__name__) - -''' -the npm audit json output depends on the params used. this parser -accepts the formats for any of: - -npm audit --json -npm audit fix --dry-run --json -npm audit --dry-run --json - -In order for this parser to import the same number of findings -as the report's meta block indicates, all top level keys -are consiered a vulnerability and as much information as provided -is added to each -''' - - -class NpmAudit7PlusParser(object): - """Represents the parser class.""" - - def get_scan_types(self): - """Return the scan type.""" - return ["NPM Audit v7+ Scan"] - - def get_label_for_scan_types(self, scan_type): - """Return the scan label.""" - return scan_type # no custom label for now - - def get_description_for_scan_types(self, scan_type): - """Return the scan description.""" - return "NPM Audit Scan json output from v7 and above." - - def get_findings(self, json_output, test): - """Return the findings gathered from file upload.""" - tree = self.parse_json(json_output) - return self.get_items(tree, test) - - def parse_json(self, json_output): - """Parse the json format to get findings.""" - if json_output is None: - return - try: - data = json_output.read() - try: - tree = json.loads(str(data, "utf-8")) - except Exception: - tree = json.loads(data) - except Exception: - raise ValueError("Invalid format, unable to parse json.") - - # output from npm audit fix --dry-run --json - if tree.get("audit"): - if not tree.get("audit").get("auditReportVersion"): - raise ValueError( - ("This parser only supports output from npm audit version" - " 7 and above.") - ) - subtree = tree.get("audit").get("vulnerabilities") - # output from npm audit --dry-run --json - # or - # output from npm audit --json - else: - if not tree.get("auditReportVersion"): - raise ValueError( - ("This parser only supports output from npm audit version" - " 7 and above.") - ) - subtree = tree.get("vulnerabilities") - - return subtree - - def get_items(self, tree, test): - """Return the individual items found in report.""" - items = {} - - for key, node in tree.items(): - item = get_item(node, tree, test) - unique_key = item.title + item.severity - items[unique_key] = item - - return list(items.values()) - - -def get_item(item_node, tree, test): - """Return the individual Findigns from items found in report.""" - references = [] - mitigation = "" - test = test - static_finding = True - title = "" - unique_id_from_tool = "" - cvssv3 = "" - cwe = "" - - if item_node["severity"] == "low": - severity = "Low" - elif item_node["severity"] == "moderate": - severity = "Medium" - elif item_node["severity"] == "high": - severity = "High" - elif item_node["severity"] == "critical": - severity = "Critical" - else: - severity = "Info" - - if item_node["via"] and isinstance(item_node["via"][0], str): - # this is a top level key (a vulnerability) - title = item_node["name"] - cwe = "CWE-1035" # default - component_name = title - - elif item_node["via"] and isinstance(item_node["via"][0], dict): - title = item_node["via"][0]["title"] - component_name = item_node["nodes"][0] - cwe = item_node["via"][0]["cwe"][0] - references.append(item_node["via"][0]["url"]) - unique_id_from_tool = 
str(item_node["via"][0]["source"]) - cvssv3 = item_node["via"][0]["cvss"]["vectorString"] - - if isinstance(item_node["fixAvailable"], dict): - fix_name = item_node["fixAvailable"]["name"] - fix_version = item_node["fixAvailable"]["version"] - mitigation = "Update {0} to version {1}".format(fix_name, fix_version) - else: - mitigation = "No specific mitigation provided by tool." - - description = get_vuln_description(item_node, tree) - - if (item_node["via"] and - isinstance(item_node["via"][0], dict) and - len(item_node["via"]) > 1): - # we have a multiple CWE vuln which we will capture in the - # vulnerability_ids and references - for vuln in item_node["via"][1:]: # have to decide if str or object - if isinstance(vuln, dict): - references.append(vuln["url"]) - - if len(cwe): - cwe = int(cwe.split("-")[1]) - - dojo_finding = Finding( - title=title, - test=test, - severity=severity, - description=description, - cwe=cwe, - mitigation=mitigation, - references=", ".join(references), - component_name=component_name, - false_p=False, - duplicate=False, - out_of_scope=False, - mitigated=None, - impact="No impact provided", - static_finding=static_finding, - dynamic_finding=False, - vuln_id_from_tool=unique_id_from_tool, - ) - - if (cvssv3 is not None) and (len(cvssv3) > 0): - dojo_finding.cvssv3 = cvssv3 - - return dojo_finding - - -def get_vuln_description(item_node, tree): - """Make output pretty of details.""" - effects_handled = [] - description = "" - - description += (item_node["name"] + " " + - item_node["range"] + "\n") - description += "Severity: " + item_node["severity"] + "\n" - - for via in item_node["via"]: - if isinstance(via, str): - description += ("Depends on vulnerable versions of " + - via + "\n") - else: - description += (via["title"] + " - " + via["url"] + "\n") - - if isinstance(item_node["fixAvailable"], dict): - fix_name = item_node["fixAvailable"]["name"] - fix_version = item_node["fixAvailable"]["version"] - mitigation = "Fix Available: Update {0} to version {1}".format( - fix_name, fix_version) - else: - mitigation = "No specific mitigation provided by tool." 
- - description += mitigation + "\n" - - for node in item_node["nodes"]: - description += node + "\n" - - for effect in item_node["effects"]: - # look up info in the main tree - description += (" " + tree[effect]["name"] + " " + - tree[effect]["range"] + "\n") - effects_handled.append(tree[effect]["name"]) - for ev in tree[effect]["via"]: - if isinstance(ev, dict): - if tree[effect]["name"] != ev["name"]: - description += (" Depends on vulnerable versions of " + - ev["name"] + "\n") - else: - if tree[effect]["name"] != ev: - description += (" Depends on vulnerable versions of " + - ev + "\n") - for en in tree[effect]["nodes"]: - description += " " + en + "\n" - - for ee in tree[effect]["effects"]: - if ee in effects_handled: - continue # already added to description - description += (" " + tree[ee]["name"] + " " + - tree[ee]["range"] + "\n") - for en in tree[effect]["nodes"]: - description += " " + en + "\n" - - return description diff --git a/dojo/tools/pip_audit/parser.py b/dojo/tools/pip_audit/parser.py index 4b3ffba9b1..726667987f 100644 --- a/dojo/tools/pip_audit/parser.py +++ b/dojo/tools/pip_audit/parser.py @@ -1,110 +1,70 @@ -"""Parser for pip-audit.""" import json from dojo.models import Finding class PipAuditParser: - """Represents a file parser capable of ingesting pip-audit results.""" - def get_scan_types(self): - """Return the type of scan this parser ingests.""" return ["pip-audit Scan"] def get_label_for_scan_types(self, scan_type): - """Return the friendly name for this parser.""" return "pip-audit Scan" def get_description_for_scan_types(self, scan_type): - """Return the description for this parser.""" return "Import pip-audit JSON scan report." def requires_file(self, scan_type): - """Return boolean indicating if parser requires a file to process.""" return True def get_findings(self, scan_file, test): - """Return the collection of Findings ingested.""" data = json.load(scan_file) - findings = None - # this parser can handle two distinct formats see sample scan files - if "dependencies" in data: - # new format of report - findings = get_file_findings(data, test) - else: - # legacy format of report - findings = get_legacy_findings(data, test) - - return findings - - -def get_file_findings(data, test): - """Return the findings in the vluns array inside the dependencies key.""" - findings = list() - for dependency in data["dependencies"]: - item_findings = get_item_findings(dependency, test) - if item_findings is not None: - findings.extend(item_findings) - return findings - - -def get_legacy_findings(data, test): - """Return the findings gathered from the vulns element.""" - findings = list() - for item in data: - item_findings = get_item_findings(item, test) - if item_findings is not None: - findings.extend(item_findings) - return findings + findings = list() + for item in data: + vulnerabilities = item.get("vulns", []) + if vulnerabilities: + component_name = item["name"] + component_version = item.get("version") + for vulnerability in vulnerabilities: + vuln_id = vulnerability.get("id") + vuln_fix_versions = vulnerability.get("fix_versions") + vuln_description = vulnerability.get("description") + + title = ( + f"{vuln_id} in {component_name}:{component_version}" + ) + + description = "" + description += vuln_description + + mitigation = None + if vuln_fix_versions: + mitigation = "Upgrade to version:" + if len(vuln_fix_versions) == 1: + mitigation += f" {vuln_fix_versions[0]}" + else: + for fix_version in vuln_fix_versions: + mitigation += f"\n- {fix_version}" + + 
finding = Finding( + test=test, + title=title, + cwe=1352, + severity="Medium", + description=description, + mitigation=mitigation, + component_name=component_name, + component_version=component_version, + vuln_id_from_tool=vuln_id, + static_finding=True, + dynamic_finding=False, + ) + vulnerability_ids = list() + if vuln_id: + vulnerability_ids.append(vuln_id) + if vulnerability_ids: + finding.unsaved_vulnerability_ids = vulnerability_ids + + findings.append(finding) -def get_item_findings(item, test): - """Return list of Findings.""" - findings = list() - vulnerabilities = item.get("vulns", []) - if vulnerabilities: - component_name = item["name"] - component_version = item.get("version") - for vulnerability in vulnerabilities: - vuln_id = vulnerability.get("id") - vuln_fix_versions = vulnerability.get("fix_versions") - vuln_description = vulnerability.get("description") - - title = ( - f"{vuln_id} in {component_name}:{component_version}" - ) - - description = "" - description += vuln_description - - mitigation = None - if vuln_fix_versions: - mitigation = "Upgrade to version:" - if len(vuln_fix_versions) == 1: - mitigation += f" {vuln_fix_versions[0]}" - else: - for fix_version in vuln_fix_versions: - mitigation += f"\n- {fix_version}" - - finding = Finding( - test=test, - title=title, - cwe=1395, - severity="Medium", - description=description, - mitigation=mitigation, - component_name=component_name, - component_version=component_version, - vuln_id_from_tool=vuln_id, - static_finding=True, - dynamic_finding=False, - ) - vulnerability_ids = list() - if vuln_id: - vulnerability_ids.append(vuln_id) - if vulnerability_ids: - finding.unsaved_vulnerability_ids = vulnerability_ids - - findings.append(finding) - - return findings + return findings diff --git a/dojo/utils.py b/dojo/utils.py index b41c82966a..25cf46d2af 100644 --- a/dojo/utils.py +++ b/dojo/utils.py @@ -1575,12 +1575,7 @@ def __init__(self, product, title=None, tab=None): active=True, mitigated__isnull=True).count() active_endpoints = Endpoint.objects.filter( - product=self.product, - status_endpoint__mitigated=False, - status_endpoint__false_positive=False, - status_endpoint__out_of_scope=False, - status_endpoint__risk_accepted=False, - ) + product=self.product, finding__active=True, finding__mitigated__isnull=True) self.endpoints_count = active_endpoints.distinct().count() self.endpoint_hosts_count = active_endpoints.values('host').distinct().count() self.benchmark_type = Benchmark_Type.objects.filter( diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 2ce3da7473..e5eb9b0e92 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 appVersion: "2.33.0-dev" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo -version: 1.6.116-dev +version: 1.6.115-dev icon: https://www.defectdojo.org/img/favicon.ico maintainers: - name: madchap diff --git a/unittests/scans/checkmarx_one/checkmarx_one.json b/unittests/scans/checkmarx_one/checkmarx_one.json deleted file mode 100644 index a9e432abf6..0000000000 --- a/unittests/scans/checkmarx_one/checkmarx_one.json +++ /dev/null @@ -1,284 +0,0 @@ -{ - "scan": { - "end_time": "2024-01-18T09:12:43", - "analyzer": { - "id": "CxOne-SAST", - "name": "Checkmarx", - "url": "https://checkmarx.com/", - "vendor": { - "name": "Checkmarx" - }, - "version": "2.0.63" - }, - "scanner": { - "id": "CxOne-SAST", - "name": "Checkmarx", - "vendor": { - "name": "Checkmarx" - }, - "version": "2.0.63" - }, - "start_time": 
"2024-01-18T09:12:43", - "status": "success", - "type": "sast" - }, - "schema": "https://gitlab.com/gitlab-org/gitlab/-/raw/master/lib/gitlab/ci/parsers/security/validators/schemas/15.0.0/sast-report-format.json", - "version": "15.0.0", - "vulnerabilities": [ - { - "id": "Client_HTML5_Store_Sensitive_data_In_Web_Storage:/src/helpers/Constants.ts:450", - "category": "Checkmarx-sast", - "name": "Client_HTML5_Store_Sensitive_data_In_Web_Storage", - "message": "Client_HTML5_Store_Sensitive_data_In_Web_Storage@/src/helpers/Constants.ts:450", - "description": "The application stores data makeKey on the client, in an insecure manner, at line 115 of /src/helpers/Utility.ts.", - "cve": "Client_HTML5_Store_Sensitive_data_In_Web_Storage:/src/helpers/Constants.ts:450", - "severity": "Medium", - "confidence": "Medium", - "solution": "", - "scanner": { - "id": "Checkmarx-sast", - "name": "Checkmarx-sast", - "vendor": { - "name": "" - }, - "version": "" - }, - "identifiers": [ - { - "type": "cxOneScan", - "name": "CxOne Scan", - "url": "https://ast.checkmarx.net/projects/4c5703d8-dddf-11ee-8275-bb5b871f4ca1/scans?id=56efc3de-dddf-11ee-91f7-17d54222fb10\u0026branch=release%2FRC-6", - "value": "511341974" - } - ], - "links": [], - "tracking": { - "type": "source", - "items": [ - { - "signatures": [ - { - "algorithm": "sast-Algorithm ", - "value": "NA" - } - ], - "file": "/src/helpers/Constants.ts", - "end_line": 451, - "start_line": 450 - } - ] - }, - "flags": [], - "location": { - "file": "/src/helpers/Constants.ts", - "start_line": 450, - "end_line": 451, - "class": "" - } - }, - { - "id": "Client_HTML5_Store_Sensitive_data_In_Web_Storage:/src/helpers/Helper.ts:349", - "category": "Checkmarx-sast", - "name": "Client_HTML5_Store_Sensitive_data_In_Web_Storage", - "message": "Client_HTML5_Store_Sensitive_data_In_Web_Storage@/src/helpers/Helper.ts:349", - "description": "The application stores data Key on the client, in an insecure manner, at line 349 of /src/helpers/Helper.ts.", - "cve": "Client_HTML5_Store_Sensitive_data_In_Web_Storage:/src/helpers/Helper.ts:349", - "severity": "Medium", - "confidence": "Medium", - "solution": "", - "scanner": { - "id": "Checkmarx-sast", - "name": "Checkmarx-sast", - "vendor": { - "name": "" - }, - "version": "" - }, - "identifiers": [ - { - "type": "cxOneScan", - "name": "CxOne Scan", - "url": "https://ast.checkmarx.net/projects/7c649cf6-dde0-11ee-a703-43244b0a9879/scans?id=86fc33ea-dde0-11ee-ba5f-3beb4c589dd3\u0026branch=release%2FRC-6", - "value": "832413795" - } - ], - "links": [], - "tracking": { - "type": "source", - "items": [ - { - "signatures": [ - { - "algorithm": "sast-Algorithm ", - "value": "NA" - } - ], - "file": "/src/helpers/Helper.ts", - "end_line": 350, - "start_line": 339 - } - ] - }, - "flags": [], - "location": { - "file": "/src/helpers/Helper.ts", - "start_line": 349, - "end_line": 350, - "class": "" - } - }, - { - "id": "Use_Of_Hardcoded_Password:/src/pages/UserError_test.tsx:71", - "category": "Checkmarx-sast", - "name": "Use_Of_Hardcoded_Password", - "message": "Use_Of_Hardcoded_Password@/src/pages/UserError_test.tsx:71", - "description": "The application uses the hard-coded password \u0026#34;testPassword\u0026#34; for authentication purposes, either using it to verify users\u0026#39; identities, or to access another remote system. 
This password at line 71 of /src/pages/UserError_test.tsx appears in the code, implying it is accessible to anyone with source code access, and cannot be changed without rebuilding the application.\n\n", - "cve": "Use_Of_Hardcoded_Password:/src/pages/UserError_test.tsx:71", - "severity": "Low", - "confidence": "Low", - "solution": "", - "scanner": { - "id": "Checkmarx-sast", - "name": "Checkmarx-sast", - "vendor": { - "name": "" - }, - "version": "" - }, - "identifiers": [ - { - "type": "cxOneScan", - "name": "CxOne Scan", - "url": "https://ast.checkmarx.net/projects/53d5b99a-dde1-11ee-ab71-9be9755a4da6/scans?id=5e592014-dde1-11ee-8985-f37d989e23db\u0026branch=release%2FRC-6", - "value": "143486243" - } - ], - "links": [], - "tracking": { - "type": "source", - "items": [ - { - "signatures": [ - { - "algorithm": "sast-Algorithm ", - "value": "NA" - } - ], - "file": "/src/pages/UserError_test.tsx", - "end_line": 72, - "start_line": 71 - } - ] - }, - "flags": [], - "location": { - "file": "/src/pages/UserError_test.tsx", - "start_line": 71, - "end_line": 72, - "class": "" - } - }, - { - "id": "Client_Hardcoded_Domain:/public/index.html:32", - "category": "Checkmarx-sast", - "name": "Client_Hardcoded_Domain", - "message": "Client_Hardcoded_Domain@/public/index.html:32", - "description": "The JavaScript file imported in https://fonts.googleapis.com/icon?family=Material+Icons in /public/index.html at line 32 is from a remote domain, which may allow attackers to replace its contents with malicious code.", - "cve": "Client_Hardcoded_Domain:/public/index.html:32", - "severity": "Info", - "confidence": "Info", - "solution": "", - "scanner": { - "id": "Checkmarx-sast", - "name": "Checkmarx-sast", - "vendor": { - "name": "" - }, - "version": "" - }, - "identifiers": [ - { - "type": "cxOneScan", - "name": "CxOne Scan", - "url": "https://ast.checkmarx.net/projects/34480339-8f8c-4b68-b8fb-4eea09a2045d/scans?id=78adc5f1-0864-411e-b8d6-bfa134458bd8\u0026branch=release%2Fpilot-1", - "value": "2595392" - } - ], - "links": [], - "tracking": { - "type": "source", - "items": [ - { - "signatures": [ - { - "algorithm": "sast-Algorithm ", - "value": "NA" - } - ], - "file": "/public/index.html", - "end_line": 87, - "start_line": 32 - } - ] - }, - "flags": [], - "location": { - "file": "/public/index.html", - "start_line": 32, - "end_line": 87, - "class": "" - } - }, - { - "id": "Client_DOM_XSS:/src/app/App_test.tsx:744", - "category": "Checkmarx-sast", - "name": "Client_DOM_XSS", - "message": "Client_DOM_XSS@/src/app/App_test.tsx:744", - "description": "The method TrustMe embeds untrusted data in generated output with location, at line 298 of /src/app/App_test.tsx. 
This untrusted data is embedded into the output without proper sanitization or encoding, enabling an attacker to inject malicious code into the generated web-page.\n\n", - "cve": "Client_DOM_XSS:/src/app/App_test.tsx:744", - "severity": "Info", - "confidence": "Info", - "solution": "", - "scanner": { - "id": "Checkmarx-sast", - "name": "Checkmarx-sast", - "vendor": { - "name": "" - }, - "version": "" - }, - "identifiers": [ - { - "type": "cxOneScan", - "name": "CxOne Scan", - "url": "https://ast.checkmarx.net/projects/38ebbafc-dde2-11ee-ae0c-b72e7e0d42ae/scans?id=42ff549a-dde2-11ee-8c8c-83e0db45059d\u0026branch=release%2FRC-6", - "value": "836714351" - } - ], - "links": [], - "tracking": { - "type": "source", - "items": [ - { - "signatures": [ - { - "algorithm": "sast-Algorithm ", - "value": "NA" - } - ], - "file": "/src/app/App_test.tsx", - "end_line": 746, - "start_line": 744 - } - ] - }, - "flags": [], - "location": { - "file": "/src/app/App_test.tsx", - "start_line": 744, - "end_line": 746, - "class": "" - } - } - ] -} \ No newline at end of file diff --git a/unittests/scans/checkmarx_one/many_findings.json b/unittests/scans/checkmarx_one/many_findings.json deleted file mode 100644 index 13a030e2e3..0000000000 --- a/unittests/scans/checkmarx_one/many_findings.json +++ /dev/null @@ -1,258 +0,0 @@ -{ - "results": [ - { - "type": "kics", - "label": "IaC Security", - "id": "98727183", - "similarityId": "fbed62efe2786d647806451d0480f57b4bc08786633fb73c29579faee8f9d252", - "status": "RECURRENT", - "state": "TO_VERIFY", - "severity": "HIGH", - "created": "2023-11-21T10:07:38Z", - "firstFoundAt": "2022-12-26T09:31:48Z", - "foundAt": "2023-11-21T10:07:38Z", - "firstScanId": "79cd6248-ddcc-11ee-80c3-c34e822ea27f", - "description": "A user should be specified in the dockerfile, otherwise the image will run as root", - "descriptionHTML": "\u003cp\u003eA user should be specified in the dockerfile, otherwise the image will run as root\u003c/p\u003e\n", - "data": { - "queryId": "94d39580-ddcc-11ee-b570-27d2d85c4cb8 [Taken from query_id]", - "queryName": "Missing User Instruction", - "group": "Build Process [Taken from category]", - "line": 1, - "platform": "Dockerfile", - "issueType": "MissingAttribute", - "expectedValue": "The 'Dockerfile' should contain the 'USER' instruction", - "value": "The 'Dockerfile' does not contain any 'USER' instruction", - "filename": "/qe/testharness/Dockerfile" - }, - "comments": {}, - "vulnerabilityDetails": { - "cvss": {} - } - }, - { - "type": "kics", - "label": "IaC Security", - "id": "28307228", - "similarityId": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "status": "RECURRENT", - "state": "TO_VERIFY", - "severity": "HIGH", - "created": "2023-11-21T10:07:38Z", - "firstFoundAt": "2022-12-26T09:31:48Z", - "foundAt": "2023-11-21T10:07:38Z", - "firstScanId": "811759c2-ddd7-11ee-9b56-d34cc93fb257", - "description": "A user should be specified in the dockerfile, otherwise the image will run as root", - "descriptionHTML": "\u003cp\u003eA user should be specified in the dockerfile, otherwise the image will run as root\u003c/p\u003e\n", - "data": { - "queryId": "5d2efac8-ddd8-11ee-9117-b34a238abecc [Taken from query_id]", - "queryName": "Missing User Instruction", - "group": "Build Process [Taken from category]", - "line": 1, - "platform": "Dockerfile", - "issueType": "MissingAttribute", - "expectedValue": "The 'Dockerfile' should contain the 'USER' instruction", - "value": "The 'Dockerfile' does not contain any 'USER' instruction", - "filename": 
"/qe/testharness/Dockerfile" - }, - "comments": {}, - "vulnerabilityDetails": { - "cvss": {} - } - }, - { - "type": "sast", - "label": "sast", - "id": "04894977", - "similarityId": "697307927", - "status": "RECURRENT", - "state": "TO_VERIFY", - "severity": "MEDIUM", - "created": "2023-11-21T09:16:10Z", - "firstFoundAt": "2022-03-17T14:45:41Z", - "foundAt": "2023-11-21T09:16:10Z", - "firstScanId": "9d120bda-ddd8-11ee-bd4c-8b5b82bf6c89", - "description": "Method getObject at line 96 of /shared/src/main/java/com/example/api/clients/ObjectsManagerUtil.java sends user information outside the application. This may constitute a Privacy Violation.\n\n", - "descriptionHTML": "\u003cp\u003eMethod getObject at line 96 of /shared/src/main/java/com/example/api/clients/ObjectsManagerUtil.java sends user information outside the application. This may constitute a Privacy Violation.\u003c/p\u003e\n", - "data": { - "queryId": 12956636075206043460, - "queryName": "Privacy_Violation", - "group": "Java_Medium_Threat", - "resultHash": "2417044825981779912395719508", - "languageName": "Java", - "nodes": [ - { - "id": "9823731082518796021644390089", - "line": 96, - "name": "secret", - "column": 48, - "length": 12, - "method": "getObject", - "nodeID": 55222, - "domType": "ParamDecl", - "fileName": "/shared/src/main/java/com/example/api/clients/ObjectsManagerUtil.java", - "fullName": "com.example.api.clients.ObjectsManagerUtil.getObject.secret", - "typeName": "String", - "methodLine": 96, - "definitions": "1" - }, - { - "id": "ahpeiL2gaeboi8aqueiv8liekah=", - "line": 48, - "name": "secret", - "column": 71, - "length": 12, - "method": "getObject", - "nodeID": 55222, - "domType": "UnknownReference", - "fileName": "/shared/src/main/java/com/example/api/clients/ObjectsManagerUtil.java", - "fullName": "com.example.api.clients.ObjectsManagerUtil.getObject.secret", - "typeName": "String", - "methodLine": 76, - "definitions": "1" - }, - { - "id": "Aewo6hui2ek5guNgaesie4ioPha=", - "line": 56, - "name": "error", - "column": 27, - "length": 12, - "method": "getObject", - "nodeID": 55222, - "domType": "MethodInvokeExpr", - "fileName": "/shared/src/main/java/com/example/api/clients/ObjectsManagerUtil.java", - "fullName": "com.example.api.clients.ObjectsManagerUtil.log.error", - "typeName": "error", - "methodLine": 96, - "definitions": "0" - } - ] - }, - "comments": {}, - "vulnerabilityDetails": { - "cweId": 359, - "cvss": {}, - "compliances": [ - "FISMA 2014", - "NIST SP 800-53", - "OWASP Top 10 2013", - "OWASP Top 10 2017", - "OWASP Top 10 2021", - "PCI DSS v3.2.1", - "ASD STIG 4.10" - ] - } - }, - { - "type": "kics", - "label": "IaC Security", - "id": "9930754", - "similarityId": "df0b5ce1f88f1af07e63731e0a9628920a008ea0ca4bbd117d75a3cdbdd283ff", - "status": "RECURRENT", - "state": "TO_VERIFY", - "severity": "MEDIUM", - "created": "2023-11-21T10:07:38Z", - "firstFoundAt": "2022-08-01T08:30:25Z", - "foundAt": "2023-11-21T10:07:38Z", - "firstScanId": "eff24b42-ddda-11ee-9e73-83b44de11797", - "description": "Incoming container traffic should be bound to a specific host interface", - "descriptionHTML": "\u003cp\u003eIncoming container traffic should be bound to a specific host interface\u003c/p\u003e\n", - "data": { - "queryId": "fd070ec6-ddda-11ee-a521-73cad7abf17a [Taken from query_id]", - "queryName": "Container Traffic Not Bound To Host Interface", - "group": "Networking and Firewall [Taken from category]", - "line": 16, - "platform": "DockerCompose", - "issueType": "IncorrectValue", - "expectedValue": "Docker compose file to 
have 'ports' attribute bound to a specific host interface.", - "value": "Docker compose file doesn't have 'ports' attribute bound to a specific host interface", - "filename": "/qe/integration/docker-compose.yml" - }, - "comments": {}, - "vulnerabilityDetails": { - "cvss": {} - } - }, - { - "type": "sast", - "label": "sast", - "id": "47966330", - "similarityId": "2994069268", - "status": "RECURRENT", - "state": "TO_VERIFY", - "severity": "LOW", - "created": "2023-11-21T09:16:10Z", - "firstFoundAt": "2023-02-09T09:32:55Z", - "foundAt": "2023-11-21T09:16:10Z", - "firstScanId": "4f9f7b28-dddb-11ee-b736-53a846e9935e", - "description": "Method getClient at line 43 of /qe/integration-tests/src/java/com/example/api/integrationtests/utils/IntegratHelper.java defines testPassword, which is designated to contain user passwords. However, while plaintext passwords are later assigned to testPassword, this variable is never cleared from memory.\n\n", - "descriptionHTML": "\u003cp\u003eMethod getClient at line 43 of /qe/integration-tests/src/java/com/example/api/integrationtests/utils/IntegratHelper.java defines testPassword, which is designated to contain user passwords. However, while plaintext passwords are later assigned to testPassword, this variable is never cleared from memory.\u003c/p\u003e\n", - "data": { - "queryId": 7846472296093057013, - "queryName": "Heap_Inspection", - "group": "Java_Low_Visibility", - "resultHash": "oochiuquiede0IeVeijaWooTieh=", - "languageName": "Java", - "nodes": [ - { - "id": "Oec6Nie9ool0too4chieNoh5zoo=", - "line": 84, - "name": "testPassword", - "column": 18, - "length": 12, - "method": "getClient", - "nodeID": 6459, - "domType": "Declarator", - "fileName": "/qe/integration-tests/src/java/com/example/api/integrationtests/utils/IntegratHelper.java", - "fullName": "com.example.api.integrationtests.utils.IntegratHelper.getClient.testPassword", - "typeName": "char", - "methodLine": 35, - "definitions": "1" - } - ] - }, - "comments": {}, - "vulnerabilityDetails": { - "cweId": 244, - "cvss": {}, - "compliances": [ - "OWASP Top 10 2013", - "OWASP Top 10 2021", - "ASD STIG 4.10" - ] - } - }, - { - "type": "kics", - "label": "IaC Security", - "id": "87775678", - "similarityId": "d2b3d5c205f6e52f7588c4ecab08caec2a9d53dc2ded74e1fffd9f2ebf3fa203", - "status": "RECURRENT", - "state": "TO_VERIFY", - "severity": "LOW", - "created": "2023-11-21T10:07:38Z", - "firstFoundAt": "2023-01-05T09:31:43Z", - "foundAt": "2023-11-21T10:07:38Z", - "firstScanId": "82a21764-dddc-11ee-9364-1f3a853093bf", - "description": "Ensure that HEALTHCHECK is being used. The HEALTHCHECK instruction tells Docker how to test a container to check that it is still working", - "descriptionHTML": "\u003cp\u003eEnsure that HEALTHCHECK is being used. 
The HEALTHCHECK instruction tells Docker how to test a container to check that it is still working\u003c/p\u003e\n", - "data": { - "queryId": "90b50eba-dddc-11ee-acec-cf20c0abdb94 [Taken from query_id]", - "queryName": "Healthcheck Instruction Missing", - "group": "Insecure Configurations [Taken from category]", - "line": 1, - "platform": "Dockerfile", - "issueType": "MissingAttribute", - "expectedValue": "Dockerfile should contain instruction 'HEALTHCHECK'", - "value": "Dockerfile doesn't contain instruction 'HEALTHCHECK'", - "filename": "/qe/unitests/Dockerfile" - }, - "comments": {}, - "vulnerabilityDetails": { - "cvss": {} - } - } - ], - "totalCount": 6, - "scanID": "fc1ab89e-ddc8-11ee-96d4-97cff7d4e776" -} \ No newline at end of file diff --git a/unittests/scans/checkmarx_one/no_findings.json b/unittests/scans/checkmarx_one/no_findings.json deleted file mode 100644 index c526fa4dc0..0000000000 --- a/unittests/scans/checkmarx_one/no_findings.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "results": [ - ], - "totalCount": 0, - "scanID": "4fc677bc-dddd-11ee-8004-6fd4f0411f73" -} \ No newline at end of file diff --git a/unittests/scans/crunch42/crunch42_many_findings.json b/unittests/scans/crunch42/crunch42_many_findings.json deleted file mode 100644 index 1ea3aca89f..0000000000 --- a/unittests/scans/crunch42/crunch42_many_findings.json +++ /dev/null @@ -1,251 +0,0 @@ -{ - "end": "1709535630", - "report": { - "index": [ - "/components/security/ApiKey", - "/paths/~1integration-test~1generate/post/security/0/ApiKeyAuth", - "/paths/~1integration-test~1health/get/security", - "/paths/~1integration-test~1invalidate/delete/security/0/ApiKeyAuth", - "/paths/~1integration-test~1ping/get/security", - "/paths/~1integration-test~1refresh/get/security/0/ApiKeyAuth", - "/paths/~1integration-test~1refresh/put/security/0/ApiKeyAuth", - "/paths/~1integration-test~1verify/get/security/0/ApiKeyAuth" - ], - "assessmentVersion": "3.1.6", - "assessmentReportVersion": "1.0.1", - "commit": "ahso2mom3neiviungoh4ENgahXie2Aer4ain5oba-E", - "oasVersion": "3.0.0", - "apiVersion": "1.0.0", - "fileId": "c65d4166-ddf7-11ee-a7f6-bf9763730afb", - "apiId": "", - "openapiState": "valid", - "score": 82.86, - "valid": true, - "criticality": 4, - "issueCounter": 8, - "minimalReport": false, - "maxEntriesPerIssue": 30, - "maxImpactedPerEntry": 30, - "security": { - "issueCounter": 8, - "score": 12.86, - "criticality": 4, - "issues": { - "v3-global-securityscheme-apikey-inheader": { - "description": "Transporting API keys in a header over network allowed", - "issues": [ - { - "score": 0, - "pointer": 0, - "tooManyImpacted": false, - "criticality": 1, - "request": true, - "fingerprint": "teephei0aes4ohxur7Atie6zuiCh9weeshue0kai" - } - ], - "issueCounter": 1, - "score": 0, - "criticality": 1, - "tooManyError": false - }, - "v3-operation-securityrequirement-apikey-inheader": { - "description": "Operation accepts API keys transported in a header over network", - "issues": [ - { - "score": -2.14, - "pointer": 1, - "tooManyImpacted": false, - "criticality": 3, - "request": true, - "fingerprint": "Iibooquavie0hah0quoh7thooghiith7utoow6th" - }, - { - "score": -2.14, - "pointer": 3, - "tooManyImpacted": false, - "criticality": 3, - "request": true, - "fingerprint": "roz6Iph0eiPaih1shooPi1geiyuziitei0aiGhed" - }, - { - "score": -2.14, - "pointer": 5, - "tooManyImpacted": false, - "criticality": 3, - "request": true, - "fingerprint": "lae4iet6XeiyiSheeZof3sheik9lahdaiph7edah" - }, - { - "score": -2.14, - "pointer": 6, - "tooManyImpacted": false, 
- "criticality": 3, - "request": true, - "fingerprint": "oNgie5Ieke9fiep6yochaT2ain8oona4xeiphiCh" - }, - { - "score": -2.14, - "pointer": 7, - "tooManyImpacted": false, - "criticality": 3, - "request": true, - "fingerprint": "aiShievooyi1Gohn1aeque5Mae3aiBoh8oquaphe" - } - ], - "issueCounter": 5, - "score": -10.71, - "criticality": 3, - "tooManyError": false - }, - "v3-operation-securityrequirement-emptyarray": { - "description": "The security section contains an empty array", - "issues": [ - { - "specificDescription": "The security section of the operation 'get' contains an empty array", - "score": -3.21, - "pointer": 2, - "tooManyImpacted": false, - "criticality": 4, - "request": true, - "fingerprint": "oofushaeQuiev6Shegai2roh0ceighae5Daij7pi" - }, - { - "specificDescription": "The security section of the operation 'get' contains an empty array", - "score": -3.21, - "pointer": 4, - "tooManyImpacted": false, - "criticality": 4, - "request": true, - "fingerprint": "Eife6Tu5liequiec8AhZ6booGheegh5oShues2bi" - } - ], - "issueCounter": 2, - "score": -6.43, - "criticality": 4, - "tooManyError": false - } - }, - "subgroupIssueCounter": { - "authentication": { - "none": 0, - "info": 1, - "low": 0, - "medium": 5, - "high": 2, - "critical": 0 - }, - "authorization": { - "none": 0, - "info": 0, - "low": 0, - "medium": 0, - "high": 0, - "critical": 0 - }, - "transport": { - "none": 0, - "info": 0, - "low": 0, - "medium": 0, - "high": 0, - "critical": 0 - } - } - }, - "data": { - "issueCounter": 0, - "score": 70, - "criticality": 0, - "issues": {}, - "subgroupIssueCounter": { - "parameters": { - "none": 0, - "info": 0, - "low": 0, - "medium": 0, - "high": 0, - "critical": 0 - }, - "responseHeader": { - "none": 0, - "info": 0, - "low": 0, - "medium": 0, - "high": 0, - "critical": 0 - }, - "responseDefinition": { - "none": 0, - "info": 0, - "low": 0, - "medium": 0, - "high": 0, - "critical": 0 - }, - "schema": { - "none": 0, - "info": 0, - "low": 0, - "medium": 0, - "high": 0, - "critical": 0 - }, - "paths": { - "none": 0, - "info": 0, - "low": 0, - "medium": 0, - "high": 0, - "critical": 0 - } - } - }, - "issuesKey": [ - "v3-operation-securityrequirement-emptyarray", - "v3-global-securityscheme-apikey-inheader", - "v3-operation-securityrequirement-apikey-inheader" - ], - "summary": { - "oasVersion": "3.0.0", - "apiVersion": "1.0.0", - "basepath": "", - "apiName": "Example Authentication Service", - "description": "Authentication Service", - "endpoints": [ - "https://auth-dev-internal.example.com/", - "https://auth-dev-internal.example.com/" - ], - "pathCounter": 1, - "operationCounter": 7, - "parameterCounter": 4, - "requestBodyCounter": 0, - "schemesCounter": { - "https": 7 - }, - "requestContentType": {}, - "responseContentType": { - "application/json": 19 - }, - "securitySchemes": { - "ApiKeyAuth": { - "counterInsecure": 0, - "counterSecure": 5, - "type": "apiKey", - "apiKeyIn": "header", - "apiKeyName": "X-API-Key" - } - }, - "componentsSchemasCounter": 6, - "componentsResponsesCounter": 0, - "componentsParametersCounter": 2, - "componentsExamplesCounter": 0, - "componentsRequestBodiesCounter": 0, - "componentsHeadersCounter": 0, - "componentsSecuritySchemesCounter": 1, - "componentsLinksCounter": 0, - "componentsCallbacksCounter": 0 - } - }, - "start": "1702028474", - "taskId": "0ccd5572-ddf9-11ee-935d-d7d416afd73f" -} \ No newline at end of file diff --git a/unittests/scans/crunch42/crunch42_many_findings2.json b/unittests/scans/crunch42/crunch42_many_findings2.json deleted file mode 100644 
index b9aa1f75fa..0000000000 --- a/unittests/scans/crunch42/crunch42_many_findings2.json +++ /dev/null @@ -1,442 +0,0 @@ -{ - "end": "2131451849", - "report": { - "index": [ - "/definitions/Objects/additionalProperties", - "/definitions/Objects/properties/all_objects/items", - "/definitions/ObjectsList/additionalProperties", - "/definitions/auth_claims", - "/definitions/auth_claims/additionalProperties", - "/definitions/auth_claims/properties/level/format", - "/paths/~1admin~1all_objects/get/parameters/0", - "/paths/~1admin~1all_objects/get/responses/403", - "/paths/~1admin~1all_objects/get/security/0/access-token", - "/paths/~1admin~1objects~1search/get/parameters/0", - "/paths/~1admin~1objects~1search/get/parameters/1", - "/paths/~1admin~1objects~1search/get/responses/403", - "/paths/~1admin~1objects~1search/get/security/0/access-token", - "/paths/~1login/post", - "/paths/~1login/post/parameters/0", - "/paths/~1login/post/parameters/1", - "/paths/~1register/post", - "/paths/~1object~1edit_info/put/parameters/1", - "/paths/~1object~1edit_info/put/responses/403", - "/paths/~1object~1edit_info/put/security/0/access-token", - "/paths/~1object~1info/get/security/0/access-token", - "/securityDefinitions/access-token" - ], - "assessmentVersion": "3.1.6", - "assessmentReportVersion": "1.0.1", - "commit": "theePhohphooQuoh6ii3naiS1Goalee9Chooghei-N", - "oasVersion": "2.0", - "apiVersion": "UAT-JWT-Validation", - "fileId": "2eeb479e-ddfa-11ee-9768-bb6e68d5b5fa", - "apiId": "", - "openapiState": "valid", - "score": 79.94, - "valid": true, - "criticality": 3, - "issueCounter": 13, - "warnings": { - "issues": { - "warning-global-schema-unused": { - "description": "Reusable schema definition is not used in the OpenAPI definition", - "totalIssues": 1, - "issues": [ - { - "pointer": 3, - "specificDescription": "The reusable schema definition 'acme_claims' is not used in the OpenAPI definition", - "fingerprint": "ahthi2Ahshaeghah2iewoo0aiF4quoath5Iej0ku" - } - ], - "tooManyError": false - }, - "warning-sample-undefined": { - "description": "No sample values or examples were provided for API Conformance Scan", - "totalIssues": 5, - "issues": [ - { - "pointer": 17, - "specificDescription": "No sample defined in the 'Parameter' object", - "fingerprint": "aereePheeb0puh5tahwoshi8Yei9woophahr7koh" - }, - { - "pointer": 9, - "specificDescription": "No sample defined in the 'Parameter' object", - "fingerprint": "aiseiquohNaik9aThae9oshu8te8ree9Yayie7Ha" - }, - { - "pointer": 10, - "specificDescription": "No sample defined in the 'Parameter' object", - "fingerprint": "thuf5Imiefe3aeTee4soh8quae8ahtho0ap8wen4" - }, - { - "pointer": 6, - "specificDescription": "No sample defined in the 'Parameter' object", - "fingerprint": "faeti4aide0ahTho0shiixo5cheipha9Eigahr3s" - }, - { - "pointer": 14, - "specificDescription": "No sample defined in the 'Parameter' object", - "fingerprint": "Dei9Ahraer7iech8iuk6eeyeero8quea3nahc8ah" - } - ], - "tooManyError": false - }, - "warning-schema-additionalproperties-boolean": { - "description": "Schema defines additionalProperties as a boolean value", - "totalIssues": 3, - "issues": [ - { - "pointer": 2, - "specificDescription": "", - "fingerprint": "shoo1diedoh2aex6mivi9geab9saeyoo7Dae6oth" - }, - { - "pointer": 4, - "specificDescription": "", - "fingerprint": "ooreiz0gepaeSephah6ToN8eC7tioseez4auQu3U" - }, - { - "pointer": 0, - "specificDescription": "", - "fingerprint": "aedaal8uu5aabuohuoSheidoonohSheef2iquee6" - } - ], - "tooManyError": false - }, - 
"warning-schema-format-improper": { - "description": "Schema format is not applicable to the schema's type", - "totalIssues": 1, - "issues": [ - { - "pointer": 5, - "specificDescription": "The format 'int32' of the schema is not applicable to the schema's type 'number'", - "fingerprint": "va8Lieweu5SieTh1ahcoole0Nahhai5ivaechith" - } - ], - "tooManyError": false - } - } - }, - "operationsNoAuthentication": [ - 13, - 16 - ], - "minimalReport": false, - "maxEntriesPerIssue": 30, - "maxImpactedPerEntry": 30, - "security": { - "issueCounter": 5, - "score": 20, - "criticality": 3, - "issues": { - "global-securityscheme-apikey-inheader": { - "description": "Transporting API keys in a header over network allowed", - "issues": [ - { - "score": 0, - "pointer": 21, - "tooManyImpacted": false, - "criticality": 1, - "request": true, - "fingerprint": "auCh0yi8sheumohruegh7of4EiT0ahngooK1aeje" - } - ], - "issueCounter": 1, - "score": 0, - "criticality": 1, - "tooManyError": false - }, - "operation-securityrequirement-apikey-inheader": { - "description": "Operation accepts API keys transported in a header over network", - "issues": [ - { - "score": -2.5, - "pointer": 8, - "tooManyImpacted": false, - "criticality": 3, - "request": true, - "fingerprint": "Eima0iu4xaatoh1lohboophohpheiBai1iR0opei" - }, - { - "score": -2.5, - "pointer": 12, - "tooManyImpacted": false, - "criticality": 3, - "request": true, - "fingerprint": "Ud1ohcetah5iongai8yee0veishogai2vuQuu7me" - }, - { - "score": -2.5, - "pointer": 19, - "tooManyImpacted": false, - "criticality": 3, - "request": true, - "fingerprint": "wooN7xoof5bieChie9Aech5ohm4eerae1enu6ohr" - }, - { - "score": -2.5, - "pointer": 20, - "tooManyImpacted": false, - "criticality": 3, - "request": true, - "fingerprint": "eeliequooliexohfookosang7hooruR4pae9Aiph" - } - ], - "issueCounter": 4, - "score": -10, - "criticality": 3, - "tooManyError": false - } - }, - "subgroupIssueCounter": { - "authentication": { - "none": 0, - "info": 1, - "low": 0, - "medium": 4, - "high": 0, - "critical": 0 - }, - "authorization": { - "none": 0, - "info": 0, - "low": 0, - "medium": 0, - "high": 0, - "critical": 0 - }, - "transport": { - "none": 0, - "info": 0, - "low": 0, - "medium": 0, - "high": 0, - "critical": 0 - } - } - }, - "data": { - "issueCounter": 8, - "score": 59.94, - "criticality": 3, - "issues": { - "parameter-string-maxlength": { - "description": "String parameter has no maximum length defined", - "issues": [ - { - "specificDescription": "String parameter 'user' has no maximum length defined", - "score": -1.87, - "pointer": 14, - "tooManyImpacted": false, - "pointersAffected": [ - 13 - ], - "criticality": 3, - "request": true, - "fingerprint": "eeT0queiSahchohc5meik9Zoomoolah6Weo3phes" - }, - { - "specificDescription": "String parameter 'pass' has no maximum length defined", - "score": -1.87, - "pointer": 15, - "tooManyImpacted": false, - "pointersAffected": [ - 13 - ], - "criticality": 3, - "request": true, - "fingerprint": "ohvieX1AhzuphoocheeVoi0echoGh9coo7thai1o" - } - ], - "issueCounter": 2, - "score": -3.73, - "criticality": 3, - "tooManyError": false - }, - "parameter-string-pattern": { - "description": "String parameter has no pattern defined", - "issues": [ - { - "specificDescription": "String parameter 'user' has no pattern defined", - "score": -2.8, - "pointer": 14, - "tooManyImpacted": false, - "pointersAffected": [ - 13 - ], - "criticality": 3, - "request": true, - "fingerprint": "oveedeisohwahThae4Ier5oghaebaingai5iqueS" - }, - { - "specificDescription": "String 
parameter 'pass' has no pattern defined", - "score": -2.8, - "pointer": 15, - "tooManyImpacted": false, - "pointersAffected": [ - 13 - ], - "criticality": 3, - "request": true, - "fingerprint": "Iyung2laiGaish6kos6quiedeiX5uob3Bozee3mu" - } - ], - "issueCounter": 2, - "score": -5.6, - "criticality": 3, - "tooManyError": false - }, - "response-schema-undefined": { - "description": "Response that should contain a body has no schema defined", - "issues": [ - { - "score": -0.18, - "pointer": 7, - "tooManyImpacted": false, - "criticality": 3, - "response": true, - "fingerprint": "aeVahquu6chai1beaf9neithu8epha0Ohsh6echi" - }, - { - "score": -0.18, - "pointer": 11, - "tooManyImpacted": false, - "criticality": 3, - "response": true, - "fingerprint": "ai8Meishei0oHixuSucaiceL0aqu8uocahyahG6l" - }, - { - "score": -0.18, - "pointer": 18, - "tooManyImpacted": false, - "criticality": 3, - "response": true, - "fingerprint": "euN9zohhohPeesoY8ahbaichae6Ood0nohbio5ke" - } - ], - "issueCounter": 3, - "score": -0.53, - "criticality": 3, - "tooManyError": false - }, - "schema-response-object-without-properties": { - "description": "Schema of a JSON object in a response has no properties defined", - "issues": [ - { - "score": -0.2, - "pointer": 1, - "tooManyImpacted": false, - "criticality": 3, - "response": true, - "fingerprint": "ufuPheiyaelaePood3AeW8ooc3pooj2AiwaiCeil" - } - ], - "issueCounter": 1, - "score": -0.2, - "criticality": 3, - "tooManyError": false - } - }, - "subgroupIssueCounter": { - "parameters": { - "none": 0, - "info": 0, - "low": 0, - "medium": 0, - "high": 0, - "critical": 0 - }, - "responseHeader": { - "none": 0, - "info": 0, - "low": 0, - "medium": 0, - "high": 0, - "critical": 0 - }, - "responseDefinition": { - "none": 0, - "info": 0, - "low": 0, - "medium": 3, - "high": 0, - "critical": 0 - }, - "schema": { - "none": 0, - "info": 0, - "low": 0, - "medium": 1, - "high": 0, - "critical": 0 - }, - "paths": { - "none": 0, - "info": 0, - "low": 0, - "medium": 0, - "high": 0, - "critical": 0 - } - } - }, - "issuesKey": [ - "schema-response-object-without-properties", - "warning-schema-additionalproperties-boolean", - "parameter-string-pattern", - "parameter-string-maxlength", - "global-securityscheme-apikey-inheader", - "operation-securityrequirement-apikey-inheader", - "response-schema-undefined", - "warning-schema-format-improper", - "warning-sample-undefined", - "warning-global-schema-unused" - ], - "summary": { - "oasVersion": "2.0", - "apiVersion": "UAT-JWT-Validation", - "basepath": "", - "apiName": "Example App API", - "description": "Example Sharing API", - "endpoints": [ - "https//example.asia-1.cloud.provider.com/api" - ], - "pathCounter": 6, - "operationCounter": 6, - "parameterCounter": 4, - "requestBodyCounter": 3, - "schemesCounter": { - "https": 6 - }, - "requestContentType": { - "application/json": 2, - "application/x-www-form-urlencoded": 1 - }, - "responseContentType": { - "application/json": 16 - }, - "securitySchemes": { - "access-token": { - "counterInsecure": 0, - "counterSecure": 4, - "type": "apiKey", - "apiKeyIn": "header", - "apiKeyName": "x-access-token" - } - }, - "componentsSchemasCounter": 6, - "componentsResponsesCounter": 0, - "componentsParametersCounter": 0, - "componentsExamplesCounter": 0, - "componentsRequestBodiesCounter": 0, - "componentsHeadersCounter": 0, - "componentsSecuritySchemesCounter": 0, - "componentsLinksCounter": 0, - "componentsCallbacksCounter": 0 - } - }, - "start": "1693265564", - "taskId": "970e33ac-ddfc-11ee-a42e-af596b69b8f4" -} \ 
No newline at end of file diff --git a/unittests/scans/npm_audit_7_plus/many_vulns.json b/unittests/scans/npm_audit_7_plus/many_vulns.json deleted file mode 100644 index 2831c8be15..0000000000 --- a/unittests/scans/npm_audit_7_plus/many_vulns.json +++ /dev/null @@ -1,188 +0,0 @@ -{ - "auditReportVersion": 2, - "vulnerabilities": { - "@vercel/fun": { - "name": "@vercel/fun", - "severity": "moderate", - "isDirect": false, - "via": [ - "debug", - "semver" - ], - "effects": [ - "vercel" - ], - "range": "<=1.1.0", - "nodes": [ - "node_modules/@vercel/fun" - ], - "fixAvailable": { - "name": "vercel", - "version": "32.3.0", - "isSemVerMajor": true - } - }, - "@vercel/node": { - "name": "@vercel/node", - "severity": "low", - "isDirect": false, - "via": [ - "undici" - ], - "effects": [ - "vercel" - ], - "range": "2.14.0 || >=3.0.2", - "nodes": [ - "node_modules/@vercel/node" - ], - "fixAvailable": { - "name": "vercel", - "version": "32.3.0", - "isSemVerMajor": true - } - }, - "debug": { - "name": "debug", - "severity": "moderate", - "isDirect": false, - "via": [ - { - "source": 1094219, - "name": "debug", - "dependency": "debug", - "title": "Regular Expression Denial of Service in debug", - "url": "https://github.com/advisories/GHSA-gxpj-cx7g-858c", - "severity": "moderate", - "cwe": [ - "CWE-400" - ], - "cvss": { - "score": 5.3, - "vectorString": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:L" - }, - "range": ">=4.0.0 <4.3.1" - } - ], - "effects": [ - "@vercel/fun" - ], - "range": "4.0.0 - 4.3.0", - "nodes": [ - "node_modules/@vercel/fun/node_modules/debug" - ], - "fixAvailable": { - "name": "vercel", - "version": "32.3.0", - "isSemVerMajor": true - } - }, - "semver": { - "name": "semver", - "severity": "moderate", - "isDirect": false, - "via": [ - { - "source": 1096482, - "name": "semver", - "dependency": "semver", - "title": "semver vulnerable to Regular Expression Denial of Service", - "url": "https://github.com/advisories/GHSA-c2qf-rxjj-qqgw", - "severity": "moderate", - "cwe": [ - "CWE-1333" - ], - "cvss": { - "score": 5.3, - "vectorString": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:L" - }, - "range": ">=7.0.0 <7.5.2" - } - ], - "effects": [ - "@vercel/fun" - ], - "range": "7.0.0 - 7.5.1", - "nodes": [ - "node_modules/@vercel/fun/node_modules/semver" - ], - "fixAvailable": { - "name": "vercel", - "version": "32.3.0", - "isSemVerMajor": true - } - }, - "undici": { - "name": "undici", - "severity": "low", - "isDirect": false, - "via": [ - { - "source": 1096586, - "name": "undici", - "dependency": "undici", - "title": "Undici proxy-authorization header not cleared on cross-origin redirect in fetch", - "url": "https://github.com/advisories/GHSA-3787-6prv-h9w3", - "severity": "low", - "cwe": [ - "CWE-200" - ], - "cvss": { - "score": 3.9, - "vectorString": "CVSS:3.1/AV:N/AC:H/PR:H/UI:R/S:U/C:L/I:L/A:L" - }, - "range": "<=5.28.2" - } - ], - "effects": [ - "@vercel/node" - ], - "range": "<=5.28.2", - "nodes": [ - "node_modules/undici" - ], - "fixAvailable": { - "name": "vercel", - "version": "32.3.0", - "isSemVerMajor": true - } - }, - "vercel": { - "name": "vercel", - "severity": "moderate", - "isDirect": true, - "via": [ - "@vercel/fun", - "@vercel/node" - ], - "effects": [], - "range": "28.12.3 || 29.0.1 - 29.0.3 || >=32.0.2", - "nodes": [ - "node_modules/vercel" - ], - "fixAvailable": { - "name": "vercel", - "version": "32.3.0", - "isSemVerMajor": true - } - } - }, - "metadata": { - "vulnerabilities": { - "info": 0, - "low": 2, - "moderate": 4, - "high": 0, - "critical": 0, - "total": 6 - }, - 
"dependencies": { - "prod": 737, - "dev": 306, - "optional": 153, - "peer": 50, - "peerOptional": 0, - "total": 1180 - } - } -} diff --git a/unittests/scans/npm_audit_7_plus/no_vuln.json b/unittests/scans/npm_audit_7_plus/no_vuln.json deleted file mode 100644 index 9f407cae89..0000000000 --- a/unittests/scans/npm_audit_7_plus/no_vuln.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "auditReportVersion": 2, - "vulnerabilities": { - }, - "metadata": { - "vulnerabilities": { - "info": 0, - "low": 0, - "moderate": 0, - "high": 0, - "critical": 0, - "total": 0 - }, - "dependencies": { - "prod": 98, - "dev": 0, - "optional": 0, - "peer": 0, - "peerOptional": 0, - "total": 97 - } - } -} diff --git a/unittests/scans/npm_audit_7_plus/one_vuln.json b/unittests/scans/npm_audit_7_plus/one_vuln.json deleted file mode 100644 index 89b48e280f..0000000000 --- a/unittests/scans/npm_audit_7_plus/one_vuln.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "auditReportVersion": 2, - "vulnerabilities": { - "debug": { - "name": "debug", - "severity": "high", - "isDirect": true, - "via": [ - { - "source": 1094222, - "name": "debug", - "dependency": "debug", - "title": "Regular Expression Denial of Service in debug", - "url": "https://github.com/advisories/GHSA-gxpj-cx7g-858c", - "severity": "moderate", - "cwe": [ - "CWE-400" - ], - "cvss": { - "score": 5.3, - "vectorString": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:L" - }, - "range": "<2.6.9" - }, - { - "source": 1094457, - "name": "debug", - "dependency": "debug", - "title": "debug Inefficient Regular Expression Complexity vulnerability", - "url": "https://github.com/advisories/GHSA-9vvw-cc9w-f27h", - "severity": "high", - "cwe": [ - "CWE-1333" - ], - "cvss": { - "score": 7.5, - "vectorString": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H" - }, - "range": "<2.6.9" - }, - "ms" - ], - "effects": [ - ], - "range": "<=2.6.8", - "nodes": [ - "node_modules/debug" - ], - "fixAvailable": { - "name": "express", - "version": "4.18.3", - "isSemVerMajor": false - } - } - }, - "metadata": { - "vulnerabilities": { - "info": 0, - "low": 0, - "moderate": 0, - "high": 1, - "critical": 0, - "total": 1 - }, - "dependencies": { - "prod": 98, - "dev": 0, - "optional": 0, - "peer": 0, - "peerOptional": 0, - "total": 97 - } - } -} - diff --git a/unittests/scans/pip_audit/empty_new.json b/unittests/scans/pip_audit/empty_new.json deleted file mode 100644 index 45f00a3dec..0000000000 --- a/unittests/scans/pip_audit/empty_new.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "dependencies":[] -} diff --git a/unittests/scans/pip_audit/many_vulns_new.json b/unittests/scans/pip_audit/many_vulns_new.json deleted file mode 100644 index 877ebf78ed..0000000000 --- a/unittests/scans/pip_audit/many_vulns_new.json +++ /dev/null @@ -1,91 +0,0 @@ -{ - "dependencies":[ - { - "name": "adal", - "version": "1.2.2", - "vulns": [] - }, - { - "name": "aiohttp", - "version": "3.6.2", - "vulns": [ - { - "id": "PYSEC-2021-76", - "fix_versions": [ - "3.7.4" - ], - "description": "aiohttp is an asynchronous HTTP client/server framework for asyncio and Python. In aiohttp before version 3.7.4 there is an open redirect vulnerability. A maliciously crafted link to an aiohttp-based web-server could redirect the browser to a different website. It is caused by a bug in the `aiohttp.web_middlewares.normalize_path_middleware` middleware. This security problem has been fixed in 3.7.4. Upgrade your dependency using pip as follows \"pip install aiohttp >= 3.7.4\". 
If upgrading is not an option for you, a workaround can be to avoid using `aiohttp.web_middlewares.normalize_path_middleware` in your applications." - } - ] - }, - { - "name": "alabaster", - "version": "0.7.12", - "vulns": [] - }, - { - "name": "azure-devops", - "skip_reason": "Dependency not found on PyPI and could not be audited: azure-devops (0.17.0)" - }, - { - "name": "django", - "version": "3.2.9", - "vulns": [ - { - "id": "PYSEC-2021-439", - "fix_versions": [ - "2.2.25", - "3.1.14", - "3.2.10" - ], - "description": "In Django 2.2 before 2.2.25, 3.1 before 3.1.14, and 3.2 before 3.2.10, HTTP requests for URLs with trailing newlines could bypass upstream access control based on URL paths." - } - ] - }, - { - "name": "lxml", - "version": "4.6.4", - "vulns": [ - { - "id": "PYSEC-2021-852", - "fix_versions": [], - "description": "lxml is a library for processing XML and HTML in the Python language. Prior to version 4.6.5, the HTML Cleaner in lxml.html lets certain crafted script content pass through, as well as script content in SVG files embedded using data URIs. Users that employ the HTML cleaner in a security relevant context should upgrade to lxml 4.6.5 to receive a patch. There are no known workarounds available." - } - ] - }, - { - "name": "twisted", - "version": "18.9.0", - "vulns": [ - { - "id": "PYSEC-2019-128", - "fix_versions": [ - "19.2.1" - ], - "description": "In Twisted before 19.2.1, twisted.web did not validate or sanitize URIs or HTTP methods, allowing an attacker to inject invalid characters such as CRLF." - }, - { - "id": "PYSEC-2020-260", - "fix_versions": [ - "20.3.0rc1" - ], - "description": "In Twisted Web through 19.10.0, there was an HTTP request splitting vulnerability. When presented with a content-length and a chunked encoding header, the content-length took precedence and the remainder of the request body was interpreted as a pipelined request." - }, - { - "id": "PYSEC-2019-129", - "fix_versions": [ - "19.7.0rc1" - ], - "description": "In words.protocols.jabber.xmlstream in Twisted through 19.2.1, XMPP support did not verify certificates when used with TLS, allowing an attacker to MITM connections." - }, - { - "id": "PYSEC-2020-259", - "fix_versions": [ - "20.3.0rc1" - ], - "description": "In Twisted Web through 19.10.0, there was an HTTP request splitting vulnerability. When presented with two content-length headers, it ignored the first header. When the second content-length value was set to zero, the request body was interpreted as a pipelined request." 
- } - ] - } - ] -} diff --git a/unittests/scans/pip_audit/zero_vulns_new.json b/unittests/scans/pip_audit/zero_vulns_new.json deleted file mode 100644 index f32e9b1b25..0000000000 --- a/unittests/scans/pip_audit/zero_vulns_new.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "dependencies":[ - { - "name": "adal", - "version": "1.2.2", - "vulns": [] - }, - { - "name": "alabaster", - "version": "0.7.12", - "vulns": [] - }, - { - "name": "azure-devops", - "skip_reason": "Dependency not found on PyPI and could not be audited: azure-devops (0.17.0)" - } - ] -} diff --git a/unittests/tools/test_checkmarx_one_parser.py b/unittests/tools/test_checkmarx_one_parser.py deleted file mode 100644 index 31d6fdbed5..0000000000 --- a/unittests/tools/test_checkmarx_one_parser.py +++ /dev/null @@ -1,47 +0,0 @@ -from dojo.models import Test -from dojo.tools.checkmarx_one.parser import CheckmarxOneParser -from ..dojo_test_case import DojoTestCase - - -class TestCheckmarxOneParser(DojoTestCase): - - def test_checkmarx_one_many_vulns(self): - with open("unittests/scans/checkmarx_one/checkmarx_one.json") as testfile: - parser = CheckmarxOneParser() - findings = parser.get_findings(testfile, Test()) - testfile.close() - self.assertEqual(5, len(findings)) - with self.subTest(i=0): - for finding in findings: - self.assertIsNotNone(finding.unique_id_from_tool) - self.assertIsNotNone(finding.title) - self.assertIsNotNone(finding.test) - self.assertIsNotNone(finding.date) - self.assertIsNotNone(finding.severity) - self.assertIsNotNone(finding.description) - finding_test = findings[0] - self.assertEqual("Medium", finding_test.severity) - self.assertEqual("/src/helpers/Constants.ts", finding_test.file_path) - - def test_checkmarx_one_many_findings(self): - with open("unittests/scans/checkmarx_one/many_findings.json") as testfile: - parser = CheckmarxOneParser() - findings = parser.get_findings(testfile, Test()) - self.assertEqual(6, len(findings)) - with self.subTest(i=0): - for finding in findings: - self.assertIsNotNone(finding.unique_id_from_tool) - self.assertIsNotNone(finding.title) - self.assertIsNotNone(finding.test) - self.assertIsNotNone(finding.date) - self.assertIsNotNone(finding.severity) - self.assertIsNotNone(finding.description) - finding_test = findings[0] - self.assertEqual("High", finding_test.severity) - self.assertEqual("/qe/testharness/Dockerfile", finding_test.file_path) - - def test_checkmarx_one_no_findings(self): - with open("unittests/scans/checkmarx_one/no_findings.json") as testfile: - parser = CheckmarxOneParser() - findings = parser.get_findings(testfile, Test()) - self.assertEqual(0, len(findings)) diff --git a/unittests/tools/test_checkmarx_parser.py b/unittests/tools/test_checkmarx_parser.py index f09e7d7da1..c43e24fb57 100644 --- a/unittests/tools/test_checkmarx_parser.py +++ b/unittests/tools/test_checkmarx_parser.py @@ -203,8 +203,8 @@ def check_parse_file_with_single_vulnerability_has_single_finding(self, findings item.file_path, ) # ScanStart - self.assertEqual(datetime.date, type(item.date)) - self.assertEqual(datetime.date(2018, 2, 25), item.date) + self.assertEqual(datetime.datetime, type(item.date)) + self.assertEqual(datetime.datetime(2018, 2, 25, 11, 35, 52), item.date) self.assertEqual(bool, type(item.static_finding)) self.assertEqual(True, item.static_finding) @@ -293,7 +293,7 @@ def test_file_name_aggregated_parse_file_with_multiple_vulnerabilities_has_multi finding = findings[0] self.assertEqual("SQL Injection (Assignment5.java)", finding.title) self.assertEqual("High", 
finding.severity) - self.assertEqual(datetime.date(2018, 2, 25), finding.date) + self.assertEqual(datetime.datetime(2018, 2, 25, 11, 35, 52), finding.date) self.assertEqual(True, finding.static_finding) self.assertEqual("WebGoat/webgoat-lessons/challenge/src/main/java/org/owasp/webgoat/plugin/challenge5/challenge6/Assignment5.java", finding.file_path) @@ -312,7 +312,7 @@ def test_detailed_parse_file_with_multiple_vulnerabilities_has_multiple_findings finding = findings[0] self.assertEqual("SQL Injection (Assignment5.java)", finding.title) self.assertEqual("High", finding.severity) - self.assertEqual(datetime.date(2018, 2, 25), finding.date) + self.assertEqual(datetime.datetime(2018, 2, 25, 11, 35, 52), finding.date) self.assertEqual(True, finding.static_finding) self.assertEqual("WebGoat/webgoat-lessons/challenge/src/main/java/org/owasp/webgoat/plugin/challenge5/challenge6/Assignment5.java", finding.file_path) self.assertEqual(50, finding.line) @@ -516,8 +516,8 @@ def check_parse_file_with_utf8_replacement_char(self, findings): item.file_path, ) # ScanStart - self.assertEqual(datetime.date, type(item.date)) - self.assertEqual(datetime.date(2018, 2, 25), item.date) + self.assertEqual(datetime.datetime, type(item.date)) + self.assertEqual(datetime.datetime(2018, 2, 25, 11, 35, 52), item.date) self.assertEqual(bool, type(item.static_finding)) self.assertEqual(True, item.static_finding) @@ -665,8 +665,8 @@ def check_parse_file_with_utf8_various_non_ascii_char(self, findings): item.file_path, ) # ScanStart - self.assertEqual(datetime.date, type(item.date)) - self.assertEqual(datetime.date(2018, 2, 25), item.date) + self.assertEqual(datetime.datetime, type(item.date)) + self.assertEqual(datetime.datetime(2018, 2, 25, 11, 35, 52), item.date) self.assertEqual(bool, type(item.static_finding)) self.assertEqual(True, item.static_finding) @@ -685,8 +685,8 @@ def test_file_with_multiple_findings_is_aggregated_with_query_id(self, mock): # ScanStart self.assertEqual("Client Potential ReDoS In Match (prettify.js)", finding.title) self.assertEqual("Low", finding.severity) - self.assertEqual(datetime.date, type(finding.date)) - self.assertEqual(datetime.date(2021, 11, 17), finding.date) + self.assertEqual(datetime.datetime, type(finding.date)) + self.assertEqual(datetime.datetime(2021, 11, 17, 13, 50, 45), finding.date) self.assertEqual(bool, type(finding.static_finding)) self.assertEqual(True, finding.static_finding) @@ -705,8 +705,8 @@ def test_file_with_empty_filename(self, mock): # ScanStart self.assertEqual("Missing HSTS Header", finding.title) self.assertEqual("Medium", finding.severity) - self.assertEqual(datetime.date, type(finding.date)) - self.assertEqual(datetime.date(2021, 12, 24), finding.date) + self.assertEqual(datetime.datetime, type(finding.date)) + self.assertEqual(datetime.datetime(2021, 12, 24, 9, 12, 14), finding.date) self.assertEqual(bool, type(finding.static_finding)) self.assertEqual(True, finding.static_finding) @@ -791,7 +791,7 @@ def test_file_issue6956(self, mock): self.assertEqual(89, finding.cwe) self.assertEqual("/webgoat-lessons/challenge/src/main/java/org/owasp/webgoat/challenges/challenge5/Assignment5.java", finding.file_path) self.assertEqual(61, finding.line) - self.assertEqual(datetime.date(2022, 5, 6), finding.date) + self.assertEqual(datetime.date(2022, 5, 6), finding.date.date()) if finding.unique_id_from_tool == "SYlu22e7ZQydKJFOlC/o1EsyixQ=": with self.subTest(i="SYlu22e7ZQydKJFOlC/o1EsyixQ="): self.assertEqual("SQL Injection", finding.title) @@ -799,7 +799,7 @@ 
def test_file_issue6956(self, mock): self.assertEqual(89, finding.cwe) self.assertEqual("/webgoat-lessons/sql-injection/src/main/java/org/owasp/webgoat/sql_injection/introduction/SqlInjectionLesson5.java", finding.file_path) self.assertEqual(72, finding.line) - self.assertEqual(datetime.date(2022, 5, 6), finding.date) + self.assertEqual(datetime.date(2022, 5, 6), finding.date.date()) # test one in SCA part if finding.unique_id_from_tool == "GkVx1zoIKcd1EF72zqWrGzeVTmo=": with self.subTest(i="GkVx1zoIKcd1EF72zqWrGzeVTmo="): @@ -812,7 +812,7 @@ def test_file_issue6956(self, mock): self.assertTrue(finding.active) self.assertFalse(finding.verified) self.assertIsNone(finding.line) - self.assertEqual(datetime.date(2022, 5, 6), finding.date) + self.assertEqual(datetime.date(2022, 5, 6), finding.date.date()) # test one in KICS part if finding.unique_id_from_tool == "eZrh18HAPbe2LbDAprSPrwncAC0=": with self.subTest(i="eZrh18HAPbe2LbDAprSPrwncAC0="): @@ -822,26 +822,4 @@ def test_file_issue6956(self, mock): self.assertTrue(finding.active) self.assertFalse(finding.verified) self.assertEqual("/webgoat-server/Dockerfile", finding.file_path) - self.assertEqual(datetime.date(2022, 5, 6), finding.date) - - @patch('dojo.tools.checkmarx.parser.add_language') - def test_finding_date_should_be_date_xml(self, mock): - my_file_handle, product, engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/single_finding.xml" - ) - parser = CheckmarxParser() - parser.set_mode('detailed') - findings = parser.get_findings(my_file_handle, test) - self.teardown(my_file_handle) - self.assertEqual(findings[0].date, datetime.date(2018, 2, 25)) - - @patch('dojo.tools.checkmarx.parser.add_language') - def test_finding_date_should_be_date_json(self, mock): - my_file_handle, product, engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/multiple_findings.json" - ) - parser = CheckmarxParser() - parser.set_mode('detailed') - findings = parser.get_findings(my_file_handle, test) - self.teardown(my_file_handle) - self.assertEqual(findings[0].date, datetime.date(2022, 2, 25)) + self.assertEqual(datetime.date(2022, 5, 6), finding.date.date()) diff --git a/unittests/tools/test_crunch42_parser.py b/unittests/tools/test_crunch42_parser.py deleted file mode 100644 index ea5188d303..0000000000 --- a/unittests/tools/test_crunch42_parser.py +++ /dev/null @@ -1,32 +0,0 @@ -from ..dojo_test_case import DojoTestCase -from dojo.models import Test -from dojo.tools.crunch42.parser import Crunch42Parser - - -class TestCrunch42Parser(DojoTestCase): - - def test_crunch42parser_single_has_many_findings(self): - testfile = open("unittests/scans/crunch42/crunch42_many_findings.json") - parser = Crunch42Parser() - findings = parser.get_findings(testfile, Test()) - testfile.close() - self.assertEqual(8, len(findings)) - with self.subTest(i=0): - finding = findings[0] - self.assertEqual("teephei0aes4ohxur7Atie6zuiCh9weeshue0kai", finding.unique_id_from_tool) - self.assertEqual("Info", finding.severity) - self.assertIsNotNone(finding.description) - self.assertGreater(len(finding.description), 0) - - def test_crunch42parser_single_has_many_findings2(self): - testfile = open("unittests/scans/crunch42/crunch42_many_findings2.json") - parser = Crunch42Parser() - findings = parser.get_findings(testfile, Test()) - testfile.close() - self.assertEqual(5, len(findings)) - with self.subTest(i=0): - finding = findings[0] - self.assertEqual("auCh0yi8sheumohruegh7of4EiT0ahngooK1aeje", finding.unique_id_from_tool) - 
self.assertEqual("Info", finding.severity) - self.assertIsNotNone(finding.description) - self.assertGreater(len(finding.description), 0) diff --git a/unittests/tools/test_npm_audit_7_plus_parser.py b/unittests/tools/test_npm_audit_7_plus_parser.py deleted file mode 100644 index cf1cb339e7..0000000000 --- a/unittests/tools/test_npm_audit_7_plus_parser.py +++ /dev/null @@ -1,41 +0,0 @@ -from os import path -from ..dojo_test_case import DojoTestCase -from dojo.tools.npm_audit_7_plus.parser import NpmAudit7PlusParser -from dojo.models import Test - - -class TestNpmAudit7PlusParser(DojoTestCase): - def test_npm_audit_7_plus_parser_with_no_vuln_has_no_findings(self): - testfile = open(path.join(path.dirname(__file__), "../scans/npm_audit_7_plus/no_vuln.json")) - parser = NpmAudit7PlusParser() - findings = parser.get_findings(testfile, Test()) - testfile.close() - self.assertEqual(0, len(findings)) - - def test_npm_audit_7_plus_parser_with_one_vuln_has_one_findings(self): - testfile = open(path.join(path.dirname(__file__), "../scans/npm_audit_7_plus/one_vuln.json")) - parser = NpmAudit7PlusParser() - findings = parser.get_findings(testfile, Test()) - testfile.close() - self.assertEqual(1, len(findings)) - with self.subTest(i=0): - finding = findings[0] - self.assertEqual("High", finding.severity) - self.assertEqual(400, finding.cwe) - self.assertIsNotNone(finding.description) - self.assertGreater(len(finding.description), 0) - self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:L", finding.cvssv3) - - def test_npm_audit_7_plus_parser_with_many_vuln_has_many_findings(self): - testfile = open(path.join(path.dirname(__file__), "../scans/npm_audit_7_plus/many_vulns.json")) - parser = NpmAudit7PlusParser() - findings = parser.get_findings(testfile, Test()) - testfile.close() - self.assertEqual(6, len(findings)) - with self.subTest(i=0): - finding = findings[0] - self.assertEqual("Medium", finding.severity) - self.assertEqual(1035, finding.cwe) - self.assertIsNotNone(finding.description) - self.assertGreater(len(finding.description), 0) - self.assertEqual("@vercel/fun", finding.title) diff --git a/unittests/tools/test_pip_audit_parser.py b/unittests/tools/test_pip_audit_parser.py index 237945cfc6..eb421f761a 100644 --- a/unittests/tools/test_pip_audit_parser.py +++ b/unittests/tools/test_pip_audit_parser.py @@ -7,83 +7,80 @@ class TestPipAuditParser(DojoTestCase): def test_parser_empty(self): - testfiles = ["unittests/scans/pip_audit/empty.json", - "unittests/scans/pip_audit/empty_new.json"] - for path in testfiles: - testfile = open(path) - parser = PipAuditParser() - findings = parser.get_findings(testfile, Test()) - testfile.close() - self.assertEqual(0, len(findings)) + testfile = open("unittests/scans/pip_audit/empty.json") + parser = PipAuditParser() + findings = parser.get_findings(testfile, Test()) + testfile.close() + self.assertEqual(0, len(findings)) def test_parser_zero_findings(self): - testfiles = ["unittests/scans/pip_audit/zero_vulns.json", - "unittests/scans/pip_audit/zero_vulns_new.json"] - for path in testfiles: - testfile = open(path) - parser = PipAuditParser() - findings = parser.get_findings(testfile, Test()) - testfile.close() - self.assertEqual(0, len(findings)) + testfile = open("unittests/scans/pip_audit/zero_vulns.json") + parser = PipAuditParser() + findings = parser.get_findings(testfile, Test()) + testfile.close() + self.assertEqual(0, len(findings)) def test_parser_many_vulns(self): - testfiles = ["unittests/scans/pip_audit/many_vulns.json", - 
"unittests/scans/pip_audit/many_vulns_new.json"] - for path in testfiles: - testfile = open(path) - parser = PipAuditParser() - findings = parser.get_findings(testfile, Test()) - testfile.close() - self.assertEqual(7, len(findings)) + testfile = open("unittests/scans/pip_audit/many_vulns.json") + parser = PipAuditParser() + findings = parser.get_findings(testfile, Test()) + testfile.close() + self.assertEqual(7, len(findings)) - finding = findings[0] - self.assertEqual('PYSEC-2021-76 in aiohttp:3.6.2', finding.title) - description = 'aiohttp is an asynchronous HTTP client/server framework for asyncio and Python. In aiohttp before version 3.7.4 there is an open redirect vulnerability. A maliciously crafted link to an aiohttp-based web-server could redirect the browser to a different website. It is caused by a bug in the `aiohttp.web_middlewares.normalize_path_middleware` middleware. This security problem has been fixed in 3.7.4. Upgrade your dependency using pip as follows "pip install aiohttp >= 3.7.4". If upgrading is not an option for you, a workaround can be to avoid using `aiohttp.web_middlewares.normalize_path_middleware` in your applications.' - self.assertEqual(description, finding.description) - self.assertEqual(1395, finding.cwe) - vulnerability_ids = finding.unsaved_vulnerability_ids - self.assertEqual(1, len(vulnerability_ids)) - self.assertEqual('PYSEC-2021-76', vulnerability_ids[0]) - self.assertEqual('Medium', finding.severity) - self.assertEqual('Upgrade to version: 3.7.4', finding.mitigation) - self.assertEqual('aiohttp', finding.component_name) - self.assertEqual('3.6.2', finding.component_version) - self.assertEqual('PYSEC-2021-76', finding.vuln_id_from_tool) + finding = findings[0] + self.assertEqual('PYSEC-2021-76 in aiohttp:3.6.2', finding.title) + description = 'aiohttp is an asynchronous HTTP client/server framework for asyncio and Python. In aiohttp before version 3.7.4 there is an open redirect vulnerability. A maliciously crafted link to an aiohttp-based web-server could redirect the browser to a different website. It is caused by a bug in the `aiohttp.web_middlewares.normalize_path_middleware` middleware. This security problem has been fixed in 3.7.4. Upgrade your dependency using pip as follows "pip install aiohttp >= 3.7.4". If upgrading is not an option for you, a workaround can be to avoid using `aiohttp.web_middlewares.normalize_path_middleware` in your applications.' + self.assertEqual(description, finding.description) + self.assertEqual(1352, finding.cwe) + vulnerability_ids = finding.unsaved_vulnerability_ids + self.assertEqual(1, len(vulnerability_ids)) + self.assertEqual('PYSEC-2021-76', vulnerability_ids[0]) + self.assertEqual('Medium', finding.severity) + self.assertEqual('Upgrade to version: 3.7.4', finding.mitigation) + self.assertEqual('aiohttp', finding.component_name) + self.assertEqual('3.6.2', finding.component_version) + self.assertEqual('PYSEC-2021-76', finding.vuln_id_from_tool) - finding = findings[1] - self.assertEqual('PYSEC-2021-439 in django:3.2.9', finding.title) - description = 'In Django 2.2 before 2.2.25, 3.1 before 3.1.14, and 3.2 before 3.2.10, HTTP requests for URLs with trailing newlines could bypass upstream access control based on URL paths.' 
- self.assertEqual(description, finding.description) - vulnerability_ids = finding.unsaved_vulnerability_ids - self.assertEqual(1, len(vulnerability_ids)) - self.assertEqual('PYSEC-2021-439', vulnerability_ids[0]) - self.assertEqual(1395, finding.cwe) - self.assertEqual('Medium', finding.severity) - self.assertEqual('django', finding.component_name) - self.assertEqual('3.2.9', finding.component_version) - self.assertEqual('PYSEC-2021-439', finding.vuln_id_from_tool) + finding = findings[1] + self.assertEqual('PYSEC-2021-439 in django:3.2.9', finding.title) + description = 'In Django 2.2 before 2.2.25, 3.1 before 3.1.14, and 3.2 before 3.2.10, HTTP requests for URLs with trailing newlines could bypass upstream access control based on URL paths.' + self.assertEqual(description, finding.description) + vulnerability_ids = finding.unsaved_vulnerability_ids + self.assertEqual(1, len(vulnerability_ids)) + self.assertEqual('PYSEC-2021-439', vulnerability_ids[0]) + self.assertEqual(1352, finding.cwe) + self.assertEqual('Medium', finding.severity) + mitigation = '''Upgrade to version: +- 2.2.25 +- 3.1.14 +- 3.2.10''' + self.assertEqual(mitigation, finding.mitigation) + self.assertEqual('django', finding.component_name) + self.assertEqual('3.2.9', finding.component_version) + self.assertEqual('PYSEC-2021-439', finding.vuln_id_from_tool) - finding = findings[2] - self.assertEqual('PYSEC-2021-852 in lxml:4.6.4', finding.title) - description = 'lxml is a library for processing XML and HTML in the Python language. Prior to version 4.6.5, the HTML Cleaner in lxml.html lets certain crafted script content pass through, as well as script content in SVG files embedded using data URIs. Users that employ the HTML cleaner in a security relevant context should upgrade to lxml 4.6.5 to receive a patch. There are no known workarounds available.' - self.assertEqual(description, finding.description) - vulnerability_ids = finding.unsaved_vulnerability_ids - self.assertEqual(1, len(vulnerability_ids)) - self.assertEqual('PYSEC-2021-852', vulnerability_ids[0]) - self.assertEqual(1395, finding.cwe) - self.assertEqual('Medium', finding.severity) - self.assertEqual('lxml', finding.component_name) - self.assertEqual('4.6.4', finding.component_version) - self.assertEqual('PYSEC-2021-852', finding.vuln_id_from_tool) + finding = findings[2] + self.assertEqual('PYSEC-2021-852 in lxml:4.6.4', finding.title) + description = 'lxml is a library for processing XML and HTML in the Python language. Prior to version 4.6.5, the HTML Cleaner in lxml.html lets certain crafted script content pass through, as well as script content in SVG files embedded using data URIs. Users that employ the HTML cleaner in a security relevant context should upgrade to lxml 4.6.5 to receive a patch. There are no known workarounds available.' 
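# A minimal sketch of the mitigation string asserted in the django case above
# and the lxml case below: pip-audit reports zero or more fix_versions per
# vulnerability, and the parser renders one version inline and several as a
# "- " list. The helper name is hypothetical; the parser accumulates the same
# string in a loop.
def format_mitigation(fix_versions):
    if not fix_versions:
        return None  # no fix published, mitigation stays unset
    if len(fix_versions) == 1:
        return f"Upgrade to version: {fix_versions[0]}"
    return "Upgrade to version:" + "".join(f"\n- {v}" for v in fix_versions)

# format_mitigation(["2.2.25", "3.1.14", "3.2.10"]) yields the three-line
# expectation for PYSEC-2021-439; an empty list yields None, matching the
# assertIsNone(finding.mitigation) check for PYSEC-2021-852.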
+ self.assertEqual(description, finding.description) + vulnerability_ids = finding.unsaved_vulnerability_ids + self.assertEqual(1, len(vulnerability_ids)) + self.assertEqual('PYSEC-2021-852', vulnerability_ids[0]) + self.assertEqual(1352, finding.cwe) + self.assertEqual('Medium', finding.severity) + self.assertIsNone(finding.mitigation) + self.assertEqual('lxml', finding.component_name) + self.assertEqual('4.6.4', finding.component_version) + self.assertEqual('PYSEC-2021-852', finding.vuln_id_from_tool) - finding = findings[3] - self.assertEqual('PYSEC-2019-128 in twisted:18.9.0', finding.title) + finding = findings[3] + self.assertEqual('PYSEC-2019-128 in twisted:18.9.0', finding.title) - finding = findings[4] - self.assertEqual('PYSEC-2020-260 in twisted:18.9.0', finding.title) + finding = findings[4] + self.assertEqual('PYSEC-2020-260 in twisted:18.9.0', finding.title) - finding = findings[5] - self.assertEqual('PYSEC-2019-129 in twisted:18.9.0', finding.title) + finding = findings[5] + self.assertEqual('PYSEC-2019-129 in twisted:18.9.0', finding.title) - finding = findings[6] - self.assertEqual('PYSEC-2020-259 in twisted:18.9.0', finding.title) + finding = findings[6] + self.assertEqual('PYSEC-2020-259 in twisted:18.9.0', finding.title) From 7443312e3e91de36ab487ffbc2e3ff00a2472ede Mon Sep 17 00:00:00 2001 From: biennd4 Date: Tue, 12 Mar 2024 12:35:59 +0700 Subject: [PATCH 09/21] Revert "Revert "Merge remote-tracking branch 'upstream/dev' into feature-checkmarx-cxflow-sast"" This reverts commit f9cdafb72881454741e8fbb3dd2358dfb2c79fd5. --- .github/workflows/k8s-tests.yml | 12 - Dockerfile.integration-tests-debian | 2 +- components/yarn.lock | 4 - .../parsers/file/checkmarx_one.md | 8 + .../en/integrations/parsers/file/crunch42.md | 8 + .../parsers/file/npm_audit_7_plus.md | 26 ++ .../en/integrations/parsers/file/pip_audit.md | 38 +- dojo/endpoint/views.py | 18 +- dojo/jira_link/helper.py | 190 +++++--- dojo/locale/en/LC_MESSAGES/django.po | 2 +- dojo/models.py | 2 +- dojo/product/views.py | 158 ++++--- dojo/settings/settings.dist.py | 5 + dojo/static/dojo/js/metrics.js | 6 - dojo/templates/base.html | 6 +- dojo/templates/dojo/endpoints.html | 15 +- dojo/templates/dojo/product.html | 4 - dojo/templates/dojo/view_endpoint.html | 8 +- dojo/tools/checkmarx/parser.py | 6 +- dojo/tools/checkmarx_one/__init__.py | 0 dojo/tools/checkmarx_one/parser.py | 110 +++++ dojo/tools/crunch42/__init__.py | 0 dojo/tools/crunch42/parser.py | 88 ++++ dojo/tools/npm_audit_7_plus/__init__.py | 0 dojo/tools/npm_audit_7_plus/parser.py | 225 +++++++++ dojo/tools/pip_audit/parser.py | 136 ++++-- dojo/utils.py | 7 +- helm/defectdojo/Chart.yaml | 2 +- .../scans/checkmarx_one/checkmarx_one.json | 284 +++++++++++ .../scans/checkmarx_one/many_findings.json | 258 ++++++++++ .../scans/checkmarx_one/no_findings.json | 6 + .../crunch42/crunch42_many_findings.json | 251 ++++++++++ .../crunch42/crunch42_many_findings2.json | 442 ++++++++++++++++++ .../scans/npm_audit_7_plus/many_vulns.json | 188 ++++++++ unittests/scans/npm_audit_7_plus/no_vuln.json | 23 + .../scans/npm_audit_7_plus/one_vuln.json | 75 +++ unittests/scans/pip_audit/empty_new.json | 3 + unittests/scans/pip_audit/many_vulns_new.json | 91 ++++ unittests/scans/pip_audit/zero_vulns_new.json | 18 + unittests/tools/test_checkmarx_one_parser.py | 47 ++ unittests/tools/test_checkmarx_parser.py | 54 ++- unittests/tools/test_crunch42_parser.py | 32 ++ .../tools/test_npm_audit_7_plus_parser.py | 41 ++ unittests/tools/test_pip_audit_parser.py | 135 +++--- 44 files 
changed, 2686 insertions(+), 348 deletions(-) create mode 100644 docs/content/en/integrations/parsers/file/checkmarx_one.md create mode 100644 docs/content/en/integrations/parsers/file/crunch42.md create mode 100644 docs/content/en/integrations/parsers/file/npm_audit_7_plus.md create mode 100644 dojo/tools/checkmarx_one/__init__.py create mode 100644 dojo/tools/checkmarx_one/parser.py create mode 100644 dojo/tools/crunch42/__init__.py create mode 100644 dojo/tools/crunch42/parser.py create mode 100644 dojo/tools/npm_audit_7_plus/__init__.py create mode 100644 dojo/tools/npm_audit_7_plus/parser.py create mode 100644 unittests/scans/checkmarx_one/checkmarx_one.json create mode 100644 unittests/scans/checkmarx_one/many_findings.json create mode 100644 unittests/scans/checkmarx_one/no_findings.json create mode 100644 unittests/scans/crunch42/crunch42_many_findings.json create mode 100644 unittests/scans/crunch42/crunch42_many_findings2.json create mode 100644 unittests/scans/npm_audit_7_plus/many_vulns.json create mode 100644 unittests/scans/npm_audit_7_plus/no_vuln.json create mode 100644 unittests/scans/npm_audit_7_plus/one_vuln.json create mode 100644 unittests/scans/pip_audit/empty_new.json create mode 100644 unittests/scans/pip_audit/many_vulns_new.json create mode 100644 unittests/scans/pip_audit/zero_vulns_new.json create mode 100644 unittests/tools/test_checkmarx_one_parser.py create mode 100644 unittests/tools/test_crunch42_parser.py create mode 100644 unittests/tools/test_npm_audit_7_plus_parser.py diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml index f5ec107d83..dd34b88d76 100644 --- a/.github/workflows/k8s-tests.yml +++ b/.github/workflows/k8s-tests.yml @@ -29,14 +29,6 @@ env: --set mysql.enabled=false \ --set createPostgresqlSecret=true \ " - HELM_PGHA_DATABASE_SETTINGS: " \ - --set database=postgresqlha \ - --set postgresql.enabled=false \ - --set mysql.enabled=false \ - --set postgresqlha.enabled=true \ - --set createPostgresqlHaSecret=true \ - --set createPostgresqlHaPgpoolSecret=true \ - " jobs: setting_minikube_cluster: name: Kubernetes Deployment @@ -64,10 +56,6 @@ jobs: brokers: redis k8s: 'v1.23.9' os: debian - - databases: pgsqlha - brokers: rabbit - k8s: 'v1.23.9' - os: debian - databases: pgsql brokers: rabbit k8s: 'v1.23.9' diff --git a/Dockerfile.integration-tests-debian b/Dockerfile.integration-tests-debian index c7db1f1fee..545e4e3ef7 100644 --- a/Dockerfile.integration-tests-debian +++ b/Dockerfile.integration-tests-debian @@ -1,7 +1,7 @@ # code: language=Dockerfile -FROM openapitools/openapi-generator-cli:v7.3.0@sha256:74b9992692c836e42a02980db4b76bee94e17075e4487cd80f5c540dd57126b9 as openapitools +FROM openapitools/openapi-generator-cli:v7.4.0@sha256:579832bed49ea6c275ce2fb5f2d515f5b03d2b6243f3c80fa8430e4f5a770e9a as openapitools FROM python:3.11.4-slim-bullseye@sha256:40319d0a897896e746edf877783ef39685d44e90e1e6de8d964d0382df0d4952 as build WORKDIR /app RUN \ diff --git a/components/yarn.lock b/components/yarn.lock index ffe72a3aaf..d3d65c363f 100644 --- a/components/yarn.lock +++ b/components/yarn.lock @@ -538,10 +538,6 @@ fast-levenshtein@~2.0.6: resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== -flot-axis@markrcote/flot-axislabels#*: - version "0.0.0" - resolved 
"https://codeload.github.com/markrcote/flot-axislabels/tar.gz/a181e09d04d120d05e5bc2baaa8738b5b3670428" - flot@flot/flot#~0.8.3: version "0.8.3" resolved "https://codeload.github.com/flot/flot/tar.gz/453b017cc5acfd75e252b93e8635f57f4196d45d" diff --git a/docs/content/en/integrations/parsers/file/checkmarx_one.md b/docs/content/en/integrations/parsers/file/checkmarx_one.md new file mode 100644 index 0000000000..1d5a07f0ca --- /dev/null +++ b/docs/content/en/integrations/parsers/file/checkmarx_one.md @@ -0,0 +1,8 @@ +--- +title: "Checkmarx One Scan" +toc_hide: true +--- +Import JSON Checkmarx One scanner reports + +### Sample Scan Data +Sample Checkmarx One scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/checkmarx_one). \ No newline at end of file diff --git a/docs/content/en/integrations/parsers/file/crunch42.md b/docs/content/en/integrations/parsers/file/crunch42.md new file mode 100644 index 0000000000..e8aa1b1e55 --- /dev/null +++ b/docs/content/en/integrations/parsers/file/crunch42.md @@ -0,0 +1,8 @@ +--- +title: "Crunch42 Scan" +toc_hide: true +--- +Import JSON findings from Crunch42 vulnerability scan tool. + +### Sample Scan Data +Sample Crunch42 Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/crunch42). \ No newline at end of file diff --git a/docs/content/en/integrations/parsers/file/npm_audit_7_plus.md b/docs/content/en/integrations/parsers/file/npm_audit_7_plus.md new file mode 100644 index 0000000000..a4b4a090b0 --- /dev/null +++ b/docs/content/en/integrations/parsers/file/npm_audit_7_plus.md @@ -0,0 +1,26 @@ +--- +title: "NPM Audit Version 7+" +toc_hide: true +--- + +**Note: This parser only supports import from NPM Audit v7 or newer.** + +Node Package Manager (NPM) Audit plugin output file can be imported in +JSON format. Only imports the \'vulnerabilities\' subtree. + +### File Types +This parser expects a JSON file. Can only import NPM Audit files from NPM Audit v7 or newer. It aims to provide the same +information as the non-JSON formatted output. + +Attempting to import a file from a version less than 7 of NPM Audit will raise an error message. + +### Command Used To Generate Output +Either of these commands will work: +- \`npm audit --json\` +- \`npm audit fix --dry-run --json\` + +### Sample Scan Data +Sample NPM Audit scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/npm_audit_7_plus). + +### Link To Tool +See NPM-Audit-Report on GitHub: https://github.com/npm/npm-audit-report/ diff --git a/docs/content/en/integrations/parsers/file/pip_audit.md b/docs/content/en/integrations/parsers/file/pip_audit.md index df24cdbe7a..96b9b250d5 100644 --- a/docs/content/en/integrations/parsers/file/pip_audit.md +++ b/docs/content/en/integrations/parsers/file/pip_audit.md @@ -2,7 +2,41 @@ title: "pip-audit Scan" toc_hide: true --- -Import pip-audit JSON scan report + +Import pip-audit JSON scan report. + +### File Types +This parser expects a JSON file. + +The parser can handle legacy and current JSON format. + +The current format has added a `dependencies` element: + + { + "dependencies": [ + { + "name": "pyopenssl", + "version": "23.1.0", + "vulns": [] + }, + ... + ] + ... + } + +The legacy format does not include the `dependencies` key: + + [ + { + "name": "adal", + "version": "1.2.2", + "vulns": [] + }, + ... 
+ ] ### Sample Scan Data -Sample pip-audit Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/pip_audit). \ No newline at end of file +Sample pip-audit Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/pip_audit). + +### Link To Tool +[pip-audit](https://pypi.org/project/pip-audit/) diff --git a/dojo/endpoint/views.py b/dojo/endpoint/views.py index c2b491eb1a..0f5b7676c7 100644 --- a/dojo/endpoint/views.py +++ b/dojo/endpoint/views.py @@ -33,12 +33,6 @@ def process_endpoints_view(request, host_view=False, vulnerable=False): if vulnerable: endpoints = Endpoint.objects.filter( - finding__active=True, - finding__verified=True, - finding__out_of_scope=False, - finding__mitigated__isnull=True, - finding__false_p=False, - finding__duplicate=False, status_endpoint__mitigated=False, status_endpoint__false_positive=False, status_endpoint__out_of_scope=False, @@ -124,12 +118,12 @@ def process_endpoint_view(request, eid, host_view=False): endpoints = endpoint.host_endpoints() endpoint_metadata = None all_findings = endpoint.host_findings() - active_verified_findings = endpoint.host_active_verified_findings() + active_findings = endpoint.host_active_findings() else: endpoints = None endpoint_metadata = dict(endpoint.endpoint_meta.values_list('name', 'value')) all_findings = endpoint.findings.all() - active_verified_findings = endpoint.active_verified_findings() + active_findings = endpoint.active_findings() if all_findings: start_date = timezone.make_aware(datetime.combine(all_findings.last().date, datetime.min.time())) @@ -148,12 +142,8 @@ def process_endpoint_view(request, eid, host_view=False): monthly_counts = get_period_counts(all_findings, closed_findings, None, months_between, start_date, relative_delta='months') - paged_findings = get_page_items(request, active_verified_findings, 25) - - vulnerable = False - - if active_verified_findings.count() != 0: - vulnerable = True + paged_findings = get_page_items(request, active_findings, 25) + vulnerable = active_findings.count() != 0 product_tab = Product_Tab(endpoint.product, "Host" if host_view else "Endpoint", tab="endpoints") return render(request, diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py index 4f7360fc46..7b20a8cc10 100644 --- a/dojo/jira_link/helper.py +++ b/dojo/jira_link/helper.py @@ -1,4 +1,5 @@ import logging +from typing import Any from dojo.utils import add_error_message_to_response, get_system_setting, to_str_typed import os import io @@ -695,6 +696,13 @@ def prepare_jira_issue_fields( def add_jira_issue(obj, *args, **kwargs): + def failure_to_add_message(message: str, exception: Exception, object: Any) -> bool: + if exception: + logger.exception(exception) + logger.error(message) + log_jira_alert(message, obj) + return False + logger.info('trying to create a new jira issue for %d:%s', obj.id, to_str_typed(obj)) if not is_jira_enabled(): @@ -702,9 +710,7 @@ def add_jira_issue(obj, *args, **kwargs): if not is_jira_configured_and_enabled(obj): message = 'Object %s cannot be pushed to JIRA as there is no JIRA configuration for %s.' 
% (obj.id, to_str_typed(obj)) - logger.error(message) - log_jira_alert(message, obj) - return False + return failure_to_add_message(message, None, obj) jira_project = get_jira_project(obj) jira_instance = get_jira_instance(obj) @@ -719,19 +725,23 @@ def add_jira_issue(obj, *args, **kwargs): logger.warning("The JIRA issue will NOT be created.") return False logger.debug('Trying to create a new JIRA issue for %s...', to_str_typed(obj)) - meta = None + # Attempt to get the jira connection try: JIRAError.log_to_tempfile = False jira = get_jira_connection(jira_instance) - - labels = get_labels(obj) + get_tags(obj) - if labels: - labels = list(dict.fromkeys(labels)) # de-dup - - duedate = None - if System_Settings.objects.get().enable_finding_sla: - duedate = obj.sla_deadline() - + except Exception as e: + message = f"The following jira instance could not be connected: {jira_instance} - {e.text}" + return failure_to_add_message(message, e, obj) + # Set the list of labels to set on the jira issue + labels = get_labels(obj) + get_tags(obj) + if labels: + labels = list(dict.fromkeys(labels)) # de-dup + # Determine what due date to set on the jira issue + duedate = None + if System_Settings.objects.get().enable_finding_sla: + duedate = obj.sla_deadline() + # Set the fields that will compose the jira issue + try: issuetype_fields = get_issuetype_fields(jira, jira_project.project_key, jira_instance.default_issue_type) fields = prepare_jira_issue_fields( project_key=jira_project.project_key, @@ -747,16 +757,40 @@ def add_jira_issue(obj, *args, **kwargs): duedate=duedate, issuetype_fields=issuetype_fields, default_assignee=jira_project.default_assignee) - + except TemplateDoesNotExist as e: + message = f"Failed to find a jira issue template to be used - {e}" + return failure_to_add_message(message, e, obj) + except Exception as e: + message = f"Failed to fetch fields for {jira_instance.default_issue_type} under project {jira_project.project_key} - {e}" + return failure_to_add_message(message, e, obj) + # Create a new issue in Jira with the fields set in the last step + try: logger.debug('sending fields to JIRA: %s', fields) new_issue = jira.create_issue(fields) + logger.debug('saving JIRA_Issue for %s finding %s', new_issue.key, obj.id) + j_issue = JIRA_Issue(jira_id=new_issue.id, jira_key=new_issue.key, jira_project=jira_project) + j_issue.set_obj(obj) + j_issue.jira_creation = timezone.now() + j_issue.jira_change = timezone.now() + j_issue.save() + jira.issue(new_issue.id) + logger.info('Created the following jira issue for %d:%s', obj.id, to_str_typed(obj)) + except Exception as e: + message = f"Failed to create jira issue with the following payload: {fields} - {e}" + return failure_to_add_message(message, e, obj) + # Attempt to set a default assignee + try: if jira_project.default_assignee: created_assignee = str(new_issue.get_field('assignee')) logger.debug("new issue created with assignee %s", created_assignee) if created_assignee != jira_project.default_assignee: jira.assign_issue(new_issue.key, jira_project.default_assignee) - - # Upload dojo finding screenshots to Jira + except Exception as e: + message = f"Failed to assign the default user: {jira_project.default_assignee} - {e}" + # Do not return here as this should be a soft failure that should be logged + failure_to_add_message(message, e, obj) + # Upload dojo finding screenshots to Jira + try: findings = [obj] if isinstance(obj, Finding_Group): findings = obj.findings.all() @@ -771,7 +805,22 @@ def add_jira_issue(obj, *args, **kwargs): 
settings.MEDIA_ROOT + '/' + pic) except FileNotFoundError as e: logger.info(e) - + except Exception as e: + message = f"Failed to attach attachments to the jira issue: {e}" + # Do not return here as this should be a soft failure that should be logged + failure_to_add_message(message, e, obj) + # Add any notes that already exist in the finding to the JIRA + try: + for find in findings: + if find.notes.all(): + for note in find.notes.all().reverse(): + add_comment(obj, note) + except Exception as e: + message = f"Failed to add notes to the jira ticket: {e}" + # Do not return here as this should be a soft failure that should be logged + failure_to_add_message(message, e, obj) + # Determine whether to assign this new jira issue to a mapped epic + try: if jira_project.enable_engagement_epic_mapping: eng = obj.test.engagement logger.debug('Adding to EPIC Map: %s', eng.name) @@ -780,36 +829,11 @@ def add_jira_issue(obj, *args, **kwargs): add_issues_to_epic(jira, obj, epic_id=epic.jira_id, issue_keys=[str(new_issue.id)], ignore_epics=True) else: logger.info('The following EPIC does not exist: %s', eng.name) + except Exception as e: + message = f"Failed to assign jira issue to existing epic: {e}" + return failure_to_add_message(message, e, obj) - # only link the new issue if it was successfully created, incl attachments and epic link - logger.debug('saving JIRA_Issue for %s finding %s', new_issue.key, obj.id) - j_issue = JIRA_Issue( - jira_id=new_issue.id, jira_key=new_issue.key, jira_project=jira_project) - j_issue.set_obj(obj) - - j_issue.jira_creation = timezone.now() - j_issue.jira_change = timezone.now() - j_issue.save() - jira.issue(new_issue.id) - - logger.info('Created the following jira issue for %d:%s', obj.id, to_str_typed(obj)) - - # Add any notes that already exist in the finding to the JIRA - for find in findings: - if find.notes.all(): - for note in find.notes.all().reverse(): - add_comment(obj, note) - - return True - except TemplateDoesNotExist as e: - logger.exception(e) - log_jira_alert(str(e), obj) - return False - except JIRAError as e: - logger.exception(e) - logger.error("jira_meta for project: %s and url: %s meta: %s", jira_project.project_key, jira_project.jira_instance.url, json.dumps(meta, indent=4)) # this is None safe - log_jira_alert(e.text, obj) - return False + return True # we need two separate celery tasks due to the decorators we're using to map to/from ids @@ -831,6 +855,13 @@ def update_jira_issue_for_finding_group(finding_group, *args, **kwargs): def update_jira_issue(obj, *args, **kwargs): + def failure_to_update_message(message: str, exception: Exception, obj: Any) -> bool: + if exception: + logger.exception(exception) + logger.error(message) + log_jira_alert(message, obj) + return False + logger.debug('trying to update a linked jira issue for %d:%s', obj.id, to_str_typed(obj)) if not is_jira_enabled(): @@ -841,21 +872,22 @@ def update_jira_issue(obj, *args, **kwargs): if not is_jira_configured_and_enabled(obj): message = 'Object %s cannot be pushed to JIRA as there is no JIRA configuration for %s.' 
% (obj.id, to_str_typed(obj)) - logger.error(message) - log_jira_alert(message, obj) - return False + return failure_to_update_message(message, None, obj) j_issue = obj.jira_issue - meta = None try: JIRAError.log_to_tempfile = False jira = get_jira_connection(jira_instance) issue = jira.issue(j_issue.jira_id) - - labels = get_labels(obj) + get_tags(obj) - if labels: - labels = list(dict.fromkeys(labels)) # de-dup - + except Exception as e: + message = f"The following jira instance could not be connected: {jira_instance} - {e}" + return failure_to_update_message(message, e, obj) + # Set the list of labels to set on the jira issue + labels = get_labels(obj) + get_tags(obj) + if labels: + labels = list(dict.fromkeys(labels)) # de-dup + # Set the fields that will compose the jira issue + try: issuetype_fields = get_issuetype_fields(jira, jira_project.project_key, jira_instance.default_issue_type) fields = prepare_jira_issue_fields( project_key=jira_project.project_key, @@ -868,26 +900,38 @@ def update_jira_issue(obj, *args, **kwargs): # Do not update the priority in jira after creation as this could have changed in jira, but should not change in dojo # priority_name=jira_priority(obj), issuetype_fields=issuetype_fields) - + except Exception as e: + message = f"Failed to fetch fields for {jira_instance.default_issue_type} under project {jira_project.project_key} - {e}" + return failure_to_update_message(message, e, obj) + # Update the issue in jira + try: logger.debug('sending fields to JIRA: %s', fields) - issue.update( summary=fields['summary'], description=fields['description'], # Do not update the priority in jira after creation as this could have changed in jira, but should not change in dojo # priority=fields['priority'], fields=fields) - + j_issue.jira_change = timezone.now() + j_issue.save() + except Exception as e: + message = f"Failed to update the jira issue with the following payload: {fields} - {e}" + return failure_to_update_message(message, e, obj) + # Update the status in jira + try: push_status_to_jira(obj, jira_instance, jira, issue) - - # Upload dojo finding screenshots to Jira + except Exception as e: + message = f"Failed to update the jira issue status - {e}" + return failure_to_update_message(message, e, obj) + # Upload dojo finding screenshots to Jira + try: findings = [obj] if isinstance(obj, Finding_Group): findings = obj.findings.all() for find in findings: for pic in get_file_images(find): - # It doesn't look like the celery cotainer has anything in the media + # It doesn't look like the celery container has anything in the media # folder. Has this feature ever worked? 
try: jira_attachment( @@ -895,7 +939,12 @@ def update_jira_issue(obj, *args, **kwargs): settings.MEDIA_ROOT + '/' + pic) except FileNotFoundError as e: logger.info(e) - + except Exception as e: + message = f"Failed to attach attachments to the jira issue: {e}" + # Do not return here as this should be a soft failure that should be logged + failure_to_update_message(message, e, obj) + # Determine whether to assign this new jira issue to a mapped epic + try: if jira_project.enable_engagement_epic_mapping: eng = find.test.engagement logger.debug('Adding to EPIC Map: %s', eng.name) @@ -904,20 +953,11 @@ def update_jira_issue(obj, *args, **kwargs): add_issues_to_epic(jira, obj, epic_id=epic.jira_id, issue_keys=[str(j_issue.jira_id)], ignore_epics=True) else: logger.info('The following EPIC does not exist: %s', eng.name) + except Exception as e: + message = f"Failed to assign jira issue to existing epic: {e}" + return failure_to_update_message(message, e, obj) - j_issue.jira_change = timezone.now() - j_issue.save() - - logger.debug('Updated the following linked jira issue for %d:%s', find.id, find.title) - return True - - except JIRAError as e: - logger.exception(e) - logger.error("jira_meta for project: %s and url: %s meta: %s", jira_project.project_key, jira_project.jira_instance.url, json.dumps(meta, indent=4)) # this is None safe - if issue_from_jira_is_active(issue): - # Only alert if the upstream JIRA is active, we don't care about closed issues - log_jira_alert(e.text, obj) - return False + return True def get_jira_issue_from_jira(find): diff --git a/dojo/locale/en/LC_MESSAGES/django.po b/dojo/locale/en/LC_MESSAGES/django.po index ab26c8cbdb..92e365e334 100644 --- a/dojo/locale/en/LC_MESSAGES/django.po +++ b/dojo/locale/en/LC_MESSAGES/django.po @@ -3748,7 +3748,7 @@ msgid "" "tags, references, languages or technologies contain the search query and " "products whose\n" " name, tags or description contain the " -"search query.
Advanced search operators: (Restrict results to a certain " +"search query.
Advanced search operators: (Restrict results to a certain " "type) product:,\n" " engagement:, finding:, endpoint:, tag:, " "language:, technology: or vulnerability_id:.\n" diff --git a/dojo/models.py b/dojo/models.py index 362ec399b6..36a7d2e520 100755 --- a/dojo/models.py +++ b/dojo/models.py @@ -1124,7 +1124,7 @@ def endpoint_count(self): endpoints = getattr(self, 'active_endpoints', None) if endpoints: return len(self.active_endpoints) - return None + return 0 def open_findings(self, start_date=None, end_date=None): if start_date is None or end_date is None: diff --git a/dojo/product/views.py b/dojo/product/views.py index ee7c3b35e8..6291540342 100755 --- a/dojo/product/views.py +++ b/dojo/product/views.py @@ -112,8 +112,11 @@ def prefetch_for_product(prods): prefetched_prods = prefetched_prods.prefetch_related('members') prefetched_prods = prefetched_prods.prefetch_related('prod_type__members') active_endpoint_query = Endpoint.objects.filter( - finding__active=True, - finding__mitigated__isnull=True).distinct() + status_endpoint__mitigated=False, + status_endpoint__false_positive=False, + status_endpoint__out_of_scope=False, + status_endpoint__risk_accepted=False, + ).distinct() prefetched_prods = prefetched_prods.prefetch_related( Prefetch('endpoint_set', queryset=active_endpoint_query, to_attr='active_endpoints')) prefetched_prods = prefetched_prods.prefetch_related('tags') @@ -323,15 +326,15 @@ def finding_querys(request, prod): end_date = timezone.now() week = end_date - timedelta(days=7) # seven days and /newer are considered "new" - filters['accepted'] = findings_qs.filter(finding_helper.ACCEPTED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) + filters['accepted'] = findings_qs.filter(finding_helper.ACCEPTED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") filters['verified'] = findings_qs.filter(finding_helper.VERIFIED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") filters['new_verified'] = findings_qs.filter(finding_helper.VERIFIED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") - filters['open'] = findings_qs.filter(finding_helper.OPEN_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) - filters['inactive'] = findings_qs.filter(finding_helper.INACTIVE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) - filters['closed'] = findings_qs.filter(finding_helper.CLOSED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) - filters['false_positive'] = findings_qs.filter(finding_helper.FALSE_POSITIVE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) - filters['out_of_scope'] = findings_qs.filter(finding_helper.OUT_OF_SCOPE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) - filters['all'] = findings_qs + filters['open'] = findings_qs.filter(finding_helper.OPEN_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") + filters['inactive'] = findings_qs.filter(finding_helper.INACTIVE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") + filters['closed'] = findings_qs.filter(finding_helper.CLOSED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") + filters['false_positive'] = findings_qs.filter(finding_helper.FALSE_POSITIVE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") + filters['out_of_scope'] = findings_qs.filter(finding_helper.OUT_OF_SCOPE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") + filters['all'] = 
findings_qs.order_by("date") filters['open_vulns'] = findings_qs.filter(finding_helper.OPEN_FINDINGS_QUERY).filter( cwe__isnull=False, ).order_by('cwe').values( @@ -476,6 +479,7 @@ def view_product_metrics(request, pid): add_breadcrumb(parent=prod, top_level=False, request=request) + # An ordered dict does not make sense here. open_close_weekly = OrderedDict() severity_weekly = OrderedDict() critical_weekly = OrderedDict() @@ -483,81 +487,83 @@ def view_product_metrics(request, pid): medium_weekly = OrderedDict() open_objs_by_severity = get_zero_severity_level() + closed_objs_by_severity = get_zero_severity_level() accepted_objs_by_severity = get_zero_severity_level() - for v in filters.get('open', None): - iso_cal = v.date.isocalendar() - x = iso_to_gregorian(iso_cal[0], iso_cal[1], 1) - y = x.strftime("%m/%d
%Y
") - x = (tcalendar.timegm(x.timetuple()) * 1000) - if x not in critical_weekly: - critical_weekly[x] = {'count': 0, 'week': y} - if x not in high_weekly: - high_weekly[x] = {'count': 0, 'week': y} - if x not in medium_weekly: - medium_weekly[x] = {'count': 0, 'week': y} - - if x in open_close_weekly: - if v.mitigated: - open_close_weekly[x]['closed'] += 1 + for finding in filters.get("all", []): + iso_cal = finding.date.isocalendar() + date = iso_to_gregorian(iso_cal[0], iso_cal[1], 1) + html_date = date.strftime("%m/%d
%Y
") + unix_timestamp = (tcalendar.timegm(date.timetuple()) * 1000) + + # Open findings + if finding in filters.get("open", []): + if unix_timestamp not in critical_weekly: + critical_weekly[unix_timestamp] = {'count': 0, 'week': html_date} + if unix_timestamp not in high_weekly: + high_weekly[unix_timestamp] = {'count': 0, 'week': html_date} + if unix_timestamp not in medium_weekly: + medium_weekly[unix_timestamp] = {'count': 0, 'week': html_date} + + if unix_timestamp in open_close_weekly: + open_close_weekly[unix_timestamp]['open'] += 1 else: - open_close_weekly[x]['open'] += 1 - else: - if v.mitigated: - open_close_weekly[x] = {'closed': 1, 'open': 0, 'accepted': 0} - else: - open_close_weekly[x] = {'closed': 0, 'open': 1, 'accepted': 0} - open_close_weekly[x]['week'] = y + open_close_weekly[unix_timestamp] = {'closed': 0, 'open': 1, 'accepted': 0} + open_close_weekly[unix_timestamp]['week'] = html_date - if view == 'Finding': - severity = v.severity - elif view == 'Endpoint': - severity = v.finding.severity + if view == 'Finding': + severity = finding.severity + elif view == 'Endpoint': + severity = finding.finding.severity - if x in severity_weekly: - if severity in severity_weekly[x]: - severity_weekly[x][severity] += 1 + if unix_timestamp in severity_weekly: + if severity in severity_weekly[unix_timestamp]: + severity_weekly[unix_timestamp][severity] += 1 + else: + severity_weekly[unix_timestamp][severity] = 1 else: - severity_weekly[x][severity] = 1 - else: - severity_weekly[x] = get_zero_severity_level() - severity_weekly[x][severity] = 1 - severity_weekly[x]['week'] = y + severity_weekly[unix_timestamp] = get_zero_severity_level() + severity_weekly[unix_timestamp][severity] = 1 + severity_weekly[unix_timestamp]['week'] = html_date - if severity == 'Critical': - if x in critical_weekly: - critical_weekly[x]['count'] += 1 - else: - critical_weekly[x] = {'count': 1, 'week': y} - elif severity == 'High': - if x in high_weekly: - high_weekly[x]['count'] += 1 + if severity == 'Critical': + if unix_timestamp in critical_weekly: + critical_weekly[unix_timestamp]['count'] += 1 + else: + critical_weekly[unix_timestamp] = {'count': 1, 'week': html_date} + elif severity == 'High': + if unix_timestamp in high_weekly: + high_weekly[unix_timestamp]['count'] += 1 + else: + high_weekly[unix_timestamp] = {'count': 1, 'week': html_date} + elif severity == 'Medium': + if unix_timestamp in medium_weekly: + medium_weekly[unix_timestamp]['count'] += 1 + else: + medium_weekly[unix_timestamp] = {'count': 1, 'week': html_date} + # Optimization: count severity level on server side + if open_objs_by_severity.get(finding.severity) is not None: + open_objs_by_severity[finding.severity] += 1 + # Close findings + if finding in filters.get("closed", []): + if unix_timestamp in open_close_weekly: + open_close_weekly[unix_timestamp]['closed'] += 1 else: - high_weekly[x] = {'count': 1, 'week': y} - elif severity == 'Medium': - if x in medium_weekly: - medium_weekly[x]['count'] += 1 + open_close_weekly[unix_timestamp] = {'closed': 1, 'open': 0, 'accepted': 0} + open_close_weekly[unix_timestamp]['week'] = html_date + # Optimization: count severity level on server side + if closed_objs_by_severity.get(finding.severity) is not None: + closed_objs_by_severity[finding.severity] += 1 + # Risk Accepted findings + if finding in filters.get("accepted", []): + if unix_timestamp in open_close_weekly: + open_close_weekly[unix_timestamp]['accepted'] += 1 else: - medium_weekly[x] = {'count': 1, 'week': y} - - # Optimization: count 
severity level on server side - if open_objs_by_severity.get(v.severity) is not None: - open_objs_by_severity[v.severity] += 1 - - for a in filters.get('accepted', None): - iso_cal = a.date.isocalendar() - x = iso_to_gregorian(iso_cal[0], iso_cal[1], 1) - y = x.strftime("%m/%d
%Y
") - x = (tcalendar.timegm(x.timetuple()) * 1000) - - if x in open_close_weekly: - open_close_weekly[x]['accepted'] += 1 - else: - open_close_weekly[x] = {'closed': 0, 'open': 0, 'accepted': 1} - open_close_weekly[x]['week'] = y - - if accepted_objs_by_severity.get(a.severity) is not None: - accepted_objs_by_severity[a.severity] += 1 + open_close_weekly[unix_timestamp] = {'closed': 0, 'open': 0, 'accepted': 1} + open_close_weekly[unix_timestamp]['week'] = html_date + # Optimization: count severity level on server side + if accepted_objs_by_severity.get(finding.severity) is not None: + accepted_objs_by_severity[finding.severity] += 1 test_data = {} for t in tests: @@ -584,7 +590,7 @@ def view_product_metrics(request, pid): 'inactive_objs': filters.get('inactive', None), 'inactive_objs_by_severity': sum_by_severity_level(filters.get('inactive')), 'closed_objs': filters.get('closed', None), - 'closed_objs_by_severity': sum_by_severity_level(filters.get('closed')), + 'closed_objs_by_severity': closed_objs_by_severity, 'false_positive_objs': filters.get('false_positive', None), 'false_positive_objs_by_severity': sum_by_severity_level(filters.get('false_positive')), 'out_of_scope_objs': filters.get('out_of_scope', None), diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index c2d85ec397..54e83542eb 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1187,6 +1187,7 @@ def saml2_attrib_map_format(dict): 'Nexpose Scan': ['title', 'severity', 'vulnerability_ids', 'cwe'], # possible improvement: in the scanner put the library name into file_path, then dedup on cwe + file_path + severity 'NPM Audit Scan': ['title', 'severity', 'file_path', 'vulnerability_ids', 'cwe'], + 'NPM Audit v7+ Scan': ['title', 'severity', 'cwe', 'vuln_id_from_tool'], # possible improvement: in the scanner put the library name into file_path, then dedup on cwe + file_path + severity 'Yarn Audit Scan': ['title', 'severity', 'file_path', 'vulnerability_ids', 'cwe'], # possible improvement: in the scanner put the library name into file_path, then dedup on vulnerability_ids + file_path + severity @@ -1280,6 +1281,7 @@ def saml2_attrib_map_format(dict): 'Tenable Scan': True, 'Nexpose Scan': True, 'NPM Audit Scan': True, + 'NPM Audit v7+ Scan': True, 'Yarn Audit Scan': True, 'Mend Scan': True, 'ZAP Scan': False, @@ -1362,10 +1364,12 @@ def saml2_attrib_map_format(dict): 'CargoAudit Scan': DEDUPE_ALGO_HASH_CODE, 'Checkmarx Scan detailed': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, 'Checkmarx Scan': DEDUPE_ALGO_HASH_CODE, + 'Checkmarx One Scan': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, 'Checkmarx OSA': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE, 'Codechecker Report native': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, 'Coverity API': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, 'Cobalt.io API': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, + 'Crunch42 Scan': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, 'Dependency Track Finding Packaging Format (FPF) Export': DEDUPE_ALGO_HASH_CODE, 'Mobsfscan Scan': DEDUPE_ALGO_HASH_CODE, 'SonarQube Scan detailed': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, @@ -1377,6 +1381,7 @@ def saml2_attrib_map_format(dict): 'Tenable Scan': DEDUPE_ALGO_HASH_CODE, 'Nexpose Scan': DEDUPE_ALGO_HASH_CODE, 'NPM Audit Scan': DEDUPE_ALGO_HASH_CODE, + 'NPM Audit v7+ Scan': DEDUPE_ALGO_HASH_CODE, 'Yarn Audit Scan': DEDUPE_ALGO_HASH_CODE, 'Mend Scan': DEDUPE_ALGO_HASH_CODE, 'ZAP Scan': DEDUPE_ALGO_HASH_CODE, diff --git a/dojo/static/dojo/js/metrics.js b/dojo/static/dojo/js/metrics.js index 392ad2ac6f..2e95555d37 100644 --- 
a/dojo/static/dojo/js/metrics.js +++ b/dojo/static/dojo/js/metrics.js @@ -1618,8 +1618,6 @@ function open_close_weekly(opened, closed, accepted, ticks) { var options = { xaxes: [{ ticks: ticks, - transform: function(v) { return -v; }, - inverseTransform: function(v) { return -v; } }], yaxes: [{ min: 0 @@ -1661,8 +1659,6 @@ function severity_weekly(critical, high, medium, low, info, ticks) { var options = { xaxes: [{ ticks: ticks, - transform: function(v) { return -v; }, - inverseTransform: function(v) { return -v; } }], yaxes: [{ min: 0 @@ -1713,8 +1709,6 @@ function severity_counts_weekly(critical, high, medium, ticks) { var options = { xaxes: [{ ticks: ticks, - transform: function(v) { return -v; }, - inverseTransform: function(v) { return -v; } }], yaxes: [{ min: 0 diff --git a/dojo/templates/base.html b/dojo/templates/base.html index f4043d42e3..2f1cace966 100644 --- a/dojo/templates/base.html +++ b/dojo/templates/base.html @@ -765,10 +765,8 @@

diff --git a/dojo/templates/dojo/endpoints.html b/dojo/templates/dojo/endpoints.html index ecaaef6d52..6597e1f747 100644 --- a/dojo/templates/dojo/endpoints.html +++ b/dojo/templates/dojo/endpoints.html @@ -87,7 +87,7 @@

{% comment %} The display field is translated in the function. No need to translate here as well{% endcomment %} {% dojo_sort request 'Product' 'product' 'asc' %} {% endif %} - Active Verified Findings + Active (Verified) Findings Status @@ -117,13 +117,10 @@

{% endif %} {% if host_view %} - {{ e.host_active_verified_findings_count }} + {{ e.host_active_findings_count }} ({{ e.host_active_verified_findings_count }}) {% else %} - {% if e.active_verified_findings_count > 0 %} - {{ e.active_verified_findings_count }} - {% else %} - 0 - {% endif %} + {{ e.active_findings_count }} + ({{ e.active_verified_findings_count }}) {% endif %} @@ -133,10 +130,10 @@

{% if e.mitigated %} Mitigated {% else %} - {% if e.active_verified_findings_count > 0 %} + {% if e.active_findings_count > 0 %} Vulnerable {% else %} - No active verified findings + No active findings {% endif %} {% endif %} {% endif %} diff --git a/dojo/templates/dojo/product.html b/dojo/templates/dojo/product.html index e328557c87..d022812de8 100644 --- a/dojo/templates/dojo/product.html +++ b/dojo/templates/dojo/product.html @@ -248,12 +248,8 @@

{% endif %} - {% if prod.endpoint_count %} {{ prod.endpoint_host_count }} / {{ prod.endpoint_count }} - {% else %} - 0 - {% endif %} {% if prod.product_manager %} diff --git a/dojo/templates/dojo/view_endpoint.html b/dojo/templates/dojo/view_endpoint.html index 30d974b8a6..d09261e5ec 100644 --- a/dojo/templates/dojo/view_endpoint.html +++ b/dojo/templates/dojo/view_endpoint.html @@ -103,7 +103,7 @@

  - Finding Age ({{ all_findings|length|apnumber }} verified + Finding Age ({{ all_findings|length|apnumber }} finding{{ all_findings|length|pluralize }})
@@ -178,9 +178,9 @@

{% if item %} {% if item.vulnerable %} - + {% else %} - + {% endif %}  {{ item|url_shortner }}{% if endpoint.is_broken %} 🚩{% endif %} {% endif %} @@ -248,7 +248,7 @@

Additional Information
-

Active Verified Findings

+

Open Findings

{% if findings %}
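The template hunks above replace verified-only counts with an "active (verified)" pair. A minimal sketch of the queryset convention behind those counts, mirroring the status_endpoint filters introduced in dojo/endpoint/views.py and dojo/product/views.py earlier in this patch (the function name is illustrative; the endpoint list view applies the same filter minus the risk_accepted clause):

def vulnerable_endpoints(endpoints):
    # An endpoint counts as vulnerable while at least one attached finding
    # status is unmitigated, in scope, not false positive, and not risk
    # accepted; active/verified flags on the Finding itself no longer
    # drive the count.
    return endpoints.filter(
        status_endpoint__mitigated=False,
        status_endpoint__false_positive=False,
        status_endpoint__out_of_scope=False,
        status_endpoint__risk_accepted=False,
    ).distinct()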
diff --git a/dojo/tools/checkmarx/parser.py b/dojo/tools/checkmarx/parser.py
index d8be5b8b68..4f1f07d725 100755
--- a/dojo/tools/checkmarx/parser.py
+++ b/dojo/tools/checkmarx/parser.py
@@ -58,7 +58,7 @@ def _get_findings_xml(self, filename, test):
         language = ""
         findingdetail = ""
         group = ""
-        find_date = parser.parse(root.get("ScanStart"))
+        find_date = parser.parse(root.get("ScanStart")).date()
         if query.get("Language") is not None:
             language = query.get("Language")
@@ -389,9 +389,9 @@ def get_findings(self, file, test):
     def _parse_date(self, value):
         if isinstance(value, str):
-            return parser.parse(value)
+            return parser.parse(value).date()
         elif isinstance(value, dict) and isinstance(value.get("seconds"), int):
-            return datetime.datetime.utcfromtimestamp(value.get("seconds"))
+            return datetime.datetime.utcfromtimestamp(value.get("seconds")).date()
         else:
             return None
diff --git a/dojo/tools/checkmarx_one/__init__.py b/dojo/tools/checkmarx_one/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/dojo/tools/checkmarx_one/parser.py b/dojo/tools/checkmarx_one/parser.py
new file mode 100644
index 0000000000..699ac64e42
--- /dev/null
+++ b/dojo/tools/checkmarx_one/parser.py
@@ -0,0 +1,110 @@
+import datetime
+import json
+from dateutil import parser
+from dojo.models import Finding
+
+
+class CheckmarxOneParser(object):
+    def get_scan_types(self):
+        return ["Checkmarx One Scan"]
+
+    def get_label_for_scan_types(self, scan_type):
+        return scan_type
+
+    def get_description_for_scan_types(self, scan_type):
+        return "Checkmarx One Scan"
+
+    def _parse_date(self, value):
+        if isinstance(value, str):
+            return parser.parse(value)
+        elif isinstance(value, dict) and isinstance(value.get("seconds"), int):
+            return datetime.datetime.utcfromtimestamp(value.get("seconds"))
+        else:
+            return None
+
+    def get_findings(self, file, test):
+        data = json.load(file)
+        findings = []
+        if "vulnerabilities" in data:
+            results = data.get("vulnerabilities", [])
+            for result in results:
+                id = result.get("identifiers")[0].get("value")
+                cwe = None
+                if 'vulnerabilityDetails' in result:
+                    cwe = result.get("vulnerabilityDetails", {}).get("cweId")
+                severity = result.get("severity")
+                locations_uri = result.get("location").get("file")
+                locations_startLine = result.get("location").get("start_line")
+                locations_endLine = result.get("location").get("end_line")
+                finding = Finding(
+                    unique_id_from_tool=id,
+                    file_path=locations_uri,
+                    title=id + "_" + locations_uri,
+                    test=test,
+                    cwe=cwe,
+                    severity=severity,
+                    description="**id**: " + str(id) + "\n"
+                    + "**uri**: " + locations_uri + "\n"
+                    + "**startLine**: " + str(locations_startLine) + "\n"
+                    + "**endLine**: " + str(locations_endLine) + "\n",
+                    false_p=False,
+                    duplicate=False,
+                    out_of_scope=False,
+                    static_finding=True,
+                    dynamic_finding=False,
+                )
+                findings.append(finding)
+        elif "results" in data:
+            results = data.get("results", [])
+            for vulnerability in results:
+                result_type = vulnerability.get("type")
+                date = self._parse_date(vulnerability.get("firstFoundAt"))
+                cwe = None
+                if 'vulnerabilityDetails' in vulnerability:
+                    cwe = vulnerability.get("vulnerabilityDetails", {}).get("cweId")
+                if result_type == "sast":
+                    descriptionDetails = vulnerability.get("description")
+                    file_path = vulnerability.get("data").get("nodes")[0].get("fileName")
+                    finding = Finding(
+                        description=descriptionDetails,
+                        title=descriptionDetails,
+                        file_path=file_path,
+                        date=date,
+                        cwe=cwe,
+                        severity=vulnerability.get("severity").title(),
+                        test=test,
+                        static_finding=True,
+                    )
+                    if 
vulnerability.get("id"): + finding.unique_id_from_tool = ( + vulnerability.get("id") + ) + else: + finding.unique_id_from_tool = str( + vulnerability.get("similarityId") + ) + findings.append(finding) + if result_type == "kics": + description = vulnerability.get("description") + file_path = vulnerability.get("data").get("filename") + finding = Finding( + title=f'{description}', + description=description, + date=date, + cwe=cwe, + severity=vulnerability.get("severity").title(), + verified=vulnerability.get("state") != "TO_VERIFY", + file_path=file_path, + test=test, + static_finding=True, + ) + if vulnerability.get("id"): + finding.unique_id_from_tool = vulnerability.get( + "id" + ) + else: + finding.unique_id_from_tool = str( + vulnerability.get("similarityId") + ) + findings.append(finding) + return findings diff --git a/dojo/tools/crunch42/__init__.py b/dojo/tools/crunch42/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/dojo/tools/crunch42/parser.py b/dojo/tools/crunch42/parser.py new file mode 100644 index 0000000000..e1a841e29a --- /dev/null +++ b/dojo/tools/crunch42/parser.py @@ -0,0 +1,88 @@ +import json +from dojo.models import Finding + + +class Crunch42Parser(object): + + def get_scan_types(self): + return ["Crunch42 Scan"] + + def get_label_for_scan_types(self, scan_type): + return "Crunch42 Scan" + + def get_description_for_scan_types(self, scan_type): + return "Import JSON output of Crunch42 scan report." + + def parse_json(self, json_output): + try: + data = json_output.read() + try: + tree = json.loads(str(data, "utf-8")) + except Exception: + tree = json.loads(data) + except Exception: + raise ValueError("Invalid format") + + return tree + + def process_tree(self, tree, test): + return list(self.get_items(tree, test)) if tree else [] + + def get_findings(self, filename, test): + reportTree = self.parse_json(filename) + + if isinstance(reportTree, list): + temp = [] + for moduleTree in reportTree: + temp += self.process_tree(moduleTree, test) + return temp + else: + return self.process_tree(reportTree, test) + + def get_items(self, tree, test): + items = {} + iterator = 0 + if "report" in tree and tree["report"].get("security"): + results = tree["report"].get("security").get("issues") + for key, node in results.items(): + for issue in node["issues"]: + item = self.get_item( + issue, key, test + ) + items[iterator] = item + iterator += 1 + return list(items.values()) + + def get_item(self, issue, title, test): + fingerprint = issue["fingerprint"] + pointer = issue["pointer"] + message = issue["specificDescription"] if 'specificDescription' in issue else title + score = issue["score"] + criticality = issue["criticality"] + if criticality == 1: + severity = "Info" + elif criticality == 2: + severity = "Low" + elif criticality == 3: + severity = "Medium" + elif criticality <= 4: + severity = "High" + else: + severity = "Critical" + # create the finding object + finding = Finding( + unique_id_from_tool=fingerprint, + title=title, + test=test, + severity=severity, + description="**fingerprint**: " + str(fingerprint) + "\n" + + "**pointer**: " + str(pointer) + "\n" + + "**message**: " + str(message) + "\n" + + "**score**: " + str(score) + "\n", + false_p=False, + duplicate=False, + out_of_scope=False, + static_finding=True, + dynamic_finding=False, + ) + return finding diff --git a/dojo/tools/npm_audit_7_plus/__init__.py b/dojo/tools/npm_audit_7_plus/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git 
a/dojo/tools/npm_audit_7_plus/parser.py b/dojo/tools/npm_audit_7_plus/parser.py
new file mode 100644
index 0000000000..c72c01cfad
--- /dev/null
+++ b/dojo/tools/npm_audit_7_plus/parser.py
@@ -0,0 +1,225 @@
+"""Parser for NPM Audit v7+ Scan."""
+import json
+import logging
+from dojo.models import Finding
+
+logger = logging.getLogger(__name__)
+
+'''
+the npm audit json output depends on the params used. this parser
+accepts the formats for any of:
+
+npm audit --json
+npm audit fix --dry-run --json
+npm audit --dry-run --json
+
+In order for this parser to import the same number of findings
+as the report's meta block indicates, all top level keys
+are considered a vulnerability and as much information as provided
+is added to each
+'''
+
+
+class NpmAudit7PlusParser(object):
+    """Represents the parser class."""
+
+    def get_scan_types(self):
+        """Return the scan type."""
+        return ["NPM Audit v7+ Scan"]
+
+    def get_label_for_scan_types(self, scan_type):
+        """Return the scan label."""
+        return scan_type # no custom label for now
+
+    def get_description_for_scan_types(self, scan_type):
+        """Return the scan description."""
+        return "NPM Audit Scan json output from v7 and above."
+
+    def get_findings(self, json_output, test):
+        """Return the findings gathered from file upload."""
+        tree = self.parse_json(json_output)
+        return self.get_items(tree, test)
+
+    def parse_json(self, json_output):
+        """Parse the json format to get findings."""
+        if json_output is None:
+            return
+        try:
+            data = json_output.read()
+            try:
+                tree = json.loads(str(data, "utf-8"))
+            except Exception:
+                tree = json.loads(data)
+        except Exception:
+            raise ValueError("Invalid format, unable to parse json.")
+
+        # output from npm audit fix --dry-run --json
+        if tree.get("audit"):
+            if not tree.get("audit").get("auditReportVersion"):
+                raise ValueError(
+                    ("This parser only supports output from npm audit version"
+                     " 7 and above.")
+                )
+            subtree = tree.get("audit").get("vulnerabilities")
+        # output from npm audit --dry-run --json
+        # or
+        # output from npm audit --json
+        else:
+            if not tree.get("auditReportVersion"):
+                raise ValueError(
+                    ("This parser only supports output from npm audit version"
+                     " 7 and above.")
+                )
+            subtree = tree.get("vulnerabilities")
+
+        return subtree
+
+    def get_items(self, tree, test):
+        """Return the individual items found in report."""
+        items = {}
+
+        for key, node in tree.items():
+            item = get_item(node, tree, test)
+            unique_key = item.title + item.severity
+            items[unique_key] = item
+
+        return list(items.values())
+
+
+def get_item(item_node, tree, test):
+    """Return the individual Findings from items found in report."""
+    references = []
+    mitigation = ""
+    test = test
+    static_finding = True
+    title = ""
+    unique_id_from_tool = ""
+    cvssv3 = ""
+    cwe = ""
+
+    if item_node["severity"] == "low":
+        severity = "Low"
+    elif item_node["severity"] == "moderate":
+        severity = "Medium"
+    elif item_node["severity"] == "high":
+        severity = "High"
+    elif item_node["severity"] == "critical":
+        severity = "Critical"
+    else:
+        severity = "Info"
+
+    if item_node["via"] and isinstance(item_node["via"][0], str):
+        # this is a top level key (a vulnerability)
+        title = item_node["name"]
+        cwe = "CWE-1035" # default
+        component_name = title
+
+    elif item_node["via"] and isinstance(item_node["via"][0], dict):
+        title = item_node["via"][0]["title"]
+        component_name = item_node["nodes"][0]
+        cwe = item_node["via"][0]["cwe"][0]
+        references.append(item_node["via"][0]["url"])
+        unique_id_from_tool = 
str(item_node["via"][0]["source"]) + cvssv3 = item_node["via"][0]["cvss"]["vectorString"] + + if isinstance(item_node["fixAvailable"], dict): + fix_name = item_node["fixAvailable"]["name"] + fix_version = item_node["fixAvailable"]["version"] + mitigation = "Update {0} to version {1}".format(fix_name, fix_version) + else: + mitigation = "No specific mitigation provided by tool." + + description = get_vuln_description(item_node, tree) + + if (item_node["via"] and + isinstance(item_node["via"][0], dict) and + len(item_node["via"]) > 1): + # we have a multiple CWE vuln which we will capture in the + # vulnerability_ids and references + for vuln in item_node["via"][1:]: # have to decide if str or object + if isinstance(vuln, dict): + references.append(vuln["url"]) + + if len(cwe): + cwe = int(cwe.split("-")[1]) + + dojo_finding = Finding( + title=title, + test=test, + severity=severity, + description=description, + cwe=cwe, + mitigation=mitigation, + references=", ".join(references), + component_name=component_name, + false_p=False, + duplicate=False, + out_of_scope=False, + mitigated=None, + impact="No impact provided", + static_finding=static_finding, + dynamic_finding=False, + vuln_id_from_tool=unique_id_from_tool, + ) + + if (cvssv3 is not None) and (len(cvssv3) > 0): + dojo_finding.cvssv3 = cvssv3 + + return dojo_finding + + +def get_vuln_description(item_node, tree): + """Make output pretty of details.""" + effects_handled = [] + description = "" + + description += (item_node["name"] + " " + + item_node["range"] + "\n") + description += "Severity: " + item_node["severity"] + "\n" + + for via in item_node["via"]: + if isinstance(via, str): + description += ("Depends on vulnerable versions of " + + via + "\n") + else: + description += (via["title"] + " - " + via["url"] + "\n") + + if isinstance(item_node["fixAvailable"], dict): + fix_name = item_node["fixAvailable"]["name"] + fix_version = item_node["fixAvailable"]["version"] + mitigation = "Fix Available: Update {0} to version {1}".format( + fix_name, fix_version) + else: + mitigation = "No specific mitigation provided by tool." 
+ + description += mitigation + "\n" + + for node in item_node["nodes"]: + description += node + "\n" + + for effect in item_node["effects"]: + # look up info in the main tree + description += (" " + tree[effect]["name"] + " " + + tree[effect]["range"] + "\n") + effects_handled.append(tree[effect]["name"]) + for ev in tree[effect]["via"]: + if isinstance(ev, dict): + if tree[effect]["name"] != ev["name"]: + description += (" Depends on vulnerable versions of " + + ev["name"] + "\n") + else: + if tree[effect]["name"] != ev: + description += (" Depends on vulnerable versions of " + + ev + "\n") + for en in tree[effect]["nodes"]: + description += " " + en + "\n" + + for ee in tree[effect]["effects"]: + if ee in effects_handled: + continue # already added to description + description += (" " + tree[ee]["name"] + " " + + tree[ee]["range"] + "\n") + for en in tree[ee]["nodes"]: + description += " " + en + "\n" + + return description diff --git a/dojo/tools/pip_audit/parser.py b/dojo/tools/pip_audit/parser.py index 726667987f..4b3ffba9b1 100644 --- a/dojo/tools/pip_audit/parser.py +++ b/dojo/tools/pip_audit/parser.py @@ -1,70 +1,110 @@ +"""Parser for pip-audit.""" import json from dojo.models import Finding class PipAuditParser: + """Represents a file parser capable of ingesting pip-audit results.""" + def get_scan_types(self): + """Return the type of scan this parser ingests.""" return ["pip-audit Scan"] def get_label_for_scan_types(self, scan_type): + """Return the friendly name for this parser.""" return "pip-audit Scan" def get_description_for_scan_types(self, scan_type): + """Return the description for this parser.""" return "Import pip-audit JSON scan report." def requires_file(self, scan_type): + """Return boolean indicating if parser requires a file to process.""" return True def get_findings(self, scan_file, test): + """Return the collection of Findings ingested.""" data = json.load(scan_file) - - findings = list() - for item in data: - vulnerabilities = item.get("vulns", []) - if vulnerabilities: - component_name = item["name"] - component_version = item.get("version") - for vulnerability in vulnerabilities: - vuln_id = vulnerability.get("id") - vuln_fix_versions = vulnerability.get("fix_versions") - vuln_description = vulnerability.get("description") - - title = ( - f"{vuln_id} in {component_name}:{component_version}" - ) - - description = "" - description += vuln_description - - mitigation = None - if vuln_fix_versions: - mitigation = "Upgrade to version:" - if len(vuln_fix_versions) == 1: - mitigation += f" {vuln_fix_versions[0]}" - else: - for fix_version in vuln_fix_versions: - mitigation += f"\n- {fix_version}" - - finding = Finding( - test=test, - title=title, - cwe=1352, - severity="Medium", - description=description, - mitigation=mitigation, - component_name=component_name, - component_version=component_version, - vuln_id_from_tool=vuln_id, - static_finding=True, - dynamic_finding=False, - ) - vulnerability_ids = list() - if vuln_id: - vulnerability_ids.append(vuln_id) - if vulnerability_ids: - finding.unsaved_vulnerability_ids = vulnerability_ids - - findings.append(finding) + findings = None + # this parser can handle two distinct formats; see the sample scan files + if "dependencies" in data: + # new format of report + findings = get_file_findings(data, test) + else: + # legacy format of report + findings = get_legacy_findings(data, test) return findings + + +def get_file_findings(data, test): + """Return the findings in the vulns array inside the dependencies key.""" +
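The two report shapes handled by get_findings above differ only in the top-level wrapper: the legacy report is a bare list of dependency entries, while newer pip-audit output nests the same entries under "dependencies". A sketch with a hypothetical package and advisory, again assuming a test context where dojo.models is importable:

import io
import json

from dojo.models import Test
from dojo.tools.pip_audit.parser import PipAuditParser

# Hypothetical dependency entry shared by both report shapes.
item = {
    "name": "example-pkg",
    "version": "1.0.0",
    "vulns": [{
        "id": "PYSEC-0000-00",
        "fix_versions": ["1.0.1"],
        "description": "example advisory",
    }],
}

legacy = [item]                      # original top-level list
wrapped = {"dependencies": [item]}   # newer wrapper format

parser = PipAuditParser()
for report in (legacy, wrapped):
    findings = parser.get_findings(io.StringIO(json.dumps(report)), Test())
    assert len(findings) == 1
    assert findings[0].component_name == "example-pkg"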
findings = list() + for dependency in data["dependencies"]: + item_findings = get_item_findings(dependency, test) + if item_findings is not None: + findings.extend(item_findings) + return findings + + +def get_legacy_findings(data, test): + """Return the findings gathered from the vulns element.""" + findings = list() + for item in data: + item_findings = get_item_findings(item, test) + if item_findings is not None: + findings.extend(item_findings) + return findings + + +def get_item_findings(item, test): + """Return list of Findings.""" + findings = list() + vulnerabilities = item.get("vulns", []) + if vulnerabilities: + component_name = item["name"] + component_version = item.get("version") + for vulnerability in vulnerabilities: + vuln_id = vulnerability.get("id") + vuln_fix_versions = vulnerability.get("fix_versions") + vuln_description = vulnerability.get("description") + + title = ( + f"{vuln_id} in {component_name}:{component_version}" + ) + + description = "" + description += vuln_description + + mitigation = None + if vuln_fix_versions: + mitigation = "Upgrade to version:" + if len(vuln_fix_versions) == 1: + mitigation += f" {vuln_fix_versions[0]}" + else: + for fix_version in vuln_fix_versions: + mitigation += f"\n- {fix_version}" + + finding = Finding( + test=test, + title=title, + cwe=1395, + severity="Medium", + description=description, + mitigation=mitigation, + component_name=component_name, + component_version=component_version, + vuln_id_from_tool=vuln_id, + static_finding=True, + dynamic_finding=False, + ) + vulnerability_ids = list() + if vuln_id: + vulnerability_ids.append(vuln_id) + if vulnerability_ids: + finding.unsaved_vulnerability_ids = vulnerability_ids + + findings.append(finding) + + return findings diff --git a/dojo/utils.py b/dojo/utils.py index 25cf46d2af..b41c82966a 100644 --- a/dojo/utils.py +++ b/dojo/utils.py @@ -1575,7 +1575,12 @@ def __init__(self, product, title=None, tab=None): active=True, mitigated__isnull=True).count() active_endpoints = Endpoint.objects.filter( - product=self.product, finding__active=True, finding__mitigated__isnull=True) + product=self.product, + status_endpoint__mitigated=False, + status_endpoint__false_positive=False, + status_endpoint__out_of_scope=False, + status_endpoint__risk_accepted=False, + ) self.endpoints_count = active_endpoints.distinct().count() self.endpoint_hosts_count = active_endpoints.values('host').distinct().count() self.benchmark_type = Benchmark_Type.objects.filter( diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index e5eb9b0e92..2ce3da7473 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 appVersion: "2.33.0-dev" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo -version: 1.6.115-dev +version: 1.6.116-dev icon: https://www.defectdojo.org/img/favicon.ico maintainers: - name: madchap diff --git a/unittests/scans/checkmarx_one/checkmarx_one.json b/unittests/scans/checkmarx_one/checkmarx_one.json new file mode 100644 index 0000000000..a9e432abf6 --- /dev/null +++ b/unittests/scans/checkmarx_one/checkmarx_one.json @@ -0,0 +1,284 @@ +{ + "scan": { + "end_time": "2024-01-18T09:12:43", + "analyzer": { + "id": "CxOne-SAST", + "name": "Checkmarx", + "url": "https://checkmarx.com/", + "vendor": { + "name": "Checkmarx" + }, + "version": "2.0.63" + }, + "scanner": { + "id": "CxOne-SAST", + "name": "Checkmarx", + "vendor": { + "name": "Checkmarx" + }, + "version": "2.0.63" + }, + "start_time": 
"2024-01-18T09:12:43", + "status": "success", + "type": "sast" + }, + "schema": "https://gitlab.com/gitlab-org/gitlab/-/raw/master/lib/gitlab/ci/parsers/security/validators/schemas/15.0.0/sast-report-format.json", + "version": "15.0.0", + "vulnerabilities": [ + { + "id": "Client_HTML5_Store_Sensitive_data_In_Web_Storage:/src/helpers/Constants.ts:450", + "category": "Checkmarx-sast", + "name": "Client_HTML5_Store_Sensitive_data_In_Web_Storage", + "message": "Client_HTML5_Store_Sensitive_data_In_Web_Storage@/src/helpers/Constants.ts:450", + "description": "The application stores data makeKey on the client, in an insecure manner, at line 115 of /src/helpers/Utility.ts.", + "cve": "Client_HTML5_Store_Sensitive_data_In_Web_Storage:/src/helpers/Constants.ts:450", + "severity": "Medium", + "confidence": "Medium", + "solution": "", + "scanner": { + "id": "Checkmarx-sast", + "name": "Checkmarx-sast", + "vendor": { + "name": "" + }, + "version": "" + }, + "identifiers": [ + { + "type": "cxOneScan", + "name": "CxOne Scan", + "url": "https://ast.checkmarx.net/projects/4c5703d8-dddf-11ee-8275-bb5b871f4ca1/scans?id=56efc3de-dddf-11ee-91f7-17d54222fb10\u0026branch=release%2FRC-6", + "value": "511341974" + } + ], + "links": [], + "tracking": { + "type": "source", + "items": [ + { + "signatures": [ + { + "algorithm": "sast-Algorithm ", + "value": "NA" + } + ], + "file": "/src/helpers/Constants.ts", + "end_line": 451, + "start_line": 450 + } + ] + }, + "flags": [], + "location": { + "file": "/src/helpers/Constants.ts", + "start_line": 450, + "end_line": 451, + "class": "" + } + }, + { + "id": "Client_HTML5_Store_Sensitive_data_In_Web_Storage:/src/helpers/Helper.ts:349", + "category": "Checkmarx-sast", + "name": "Client_HTML5_Store_Sensitive_data_In_Web_Storage", + "message": "Client_HTML5_Store_Sensitive_data_In_Web_Storage@/src/helpers/Helper.ts:349", + "description": "The application stores data Key on the client, in an insecure manner, at line 349 of /src/helpers/Helper.ts.", + "cve": "Client_HTML5_Store_Sensitive_data_In_Web_Storage:/src/helpers/Helper.ts:349", + "severity": "Medium", + "confidence": "Medium", + "solution": "", + "scanner": { + "id": "Checkmarx-sast", + "name": "Checkmarx-sast", + "vendor": { + "name": "" + }, + "version": "" + }, + "identifiers": [ + { + "type": "cxOneScan", + "name": "CxOne Scan", + "url": "https://ast.checkmarx.net/projects/7c649cf6-dde0-11ee-a703-43244b0a9879/scans?id=86fc33ea-dde0-11ee-ba5f-3beb4c589dd3\u0026branch=release%2FRC-6", + "value": "832413795" + } + ], + "links": [], + "tracking": { + "type": "source", + "items": [ + { + "signatures": [ + { + "algorithm": "sast-Algorithm ", + "value": "NA" + } + ], + "file": "/src/helpers/Helper.ts", + "end_line": 350, + "start_line": 339 + } + ] + }, + "flags": [], + "location": { + "file": "/src/helpers/Helper.ts", + "start_line": 349, + "end_line": 350, + "class": "" + } + }, + { + "id": "Use_Of_Hardcoded_Password:/src/pages/UserError_test.tsx:71", + "category": "Checkmarx-sast", + "name": "Use_Of_Hardcoded_Password", + "message": "Use_Of_Hardcoded_Password@/src/pages/UserError_test.tsx:71", + "description": "The application uses the hard-coded password \u0026#34;testPassword\u0026#34; for authentication purposes, either using it to verify users\u0026#39; identities, or to access another remote system. 
This password at line 71 of /src/pages/UserError_test.tsx appears in the code, implying it is accessible to anyone with source code access, and cannot be changed without rebuilding the application.\n\n", + "cve": "Use_Of_Hardcoded_Password:/src/pages/UserError_test.tsx:71", + "severity": "Low", + "confidence": "Low", + "solution": "", + "scanner": { + "id": "Checkmarx-sast", + "name": "Checkmarx-sast", + "vendor": { + "name": "" + }, + "version": "" + }, + "identifiers": [ + { + "type": "cxOneScan", + "name": "CxOne Scan", + "url": "https://ast.checkmarx.net/projects/53d5b99a-dde1-11ee-ab71-9be9755a4da6/scans?id=5e592014-dde1-11ee-8985-f37d989e23db\u0026branch=release%2FRC-6", + "value": "143486243" + } + ], + "links": [], + "tracking": { + "type": "source", + "items": [ + { + "signatures": [ + { + "algorithm": "sast-Algorithm ", + "value": "NA" + } + ], + "file": "/src/pages/UserError_test.tsx", + "end_line": 72, + "start_line": 71 + } + ] + }, + "flags": [], + "location": { + "file": "/src/pages/UserError_test.tsx", + "start_line": 71, + "end_line": 72, + "class": "" + } + }, + { + "id": "Client_Hardcoded_Domain:/public/index.html:32", + "category": "Checkmarx-sast", + "name": "Client_Hardcoded_Domain", + "message": "Client_Hardcoded_Domain@/public/index.html:32", + "description": "The JavaScript file imported in https://fonts.googleapis.com/icon?family=Material+Icons in /public/index.html at line 32 is from a remote domain, which may allow attackers to replace its contents with malicious code.", + "cve": "Client_Hardcoded_Domain:/public/index.html:32", + "severity": "Info", + "confidence": "Info", + "solution": "", + "scanner": { + "id": "Checkmarx-sast", + "name": "Checkmarx-sast", + "vendor": { + "name": "" + }, + "version": "" + }, + "identifiers": [ + { + "type": "cxOneScan", + "name": "CxOne Scan", + "url": "https://ast.checkmarx.net/projects/34480339-8f8c-4b68-b8fb-4eea09a2045d/scans?id=78adc5f1-0864-411e-b8d6-bfa134458bd8\u0026branch=release%2Fpilot-1", + "value": "2595392" + } + ], + "links": [], + "tracking": { + "type": "source", + "items": [ + { + "signatures": [ + { + "algorithm": "sast-Algorithm ", + "value": "NA" + } + ], + "file": "/public/index.html", + "end_line": 87, + "start_line": 32 + } + ] + }, + "flags": [], + "location": { + "file": "/public/index.html", + "start_line": 32, + "end_line": 87, + "class": "" + } + }, + { + "id": "Client_DOM_XSS:/src/app/App_test.tsx:744", + "category": "Checkmarx-sast", + "name": "Client_DOM_XSS", + "message": "Client_DOM_XSS@/src/app/App_test.tsx:744", + "description": "The method TrustMe embeds untrusted data in generated output with location, at line 298 of /src/app/App_test.tsx. 
This untrusted data is embedded into the output without proper sanitization or encoding, enabling an attacker to inject malicious code into the generated web-page.\n\n", + "cve": "Client_DOM_XSS:/src/app/App_test.tsx:744", + "severity": "Info", + "confidence": "Info", + "solution": "", + "scanner": { + "id": "Checkmarx-sast", + "name": "Checkmarx-sast", + "vendor": { + "name": "" + }, + "version": "" + }, + "identifiers": [ + { + "type": "cxOneScan", + "name": "CxOne Scan", + "url": "https://ast.checkmarx.net/projects/38ebbafc-dde2-11ee-ae0c-b72e7e0d42ae/scans?id=42ff549a-dde2-11ee-8c8c-83e0db45059d\u0026branch=release%2FRC-6", + "value": "836714351" + } + ], + "links": [], + "tracking": { + "type": "source", + "items": [ + { + "signatures": [ + { + "algorithm": "sast-Algorithm ", + "value": "NA" + } + ], + "file": "/src/app/App_test.tsx", + "end_line": 746, + "start_line": 744 + } + ] + }, + "flags": [], + "location": { + "file": "/src/app/App_test.tsx", + "start_line": 744, + "end_line": 746, + "class": "" + } + } + ] +} \ No newline at end of file diff --git a/unittests/scans/checkmarx_one/many_findings.json b/unittests/scans/checkmarx_one/many_findings.json new file mode 100644 index 0000000000..13a030e2e3 --- /dev/null +++ b/unittests/scans/checkmarx_one/many_findings.json @@ -0,0 +1,258 @@ +{ + "results": [ + { + "type": "kics", + "label": "IaC Security", + "id": "98727183", + "similarityId": "fbed62efe2786d647806451d0480f57b4bc08786633fb73c29579faee8f9d252", + "status": "RECURRENT", + "state": "TO_VERIFY", + "severity": "HIGH", + "created": "2023-11-21T10:07:38Z", + "firstFoundAt": "2022-12-26T09:31:48Z", + "foundAt": "2023-11-21T10:07:38Z", + "firstScanId": "79cd6248-ddcc-11ee-80c3-c34e822ea27f", + "description": "A user should be specified in the dockerfile, otherwise the image will run as root", + "descriptionHTML": "\u003cp\u003eA user should be specified in the dockerfile, otherwise the image will run as root\u003c/p\u003e\n", + "data": { + "queryId": "94d39580-ddcc-11ee-b570-27d2d85c4cb8 [Taken from query_id]", + "queryName": "Missing User Instruction", + "group": "Build Process [Taken from category]", + "line": 1, + "platform": "Dockerfile", + "issueType": "MissingAttribute", + "expectedValue": "The 'Dockerfile' should contain the 'USER' instruction", + "value": "The 'Dockerfile' does not contain any 'USER' instruction", + "filename": "/qe/testharness/Dockerfile" + }, + "comments": {}, + "vulnerabilityDetails": { + "cvss": {} + } + }, + { + "type": "kics", + "label": "IaC Security", + "id": "28307228", + "similarityId": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "status": "RECURRENT", + "state": "TO_VERIFY", + "severity": "HIGH", + "created": "2023-11-21T10:07:38Z", + "firstFoundAt": "2022-12-26T09:31:48Z", + "foundAt": "2023-11-21T10:07:38Z", + "firstScanId": "811759c2-ddd7-11ee-9b56-d34cc93fb257", + "description": "A user should be specified in the dockerfile, otherwise the image will run as root", + "descriptionHTML": "\u003cp\u003eA user should be specified in the dockerfile, otherwise the image will run as root\u003c/p\u003e\n", + "data": { + "queryId": "5d2efac8-ddd8-11ee-9117-b34a238abecc [Taken from query_id]", + "queryName": "Missing User Instruction", + "group": "Build Process [Taken from category]", + "line": 1, + "platform": "Dockerfile", + "issueType": "MissingAttribute", + "expectedValue": "The 'Dockerfile' should contain the 'USER' instruction", + "value": "The 'Dockerfile' does not contain any 'USER' instruction", + "filename": 
"/qe/testharness/Dockerfile" + }, + "comments": {}, + "vulnerabilityDetails": { + "cvss": {} + } + }, + { + "type": "sast", + "label": "sast", + "id": "04894977", + "similarityId": "697307927", + "status": "RECURRENT", + "state": "TO_VERIFY", + "severity": "MEDIUM", + "created": "2023-11-21T09:16:10Z", + "firstFoundAt": "2022-03-17T14:45:41Z", + "foundAt": "2023-11-21T09:16:10Z", + "firstScanId": "9d120bda-ddd8-11ee-bd4c-8b5b82bf6c89", + "description": "Method getObject at line 96 of /shared/src/main/java/com/example/api/clients/ObjectsManagerUtil.java sends user information outside the application. This may constitute a Privacy Violation.\n\n", + "descriptionHTML": "\u003cp\u003eMethod getObject at line 96 of /shared/src/main/java/com/example/api/clients/ObjectsManagerUtil.java sends user information outside the application. This may constitute a Privacy Violation.\u003c/p\u003e\n", + "data": { + "queryId": 12956636075206043460, + "queryName": "Privacy_Violation", + "group": "Java_Medium_Threat", + "resultHash": "2417044825981779912395719508", + "languageName": "Java", + "nodes": [ + { + "id": "9823731082518796021644390089", + "line": 96, + "name": "secret", + "column": 48, + "length": 12, + "method": "getObject", + "nodeID": 55222, + "domType": "ParamDecl", + "fileName": "/shared/src/main/java/com/example/api/clients/ObjectsManagerUtil.java", + "fullName": "com.example.api.clients.ObjectsManagerUtil.getObject.secret", + "typeName": "String", + "methodLine": 96, + "definitions": "1" + }, + { + "id": "ahpeiL2gaeboi8aqueiv8liekah=", + "line": 48, + "name": "secret", + "column": 71, + "length": 12, + "method": "getObject", + "nodeID": 55222, + "domType": "UnknownReference", + "fileName": "/shared/src/main/java/com/example/api/clients/ObjectsManagerUtil.java", + "fullName": "com.example.api.clients.ObjectsManagerUtil.getObject.secret", + "typeName": "String", + "methodLine": 76, + "definitions": "1" + }, + { + "id": "Aewo6hui2ek5guNgaesie4ioPha=", + "line": 56, + "name": "error", + "column": 27, + "length": 12, + "method": "getObject", + "nodeID": 55222, + "domType": "MethodInvokeExpr", + "fileName": "/shared/src/main/java/com/example/api/clients/ObjectsManagerUtil.java", + "fullName": "com.example.api.clients.ObjectsManagerUtil.log.error", + "typeName": "error", + "methodLine": 96, + "definitions": "0" + } + ] + }, + "comments": {}, + "vulnerabilityDetails": { + "cweId": 359, + "cvss": {}, + "compliances": [ + "FISMA 2014", + "NIST SP 800-53", + "OWASP Top 10 2013", + "OWASP Top 10 2017", + "OWASP Top 10 2021", + "PCI DSS v3.2.1", + "ASD STIG 4.10" + ] + } + }, + { + "type": "kics", + "label": "IaC Security", + "id": "9930754", + "similarityId": "df0b5ce1f88f1af07e63731e0a9628920a008ea0ca4bbd117d75a3cdbdd283ff", + "status": "RECURRENT", + "state": "TO_VERIFY", + "severity": "MEDIUM", + "created": "2023-11-21T10:07:38Z", + "firstFoundAt": "2022-08-01T08:30:25Z", + "foundAt": "2023-11-21T10:07:38Z", + "firstScanId": "eff24b42-ddda-11ee-9e73-83b44de11797", + "description": "Incoming container traffic should be bound to a specific host interface", + "descriptionHTML": "\u003cp\u003eIncoming container traffic should be bound to a specific host interface\u003c/p\u003e\n", + "data": { + "queryId": "fd070ec6-ddda-11ee-a521-73cad7abf17a [Taken from query_id]", + "queryName": "Container Traffic Not Bound To Host Interface", + "group": "Networking and Firewall [Taken from category]", + "line": 16, + "platform": "DockerCompose", + "issueType": "IncorrectValue", + "expectedValue": "Docker compose file to 
have 'ports' attribute bound to a specific host interface.", + "value": "Docker compose file doesn't have 'ports' attribute bound to a specific host interface", + "filename": "/qe/integration/docker-compose.yml" + }, + "comments": {}, + "vulnerabilityDetails": { + "cvss": {} + } + }, + { + "type": "sast", + "label": "sast", + "id": "47966330", + "similarityId": "2994069268", + "status": "RECURRENT", + "state": "TO_VERIFY", + "severity": "LOW", + "created": "2023-11-21T09:16:10Z", + "firstFoundAt": "2023-02-09T09:32:55Z", + "foundAt": "2023-11-21T09:16:10Z", + "firstScanId": "4f9f7b28-dddb-11ee-b736-53a846e9935e", + "description": "Method getClient at line 43 of /qe/integration-tests/src/java/com/example/api/integrationtests/utils/IntegratHelper.java defines testPassword, which is designated to contain user passwords. However, while plaintext passwords are later assigned to testPassword, this variable is never cleared from memory.\n\n", + "descriptionHTML": "\u003cp\u003eMethod getClient at line 43 of /qe/integration-tests/src/java/com/example/api/integrationtests/utils/IntegratHelper.java defines testPassword, which is designated to contain user passwords. However, while plaintext passwords are later assigned to testPassword, this variable is never cleared from memory.\u003c/p\u003e\n", + "data": { + "queryId": 7846472296093057013, + "queryName": "Heap_Inspection", + "group": "Java_Low_Visibility", + "resultHash": "oochiuquiede0IeVeijaWooTieh=", + "languageName": "Java", + "nodes": [ + { + "id": "Oec6Nie9ool0too4chieNoh5zoo=", + "line": 84, + "name": "testPassword", + "column": 18, + "length": 12, + "method": "getClient", + "nodeID": 6459, + "domType": "Declarator", + "fileName": "/qe/integration-tests/src/java/com/example/api/integrationtests/utils/IntegratHelper.java", + "fullName": "com.example.api.integrationtests.utils.IntegratHelper.getClient.testPassword", + "typeName": "char", + "methodLine": 35, + "definitions": "1" + } + ] + }, + "comments": {}, + "vulnerabilityDetails": { + "cweId": 244, + "cvss": {}, + "compliances": [ + "OWASP Top 10 2013", + "OWASP Top 10 2021", + "ASD STIG 4.10" + ] + } + }, + { + "type": "kics", + "label": "IaC Security", + "id": "87775678", + "similarityId": "d2b3d5c205f6e52f7588c4ecab08caec2a9d53dc2ded74e1fffd9f2ebf3fa203", + "status": "RECURRENT", + "state": "TO_VERIFY", + "severity": "LOW", + "created": "2023-11-21T10:07:38Z", + "firstFoundAt": "2023-01-05T09:31:43Z", + "foundAt": "2023-11-21T10:07:38Z", + "firstScanId": "82a21764-dddc-11ee-9364-1f3a853093bf", + "description": "Ensure that HEALTHCHECK is being used. The HEALTHCHECK instruction tells Docker how to test a container to check that it is still working", + "descriptionHTML": "\u003cp\u003eEnsure that HEALTHCHECK is being used. 
The HEALTHCHECK instruction tells Docker how to test a container to check that it is still working\u003c/p\u003e\n", + "data": { + "queryId": "90b50eba-dddc-11ee-acec-cf20c0abdb94 [Taken from query_id]", + "queryName": "Healthcheck Instruction Missing", + "group": "Insecure Configurations [Taken from category]", + "line": 1, + "platform": "Dockerfile", + "issueType": "MissingAttribute", + "expectedValue": "Dockerfile should contain instruction 'HEALTHCHECK'", + "value": "Dockerfile doesn't contain instruction 'HEALTHCHECK'", + "filename": "/qe/unitests/Dockerfile" + }, + "comments": {}, + "vulnerabilityDetails": { + "cvss": {} + } + } + ], + "totalCount": 6, + "scanID": "fc1ab89e-ddc8-11ee-96d4-97cff7d4e776" +} \ No newline at end of file diff --git a/unittests/scans/checkmarx_one/no_findings.json b/unittests/scans/checkmarx_one/no_findings.json new file mode 100644 index 0000000000..c526fa4dc0 --- /dev/null +++ b/unittests/scans/checkmarx_one/no_findings.json @@ -0,0 +1,6 @@ +{ + "results": [ + ], + "totalCount": 0, + "scanID": "4fc677bc-dddd-11ee-8004-6fd4f0411f73" +} \ No newline at end of file diff --git a/unittests/scans/crunch42/crunch42_many_findings.json b/unittests/scans/crunch42/crunch42_many_findings.json new file mode 100644 index 0000000000..1ea3aca89f --- /dev/null +++ b/unittests/scans/crunch42/crunch42_many_findings.json @@ -0,0 +1,251 @@ +{ + "end": "1709535630", + "report": { + "index": [ + "/components/security/ApiKey", + "/paths/~1integration-test~1generate/post/security/0/ApiKeyAuth", + "/paths/~1integration-test~1health/get/security", + "/paths/~1integration-test~1invalidate/delete/security/0/ApiKeyAuth", + "/paths/~1integration-test~1ping/get/security", + "/paths/~1integration-test~1refresh/get/security/0/ApiKeyAuth", + "/paths/~1integration-test~1refresh/put/security/0/ApiKeyAuth", + "/paths/~1integration-test~1verify/get/security/0/ApiKeyAuth" + ], + "assessmentVersion": "3.1.6", + "assessmentReportVersion": "1.0.1", + "commit": "ahso2mom3neiviungoh4ENgahXie2Aer4ain5oba-E", + "oasVersion": "3.0.0", + "apiVersion": "1.0.0", + "fileId": "c65d4166-ddf7-11ee-a7f6-bf9763730afb", + "apiId": "", + "openapiState": "valid", + "score": 82.86, + "valid": true, + "criticality": 4, + "issueCounter": 8, + "minimalReport": false, + "maxEntriesPerIssue": 30, + "maxImpactedPerEntry": 30, + "security": { + "issueCounter": 8, + "score": 12.86, + "criticality": 4, + "issues": { + "v3-global-securityscheme-apikey-inheader": { + "description": "Transporting API keys in a header over network allowed", + "issues": [ + { + "score": 0, + "pointer": 0, + "tooManyImpacted": false, + "criticality": 1, + "request": true, + "fingerprint": "teephei0aes4ohxur7Atie6zuiCh9weeshue0kai" + } + ], + "issueCounter": 1, + "score": 0, + "criticality": 1, + "tooManyError": false + }, + "v3-operation-securityrequirement-apikey-inheader": { + "description": "Operation accepts API keys transported in a header over network", + "issues": [ + { + "score": -2.14, + "pointer": 1, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "Iibooquavie0hah0quoh7thooghiith7utoow6th" + }, + { + "score": -2.14, + "pointer": 3, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "roz6Iph0eiPaih1shooPi1geiyuziitei0aiGhed" + }, + { + "score": -2.14, + "pointer": 5, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "lae4iet6XeiyiSheeZof3sheik9lahdaiph7edah" + }, + { + "score": -2.14, + "pointer": 6, + "tooManyImpacted": false, + 
"criticality": 3, + "request": true, + "fingerprint": "oNgie5Ieke9fiep6yochaT2ain8oona4xeiphiCh" + }, + { + "score": -2.14, + "pointer": 7, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "aiShievooyi1Gohn1aeque5Mae3aiBoh8oquaphe" + } + ], + "issueCounter": 5, + "score": -10.71, + "criticality": 3, + "tooManyError": false + }, + "v3-operation-securityrequirement-emptyarray": { + "description": "The security section contains an empty array", + "issues": [ + { + "specificDescription": "The security section of the operation 'get' contains an empty array", + "score": -3.21, + "pointer": 2, + "tooManyImpacted": false, + "criticality": 4, + "request": true, + "fingerprint": "oofushaeQuiev6Shegai2roh0ceighae5Daij7pi" + }, + { + "specificDescription": "The security section of the operation 'get' contains an empty array", + "score": -3.21, + "pointer": 4, + "tooManyImpacted": false, + "criticality": 4, + "request": true, + "fingerprint": "Eife6Tu5liequiec8AhZ6booGheegh5oShues2bi" + } + ], + "issueCounter": 2, + "score": -6.43, + "criticality": 4, + "tooManyError": false + } + }, + "subgroupIssueCounter": { + "authentication": { + "none": 0, + "info": 1, + "low": 0, + "medium": 5, + "high": 2, + "critical": 0 + }, + "authorization": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + }, + "transport": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + } + } + }, + "data": { + "issueCounter": 0, + "score": 70, + "criticality": 0, + "issues": {}, + "subgroupIssueCounter": { + "parameters": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + }, + "responseHeader": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + }, + "responseDefinition": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + }, + "schema": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + }, + "paths": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + } + } + }, + "issuesKey": [ + "v3-operation-securityrequirement-emptyarray", + "v3-global-securityscheme-apikey-inheader", + "v3-operation-securityrequirement-apikey-inheader" + ], + "summary": { + "oasVersion": "3.0.0", + "apiVersion": "1.0.0", + "basepath": "", + "apiName": "Example Authentication Service", + "description": "Authentication Service", + "endpoints": [ + "https://auth-dev-internal.example.com/", + "https://auth-dev-internal.example.com/" + ], + "pathCounter": 1, + "operationCounter": 7, + "parameterCounter": 4, + "requestBodyCounter": 0, + "schemesCounter": { + "https": 7 + }, + "requestContentType": {}, + "responseContentType": { + "application/json": 19 + }, + "securitySchemes": { + "ApiKeyAuth": { + "counterInsecure": 0, + "counterSecure": 5, + "type": "apiKey", + "apiKeyIn": "header", + "apiKeyName": "X-API-Key" + } + }, + "componentsSchemasCounter": 6, + "componentsResponsesCounter": 0, + "componentsParametersCounter": 2, + "componentsExamplesCounter": 0, + "componentsRequestBodiesCounter": 0, + "componentsHeadersCounter": 0, + "componentsSecuritySchemesCounter": 1, + "componentsLinksCounter": 0, + "componentsCallbacksCounter": 0 + } + }, + "start": "1702028474", + "taskId": "0ccd5572-ddf9-11ee-935d-d7d416afd73f" +} \ No newline at end of file diff --git a/unittests/scans/crunch42/crunch42_many_findings2.json b/unittests/scans/crunch42/crunch42_many_findings2.json new file mode 100644 index 
0000000000..b9aa1f75fa --- /dev/null +++ b/unittests/scans/crunch42/crunch42_many_findings2.json @@ -0,0 +1,442 @@ +{ + "end": "2131451849", + "report": { + "index": [ + "/definitions/Objects/additionalProperties", + "/definitions/Objects/properties/all_objects/items", + "/definitions/ObjectsList/additionalProperties", + "/definitions/auth_claims", + "/definitions/auth_claims/additionalProperties", + "/definitions/auth_claims/properties/level/format", + "/paths/~1admin~1all_objects/get/parameters/0", + "/paths/~1admin~1all_objects/get/responses/403", + "/paths/~1admin~1all_objects/get/security/0/access-token", + "/paths/~1admin~1objects~1search/get/parameters/0", + "/paths/~1admin~1objects~1search/get/parameters/1", + "/paths/~1admin~1objects~1search/get/responses/403", + "/paths/~1admin~1objects~1search/get/security/0/access-token", + "/paths/~1login/post", + "/paths/~1login/post/parameters/0", + "/paths/~1login/post/parameters/1", + "/paths/~1register/post", + "/paths/~1object~1edit_info/put/parameters/1", + "/paths/~1object~1edit_info/put/responses/403", + "/paths/~1object~1edit_info/put/security/0/access-token", + "/paths/~1object~1info/get/security/0/access-token", + "/securityDefinitions/access-token" + ], + "assessmentVersion": "3.1.6", + "assessmentReportVersion": "1.0.1", + "commit": "theePhohphooQuoh6ii3naiS1Goalee9Chooghei-N", + "oasVersion": "2.0", + "apiVersion": "UAT-JWT-Validation", + "fileId": "2eeb479e-ddfa-11ee-9768-bb6e68d5b5fa", + "apiId": "", + "openapiState": "valid", + "score": 79.94, + "valid": true, + "criticality": 3, + "issueCounter": 13, + "warnings": { + "issues": { + "warning-global-schema-unused": { + "description": "Reusable schema definition is not used in the OpenAPI definition", + "totalIssues": 1, + "issues": [ + { + "pointer": 3, + "specificDescription": "The reusable schema definition 'acme_claims' is not used in the OpenAPI definition", + "fingerprint": "ahthi2Ahshaeghah2iewoo0aiF4quoath5Iej0ku" + } + ], + "tooManyError": false + }, + "warning-sample-undefined": { + "description": "No sample values or examples were provided for API Conformance Scan", + "totalIssues": 5, + "issues": [ + { + "pointer": 17, + "specificDescription": "No sample defined in the 'Parameter' object", + "fingerprint": "aereePheeb0puh5tahwoshi8Yei9woophahr7koh" + }, + { + "pointer": 9, + "specificDescription": "No sample defined in the 'Parameter' object", + "fingerprint": "aiseiquohNaik9aThae9oshu8te8ree9Yayie7Ha" + }, + { + "pointer": 10, + "specificDescription": "No sample defined in the 'Parameter' object", + "fingerprint": "thuf5Imiefe3aeTee4soh8quae8ahtho0ap8wen4" + }, + { + "pointer": 6, + "specificDescription": "No sample defined in the 'Parameter' object", + "fingerprint": "faeti4aide0ahTho0shiixo5cheipha9Eigahr3s" + }, + { + "pointer": 14, + "specificDescription": "No sample defined in the 'Parameter' object", + "fingerprint": "Dei9Ahraer7iech8iuk6eeyeero8quea3nahc8ah" + } + ], + "tooManyError": false + }, + "warning-schema-additionalproperties-boolean": { + "description": "Schema defines additionalProperties as a boolean value", + "totalIssues": 3, + "issues": [ + { + "pointer": 2, + "specificDescription": "", + "fingerprint": "shoo1diedoh2aex6mivi9geab9saeyoo7Dae6oth" + }, + { + "pointer": 4, + "specificDescription": "", + "fingerprint": "ooreiz0gepaeSephah6ToN8eC7tioseez4auQu3U" + }, + { + "pointer": 0, + "specificDescription": "", + "fingerprint": "aedaal8uu5aabuohuoSheidoonohSheef2iquee6" + } + ], + "tooManyError": false + }, + "warning-schema-format-improper": { 
+ "description": "Schema format is not applicable to the schema's type", + "totalIssues": 1, + "issues": [ + { + "pointer": 5, + "specificDescription": "The format 'int32' of the schema is not applicable to the schema's type 'number'", + "fingerprint": "va8Lieweu5SieTh1ahcoole0Nahhai5ivaechith" + } + ], + "tooManyError": false + } + } + }, + "operationsNoAuthentication": [ + 13, + 16 + ], + "minimalReport": false, + "maxEntriesPerIssue": 30, + "maxImpactedPerEntry": 30, + "security": { + "issueCounter": 5, + "score": 20, + "criticality": 3, + "issues": { + "global-securityscheme-apikey-inheader": { + "description": "Transporting API keys in a header over network allowed", + "issues": [ + { + "score": 0, + "pointer": 21, + "tooManyImpacted": false, + "criticality": 1, + "request": true, + "fingerprint": "auCh0yi8sheumohruegh7of4EiT0ahngooK1aeje" + } + ], + "issueCounter": 1, + "score": 0, + "criticality": 1, + "tooManyError": false + }, + "operation-securityrequirement-apikey-inheader": { + "description": "Operation accepts API keys transported in a header over network", + "issues": [ + { + "score": -2.5, + "pointer": 8, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "Eima0iu4xaatoh1lohboophohpheiBai1iR0opei" + }, + { + "score": -2.5, + "pointer": 12, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "Ud1ohcetah5iongai8yee0veishogai2vuQuu7me" + }, + { + "score": -2.5, + "pointer": 19, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "wooN7xoof5bieChie9Aech5ohm4eerae1enu6ohr" + }, + { + "score": -2.5, + "pointer": 20, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "eeliequooliexohfookosang7hooruR4pae9Aiph" + } + ], + "issueCounter": 4, + "score": -10, + "criticality": 3, + "tooManyError": false + } + }, + "subgroupIssueCounter": { + "authentication": { + "none": 0, + "info": 1, + "low": 0, + "medium": 4, + "high": 0, + "critical": 0 + }, + "authorization": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + }, + "transport": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + } + } + }, + "data": { + "issueCounter": 8, + "score": 59.94, + "criticality": 3, + "issues": { + "parameter-string-maxlength": { + "description": "String parameter has no maximum length defined", + "issues": [ + { + "specificDescription": "String parameter 'user' has no maximum length defined", + "score": -1.87, + "pointer": 14, + "tooManyImpacted": false, + "pointersAffected": [ + 13 + ], + "criticality": 3, + "request": true, + "fingerprint": "eeT0queiSahchohc5meik9Zoomoolah6Weo3phes" + }, + { + "specificDescription": "String parameter 'pass' has no maximum length defined", + "score": -1.87, + "pointer": 15, + "tooManyImpacted": false, + "pointersAffected": [ + 13 + ], + "criticality": 3, + "request": true, + "fingerprint": "ohvieX1AhzuphoocheeVoi0echoGh9coo7thai1o" + } + ], + "issueCounter": 2, + "score": -3.73, + "criticality": 3, + "tooManyError": false + }, + "parameter-string-pattern": { + "description": "String parameter has no pattern defined", + "issues": [ + { + "specificDescription": "String parameter 'user' has no pattern defined", + "score": -2.8, + "pointer": 14, + "tooManyImpacted": false, + "pointersAffected": [ + 13 + ], + "criticality": 3, + "request": true, + "fingerprint": "oveedeisohwahThae4Ier5oghaebaingai5iqueS" + }, + { + "specificDescription": "String parameter 'pass' has no pattern 
defined", + "score": -2.8, + "pointer": 15, + "tooManyImpacted": false, + "pointersAffected": [ + 13 + ], + "criticality": 3, + "request": true, + "fingerprint": "Iyung2laiGaish6kos6quiedeiX5uob3Bozee3mu" + } + ], + "issueCounter": 2, + "score": -5.6, + "criticality": 3, + "tooManyError": false + }, + "response-schema-undefined": { + "description": "Response that should contain a body has no schema defined", + "issues": [ + { + "score": -0.18, + "pointer": 7, + "tooManyImpacted": false, + "criticality": 3, + "response": true, + "fingerprint": "aeVahquu6chai1beaf9neithu8epha0Ohsh6echi" + }, + { + "score": -0.18, + "pointer": 11, + "tooManyImpacted": false, + "criticality": 3, + "response": true, + "fingerprint": "ai8Meishei0oHixuSucaiceL0aqu8uocahyahG6l" + }, + { + "score": -0.18, + "pointer": 18, + "tooManyImpacted": false, + "criticality": 3, + "response": true, + "fingerprint": "euN9zohhohPeesoY8ahbaichae6Ood0nohbio5ke" + } + ], + "issueCounter": 3, + "score": -0.53, + "criticality": 3, + "tooManyError": false + }, + "schema-response-object-without-properties": { + "description": "Schema of a JSON object in a response has no properties defined", + "issues": [ + { + "score": -0.2, + "pointer": 1, + "tooManyImpacted": false, + "criticality": 3, + "response": true, + "fingerprint": "ufuPheiyaelaePood3AeW8ooc3pooj2AiwaiCeil" + } + ], + "issueCounter": 1, + "score": -0.2, + "criticality": 3, + "tooManyError": false + } + }, + "subgroupIssueCounter": { + "parameters": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + }, + "responseHeader": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + }, + "responseDefinition": { + "none": 0, + "info": 0, + "low": 0, + "medium": 3, + "high": 0, + "critical": 0 + }, + "schema": { + "none": 0, + "info": 0, + "low": 0, + "medium": 1, + "high": 0, + "critical": 0 + }, + "paths": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + } + } + }, + "issuesKey": [ + "schema-response-object-without-properties", + "warning-schema-additionalproperties-boolean", + "parameter-string-pattern", + "parameter-string-maxlength", + "global-securityscheme-apikey-inheader", + "operation-securityrequirement-apikey-inheader", + "response-schema-undefined", + "warning-schema-format-improper", + "warning-sample-undefined", + "warning-global-schema-unused" + ], + "summary": { + "oasVersion": "2.0", + "apiVersion": "UAT-JWT-Validation", + "basepath": "", + "apiName": "Example App API", + "description": "Example Sharing API", + "endpoints": [ + "https//example.asia-1.cloud.provider.com/api" + ], + "pathCounter": 6, + "operationCounter": 6, + "parameterCounter": 4, + "requestBodyCounter": 3, + "schemesCounter": { + "https": 6 + }, + "requestContentType": { + "application/json": 2, + "application/x-www-form-urlencoded": 1 + }, + "responseContentType": { + "application/json": 16 + }, + "securitySchemes": { + "access-token": { + "counterInsecure": 0, + "counterSecure": 4, + "type": "apiKey", + "apiKeyIn": "header", + "apiKeyName": "x-access-token" + } + }, + "componentsSchemasCounter": 6, + "componentsResponsesCounter": 0, + "componentsParametersCounter": 0, + "componentsExamplesCounter": 0, + "componentsRequestBodiesCounter": 0, + "componentsHeadersCounter": 0, + "componentsSecuritySchemesCounter": 0, + "componentsLinksCounter": 0, + "componentsCallbacksCounter": 0 + } + }, + "start": "1693265564", + "taskId": "970e33ac-ddfc-11ee-a42e-af596b69b8f4" +} \ No newline at end of file diff 
--git a/unittests/scans/npm_audit_7_plus/many_vulns.json b/unittests/scans/npm_audit_7_plus/many_vulns.json new file mode 100644 index 0000000000..2831c8be15 --- /dev/null +++ b/unittests/scans/npm_audit_7_plus/many_vulns.json @@ -0,0 +1,188 @@ +{ + "auditReportVersion": 2, + "vulnerabilities": { + "@vercel/fun": { + "name": "@vercel/fun", + "severity": "moderate", + "isDirect": false, + "via": [ + "debug", + "semver" + ], + "effects": [ + "vercel" + ], + "range": "<=1.1.0", + "nodes": [ + "node_modules/@vercel/fun" + ], + "fixAvailable": { + "name": "vercel", + "version": "32.3.0", + "isSemVerMajor": true + } + }, + "@vercel/node": { + "name": "@vercel/node", + "severity": "low", + "isDirect": false, + "via": [ + "undici" + ], + "effects": [ + "vercel" + ], + "range": "2.14.0 || >=3.0.2", + "nodes": [ + "node_modules/@vercel/node" + ], + "fixAvailable": { + "name": "vercel", + "version": "32.3.0", + "isSemVerMajor": true + } + }, + "debug": { + "name": "debug", + "severity": "moderate", + "isDirect": false, + "via": [ + { + "source": 1094219, + "name": "debug", + "dependency": "debug", + "title": "Regular Expression Denial of Service in debug", + "url": "https://github.com/advisories/GHSA-gxpj-cx7g-858c", + "severity": "moderate", + "cwe": [ + "CWE-400" + ], + "cvss": { + "score": 5.3, + "vectorString": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:L" + }, + "range": ">=4.0.0 <4.3.1" + } + ], + "effects": [ + "@vercel/fun" + ], + "range": "4.0.0 - 4.3.0", + "nodes": [ + "node_modules/@vercel/fun/node_modules/debug" + ], + "fixAvailable": { + "name": "vercel", + "version": "32.3.0", + "isSemVerMajor": true + } + }, + "semver": { + "name": "semver", + "severity": "moderate", + "isDirect": false, + "via": [ + { + "source": 1096482, + "name": "semver", + "dependency": "semver", + "title": "semver vulnerable to Regular Expression Denial of Service", + "url": "https://github.com/advisories/GHSA-c2qf-rxjj-qqgw", + "severity": "moderate", + "cwe": [ + "CWE-1333" + ], + "cvss": { + "score": 5.3, + "vectorString": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:L" + }, + "range": ">=7.0.0 <7.5.2" + } + ], + "effects": [ + "@vercel/fun" + ], + "range": "7.0.0 - 7.5.1", + "nodes": [ + "node_modules/@vercel/fun/node_modules/semver" + ], + "fixAvailable": { + "name": "vercel", + "version": "32.3.0", + "isSemVerMajor": true + } + }, + "undici": { + "name": "undici", + "severity": "low", + "isDirect": false, + "via": [ + { + "source": 1096586, + "name": "undici", + "dependency": "undici", + "title": "Undici proxy-authorization header not cleared on cross-origin redirect in fetch", + "url": "https://github.com/advisories/GHSA-3787-6prv-h9w3", + "severity": "low", + "cwe": [ + "CWE-200" + ], + "cvss": { + "score": 3.9, + "vectorString": "CVSS:3.1/AV:N/AC:H/PR:H/UI:R/S:U/C:L/I:L/A:L" + }, + "range": "<=5.28.2" + } + ], + "effects": [ + "@vercel/node" + ], + "range": "<=5.28.2", + "nodes": [ + "node_modules/undici" + ], + "fixAvailable": { + "name": "vercel", + "version": "32.3.0", + "isSemVerMajor": true + } + }, + "vercel": { + "name": "vercel", + "severity": "moderate", + "isDirect": true, + "via": [ + "@vercel/fun", + "@vercel/node" + ], + "effects": [], + "range": "28.12.3 || 29.0.1 - 29.0.3 || >=32.0.2", + "nodes": [ + "node_modules/vercel" + ], + "fixAvailable": { + "name": "vercel", + "version": "32.3.0", + "isSemVerMajor": true + } + } + }, + "metadata": { + "vulnerabilities": { + "info": 0, + "low": 2, + "moderate": 4, + "high": 0, + "critical": 0, + "total": 6 + }, + "dependencies": { + "prod": 737, + 
"dev": 306, + "optional": 153, + "peer": 50, + "peerOptional": 0, + "total": 1180 + } + } +} diff --git a/unittests/scans/npm_audit_7_plus/no_vuln.json b/unittests/scans/npm_audit_7_plus/no_vuln.json new file mode 100644 index 0000000000..9f407cae89 --- /dev/null +++ b/unittests/scans/npm_audit_7_plus/no_vuln.json @@ -0,0 +1,23 @@ +{ + "auditReportVersion": 2, + "vulnerabilities": { + }, + "metadata": { + "vulnerabilities": { + "info": 0, + "low": 0, + "moderate": 0, + "high": 0, + "critical": 0, + "total": 0 + }, + "dependencies": { + "prod": 98, + "dev": 0, + "optional": 0, + "peer": 0, + "peerOptional": 0, + "total": 97 + } + } +} diff --git a/unittests/scans/npm_audit_7_plus/one_vuln.json b/unittests/scans/npm_audit_7_plus/one_vuln.json new file mode 100644 index 0000000000..89b48e280f --- /dev/null +++ b/unittests/scans/npm_audit_7_plus/one_vuln.json @@ -0,0 +1,75 @@ +{ + "auditReportVersion": 2, + "vulnerabilities": { + "debug": { + "name": "debug", + "severity": "high", + "isDirect": true, + "via": [ + { + "source": 1094222, + "name": "debug", + "dependency": "debug", + "title": "Regular Expression Denial of Service in debug", + "url": "https://github.com/advisories/GHSA-gxpj-cx7g-858c", + "severity": "moderate", + "cwe": [ + "CWE-400" + ], + "cvss": { + "score": 5.3, + "vectorString": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:L" + }, + "range": "<2.6.9" + }, + { + "source": 1094457, + "name": "debug", + "dependency": "debug", + "title": "debug Inefficient Regular Expression Complexity vulnerability", + "url": "https://github.com/advisories/GHSA-9vvw-cc9w-f27h", + "severity": "high", + "cwe": [ + "CWE-1333" + ], + "cvss": { + "score": 7.5, + "vectorString": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H" + }, + "range": "<2.6.9" + }, + "ms" + ], + "effects": [ + ], + "range": "<=2.6.8", + "nodes": [ + "node_modules/debug" + ], + "fixAvailable": { + "name": "express", + "version": "4.18.3", + "isSemVerMajor": false + } + } + }, + "metadata": { + "vulnerabilities": { + "info": 0, + "low": 0, + "moderate": 0, + "high": 1, + "critical": 0, + "total": 1 + }, + "dependencies": { + "prod": 98, + "dev": 0, + "optional": 0, + "peer": 0, + "peerOptional": 0, + "total": 97 + } + } +} + diff --git a/unittests/scans/pip_audit/empty_new.json b/unittests/scans/pip_audit/empty_new.json new file mode 100644 index 0000000000..45f00a3dec --- /dev/null +++ b/unittests/scans/pip_audit/empty_new.json @@ -0,0 +1,3 @@ +{ + "dependencies":[] +} diff --git a/unittests/scans/pip_audit/many_vulns_new.json b/unittests/scans/pip_audit/many_vulns_new.json new file mode 100644 index 0000000000..877ebf78ed --- /dev/null +++ b/unittests/scans/pip_audit/many_vulns_new.json @@ -0,0 +1,91 @@ +{ + "dependencies":[ + { + "name": "adal", + "version": "1.2.2", + "vulns": [] + }, + { + "name": "aiohttp", + "version": "3.6.2", + "vulns": [ + { + "id": "PYSEC-2021-76", + "fix_versions": [ + "3.7.4" + ], + "description": "aiohttp is an asynchronous HTTP client/server framework for asyncio and Python. In aiohttp before version 3.7.4 there is an open redirect vulnerability. A maliciously crafted link to an aiohttp-based web-server could redirect the browser to a different website. It is caused by a bug in the `aiohttp.web_middlewares.normalize_path_middleware` middleware. This security problem has been fixed in 3.7.4. Upgrade your dependency using pip as follows \"pip install aiohttp >= 3.7.4\". 
If upgrading is not an option for you, a workaround can be to avoid using `aiohttp.web_middlewares.normalize_path_middleware` in your applications." + } + ] + }, + { + "name": "alabaster", + "version": "0.7.12", + "vulns": [] + }, + { + "name": "azure-devops", + "skip_reason": "Dependency not found on PyPI and could not be audited: azure-devops (0.17.0)" + }, + { + "name": "django", + "version": "3.2.9", + "vulns": [ + { + "id": "PYSEC-2021-439", + "fix_versions": [ + "2.2.25", + "3.1.14", + "3.2.10" + ], + "description": "In Django 2.2 before 2.2.25, 3.1 before 3.1.14, and 3.2 before 3.2.10, HTTP requests for URLs with trailing newlines could bypass upstream access control based on URL paths." + } + ] + }, + { + "name": "lxml", + "version": "4.6.4", + "vulns": [ + { + "id": "PYSEC-2021-852", + "fix_versions": [], + "description": "lxml is a library for processing XML and HTML in the Python language. Prior to version 4.6.5, the HTML Cleaner in lxml.html lets certain crafted script content pass through, as well as script content in SVG files embedded using data URIs. Users that employ the HTML cleaner in a security relevant context should upgrade to lxml 4.6.5 to receive a patch. There are no known workarounds available." + } + ] + }, + { + "name": "twisted", + "version": "18.9.0", + "vulns": [ + { + "id": "PYSEC-2019-128", + "fix_versions": [ + "19.2.1" + ], + "description": "In Twisted before 19.2.1, twisted.web did not validate or sanitize URIs or HTTP methods, allowing an attacker to inject invalid characters such as CRLF." + }, + { + "id": "PYSEC-2020-260", + "fix_versions": [ + "20.3.0rc1" + ], + "description": "In Twisted Web through 19.10.0, there was an HTTP request splitting vulnerability. When presented with a content-length and a chunked encoding header, the content-length took precedence and the remainder of the request body was interpreted as a pipelined request." + }, + { + "id": "PYSEC-2019-129", + "fix_versions": [ + "19.7.0rc1" + ], + "description": "In words.protocols.jabber.xmlstream in Twisted through 19.2.1, XMPP support did not verify certificates when used with TLS, allowing an attacker to MITM connections." + }, + { + "id": "PYSEC-2020-259", + "fix_versions": [ + "20.3.0rc1" + ], + "description": "In Twisted Web through 19.10.0, there was an HTTP request splitting vulnerability. When presented with two content-length headers, it ignored the first header. When the second content-length value was set to zero, the request body was interpreted as a pipelined request." 
+ } + ] + } + ] +} diff --git a/unittests/scans/pip_audit/zero_vulns_new.json b/unittests/scans/pip_audit/zero_vulns_new.json new file mode 100644 index 0000000000..f32e9b1b25 --- /dev/null +++ b/unittests/scans/pip_audit/zero_vulns_new.json @@ -0,0 +1,18 @@ +{ + "dependencies":[ + { + "name": "adal", + "version": "1.2.2", + "vulns": [] + }, + { + "name": "alabaster", + "version": "0.7.12", + "vulns": [] + }, + { + "name": "azure-devops", + "skip_reason": "Dependency not found on PyPI and could not be audited: azure-devops (0.17.0)" + } + ] +} diff --git a/unittests/tools/test_checkmarx_one_parser.py b/unittests/tools/test_checkmarx_one_parser.py new file mode 100644 index 0000000000..31d6fdbed5 --- /dev/null +++ b/unittests/tools/test_checkmarx_one_parser.py @@ -0,0 +1,47 @@ +from dojo.models import Test +from dojo.tools.checkmarx_one.parser import CheckmarxOneParser +from ..dojo_test_case import DojoTestCase + + +class TestCheckmarxOneParser(DojoTestCase): + + def test_checkmarx_one_many_vulns(self): + with open("unittests/scans/checkmarx_one/checkmarx_one.json") as testfile: + parser = CheckmarxOneParser() + findings = parser.get_findings(testfile, Test()) + testfile.close() + self.assertEqual(5, len(findings)) + with self.subTest(i=0): + for finding in findings: + self.assertIsNotNone(finding.unique_id_from_tool) + self.assertIsNotNone(finding.title) + self.assertIsNotNone(finding.test) + self.assertIsNotNone(finding.date) + self.assertIsNotNone(finding.severity) + self.assertIsNotNone(finding.description) + finding_test = findings[0] + self.assertEqual("Medium", finding_test.severity) + self.assertEqual("/src/helpers/Constants.ts", finding_test.file_path) + + def test_checkmarx_one_many_findings(self): + with open("unittests/scans/checkmarx_one/many_findings.json") as testfile: + parser = CheckmarxOneParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(6, len(findings)) + with self.subTest(i=0): + for finding in findings: + self.assertIsNotNone(finding.unique_id_from_tool) + self.assertIsNotNone(finding.title) + self.assertIsNotNone(finding.test) + self.assertIsNotNone(finding.date) + self.assertIsNotNone(finding.severity) + self.assertIsNotNone(finding.description) + finding_test = findings[0] + self.assertEqual("High", finding_test.severity) + self.assertEqual("/qe/testharness/Dockerfile", finding_test.file_path) + + def test_checkmarx_one_no_findings(self): + with open("unittests/scans/checkmarx_one/no_findings.json") as testfile: + parser = CheckmarxOneParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(0, len(findings)) diff --git a/unittests/tools/test_checkmarx_parser.py b/unittests/tools/test_checkmarx_parser.py index c43e24fb57..f09e7d7da1 100644 --- a/unittests/tools/test_checkmarx_parser.py +++ b/unittests/tools/test_checkmarx_parser.py @@ -203,8 +203,8 @@ def check_parse_file_with_single_vulnerability_has_single_finding(self, findings item.file_path, ) # ScanStart - self.assertEqual(datetime.datetime, type(item.date)) - self.assertEqual(datetime.datetime(2018, 2, 25, 11, 35, 52), item.date) + self.assertEqual(datetime.date, type(item.date)) + self.assertEqual(datetime.date(2018, 2, 25), item.date) self.assertEqual(bool, type(item.static_finding)) self.assertEqual(True, item.static_finding) @@ -293,7 +293,7 @@ def test_file_name_aggregated_parse_file_with_multiple_vulnerabilities_has_multi finding = findings[0] self.assertEqual("SQL Injection (Assignment5.java)", finding.title) self.assertEqual("High", finding.severity) - 
self.assertEqual(datetime.datetime(2018, 2, 25, 11, 35, 52), finding.date) + self.assertEqual(datetime.date(2018, 2, 25), finding.date) self.assertEqual(True, finding.static_finding) self.assertEqual("WebGoat/webgoat-lessons/challenge/src/main/java/org/owasp/webgoat/plugin/challenge5/challenge6/Assignment5.java", finding.file_path) @@ -312,7 +312,7 @@ def test_detailed_parse_file_with_multiple_vulnerabilities_has_multiple_findings finding = findings[0] self.assertEqual("SQL Injection (Assignment5.java)", finding.title) self.assertEqual("High", finding.severity) - self.assertEqual(datetime.datetime(2018, 2, 25, 11, 35, 52), finding.date) + self.assertEqual(datetime.date(2018, 2, 25), finding.date) self.assertEqual(True, finding.static_finding) self.assertEqual("WebGoat/webgoat-lessons/challenge/src/main/java/org/owasp/webgoat/plugin/challenge5/challenge6/Assignment5.java", finding.file_path) self.assertEqual(50, finding.line) @@ -516,8 +516,8 @@ def check_parse_file_with_utf8_replacement_char(self, findings): item.file_path, ) # ScanStart - self.assertEqual(datetime.datetime, type(item.date)) - self.assertEqual(datetime.datetime(2018, 2, 25, 11, 35, 52), item.date) + self.assertEqual(datetime.date, type(item.date)) + self.assertEqual(datetime.date(2018, 2, 25), item.date) self.assertEqual(bool, type(item.static_finding)) self.assertEqual(True, item.static_finding) @@ -665,8 +665,8 @@ def check_parse_file_with_utf8_various_non_ascii_char(self, findings): item.file_path, ) # ScanStart - self.assertEqual(datetime.datetime, type(item.date)) - self.assertEqual(datetime.datetime(2018, 2, 25, 11, 35, 52), item.date) + self.assertEqual(datetime.date, type(item.date)) + self.assertEqual(datetime.date(2018, 2, 25), item.date) self.assertEqual(bool, type(item.static_finding)) self.assertEqual(True, item.static_finding) @@ -685,8 +685,8 @@ def test_file_with_multiple_findings_is_aggregated_with_query_id(self, mock): # ScanStart self.assertEqual("Client Potential ReDoS In Match (prettify.js)", finding.title) self.assertEqual("Low", finding.severity) - self.assertEqual(datetime.datetime, type(finding.date)) - self.assertEqual(datetime.datetime(2021, 11, 17, 13, 50, 45), finding.date) + self.assertEqual(datetime.date, type(finding.date)) + self.assertEqual(datetime.date(2021, 11, 17), finding.date) self.assertEqual(bool, type(finding.static_finding)) self.assertEqual(True, finding.static_finding) @@ -705,8 +705,8 @@ def test_file_with_empty_filename(self, mock): # ScanStart self.assertEqual("Missing HSTS Header", finding.title) self.assertEqual("Medium", finding.severity) - self.assertEqual(datetime.datetime, type(finding.date)) - self.assertEqual(datetime.datetime(2021, 12, 24, 9, 12, 14), finding.date) + self.assertEqual(datetime.date, type(finding.date)) + self.assertEqual(datetime.date(2021, 12, 24), finding.date) self.assertEqual(bool, type(finding.static_finding)) self.assertEqual(True, finding.static_finding) @@ -791,7 +791,7 @@ def test_file_issue6956(self, mock): self.assertEqual(89, finding.cwe) self.assertEqual("/webgoat-lessons/challenge/src/main/java/org/owasp/webgoat/challenges/challenge5/Assignment5.java", finding.file_path) self.assertEqual(61, finding.line) - self.assertEqual(datetime.date(2022, 5, 6), finding.date.date()) + self.assertEqual(datetime.date(2022, 5, 6), finding.date) if finding.unique_id_from_tool == "SYlu22e7ZQydKJFOlC/o1EsyixQ=": with self.subTest(i="SYlu22e7ZQydKJFOlC/o1EsyixQ="): self.assertEqual("SQL Injection", finding.title) @@ -799,7 +799,7 @@ def 
test_file_issue6956(self, mock): self.assertEqual(89, finding.cwe) self.assertEqual("/webgoat-lessons/sql-injection/src/main/java/org/owasp/webgoat/sql_injection/introduction/SqlInjectionLesson5.java", finding.file_path) self.assertEqual(72, finding.line) - self.assertEqual(datetime.date(2022, 5, 6), finding.date.date()) + self.assertEqual(datetime.date(2022, 5, 6), finding.date) # test one in SCA part if finding.unique_id_from_tool == "GkVx1zoIKcd1EF72zqWrGzeVTmo=": with self.subTest(i="GkVx1zoIKcd1EF72zqWrGzeVTmo="): @@ -812,7 +812,7 @@ def test_file_issue6956(self, mock): self.assertTrue(finding.active) self.assertFalse(finding.verified) self.assertIsNone(finding.line) - self.assertEqual(datetime.date(2022, 5, 6), finding.date.date()) + self.assertEqual(datetime.date(2022, 5, 6), finding.date) # test one in KICS part if finding.unique_id_from_tool == "eZrh18HAPbe2LbDAprSPrwncAC0=": with self.subTest(i="eZrh18HAPbe2LbDAprSPrwncAC0="): @@ -822,4 +822,26 @@ def test_file_issue6956(self, mock): self.assertTrue(finding.active) self.assertFalse(finding.verified) self.assertEqual("/webgoat-server/Dockerfile", finding.file_path) - self.assertEqual(datetime.date(2022, 5, 6), finding.date.date()) + self.assertEqual(datetime.date(2022, 5, 6), finding.date) + + @patch('dojo.tools.checkmarx.parser.add_language') + def test_finding_date_should_be_date_xml(self, mock): + my_file_handle, product, engagement, test = self.init( + get_unit_tests_path() + "/scans/checkmarx/single_finding.xml" + ) + parser = CheckmarxParser() + parser.set_mode('detailed') + findings = parser.get_findings(my_file_handle, test) + self.teardown(my_file_handle) + self.assertEqual(findings[0].date, datetime.date(2018, 2, 25)) + + @patch('dojo.tools.checkmarx.parser.add_language') + def test_finding_date_should_be_date_json(self, mock): + my_file_handle, product, engagement, test = self.init( + get_unit_tests_path() + "/scans/checkmarx/multiple_findings.json" + ) + parser = CheckmarxParser() + parser.set_mode('detailed') + findings = parser.get_findings(my_file_handle, test) + self.teardown(my_file_handle) + self.assertEqual(findings[0].date, datetime.date(2022, 2, 25)) diff --git a/unittests/tools/test_crunch42_parser.py b/unittests/tools/test_crunch42_parser.py new file mode 100644 index 0000000000..ea5188d303 --- /dev/null +++ b/unittests/tools/test_crunch42_parser.py @@ -0,0 +1,32 @@ +from ..dojo_test_case import DojoTestCase +from dojo.models import Test +from dojo.tools.crunch42.parser import Crunch42Parser + + +class TestCrunch42Parser(DojoTestCase): + + def test_crunch42parser_single_has_many_findings(self): + testfile = open("unittests/scans/crunch42/crunch42_many_findings.json") + parser = Crunch42Parser() + findings = parser.get_findings(testfile, Test()) + testfile.close() + self.assertEqual(8, len(findings)) + with self.subTest(i=0): + finding = findings[0] + self.assertEqual("teephei0aes4ohxur7Atie6zuiCh9weeshue0kai", finding.unique_id_from_tool) + self.assertEqual("Info", finding.severity) + self.assertIsNotNone(finding.description) + self.assertGreater(len(finding.description), 0) + + def test_crunch42parser_single_has_many_findings2(self): + testfile = open("unittests/scans/crunch42/crunch42_many_findings2.json") + parser = Crunch42Parser() + findings = parser.get_findings(testfile, Test()) + testfile.close() + self.assertEqual(5, len(findings)) + with self.subTest(i=0): + finding = findings[0] + self.assertEqual("auCh0yi8sheumohruegh7of4EiT0ahngooK1aeje", finding.unique_id_from_tool) + self.assertEqual("Info", 
finding.severity) + self.assertIsNotNone(finding.description) + self.assertGreater(len(finding.description), 0) diff --git a/unittests/tools/test_npm_audit_7_plus_parser.py b/unittests/tools/test_npm_audit_7_plus_parser.py new file mode 100644 index 0000000000..cf1cb339e7 --- /dev/null +++ b/unittests/tools/test_npm_audit_7_plus_parser.py @@ -0,0 +1,41 @@ +from os import path +from ..dojo_test_case import DojoTestCase +from dojo.tools.npm_audit_7_plus.parser import NpmAudit7PlusParser +from dojo.models import Test + + +class TestNpmAudit7PlusParser(DojoTestCase): + def test_npm_audit_7_plus_parser_with_no_vuln_has_no_findings(self): + testfile = open(path.join(path.dirname(__file__), "../scans/npm_audit_7_plus/no_vuln.json")) + parser = NpmAudit7PlusParser() + findings = parser.get_findings(testfile, Test()) + testfile.close() + self.assertEqual(0, len(findings)) + + def test_npm_audit_7_plus_parser_with_one_vuln_has_one_findings(self): + testfile = open(path.join(path.dirname(__file__), "../scans/npm_audit_7_plus/one_vuln.json")) + parser = NpmAudit7PlusParser() + findings = parser.get_findings(testfile, Test()) + testfile.close() + self.assertEqual(1, len(findings)) + with self.subTest(i=0): + finding = findings[0] + self.assertEqual("High", finding.severity) + self.assertEqual(400, finding.cwe) + self.assertIsNotNone(finding.description) + self.assertGreater(len(finding.description), 0) + self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:L", finding.cvssv3) + + def test_npm_audit_7_plus_parser_with_many_vuln_has_many_findings(self): + testfile = open(path.join(path.dirname(__file__), "../scans/npm_audit_7_plus/many_vulns.json")) + parser = NpmAudit7PlusParser() + findings = parser.get_findings(testfile, Test()) + testfile.close() + self.assertEqual(6, len(findings)) + with self.subTest(i=0): + finding = findings[0] + self.assertEqual("Medium", finding.severity) + self.assertEqual(1035, finding.cwe) + self.assertIsNotNone(finding.description) + self.assertGreater(len(finding.description), 0) + self.assertEqual("@vercel/fun", finding.title) diff --git a/unittests/tools/test_pip_audit_parser.py b/unittests/tools/test_pip_audit_parser.py index eb421f761a..237945cfc6 100644 --- a/unittests/tools/test_pip_audit_parser.py +++ b/unittests/tools/test_pip_audit_parser.py @@ -7,80 +7,83 @@ class TestPipAuditParser(DojoTestCase): def test_parser_empty(self): - testfile = open("unittests/scans/pip_audit/empty.json") - parser = PipAuditParser() - findings = parser.get_findings(testfile, Test()) - testfile.close() - self.assertEqual(0, len(findings)) + testfiles = ["unittests/scans/pip_audit/empty.json", + "unittests/scans/pip_audit/empty_new.json"] + for path in testfiles: + testfile = open(path) + parser = PipAuditParser() + findings = parser.get_findings(testfile, Test()) + testfile.close() + self.assertEqual(0, len(findings)) def test_parser_zero_findings(self): - testfile = open("unittests/scans/pip_audit/zero_vulns.json") - parser = PipAuditParser() - findings = parser.get_findings(testfile, Test()) - testfile.close() - self.assertEqual(0, len(findings)) + testfiles = ["unittests/scans/pip_audit/zero_vulns.json", + "unittests/scans/pip_audit/zero_vulns_new.json"] + for path in testfiles: + testfile = open(path) + parser = PipAuditParser() + findings = parser.get_findings(testfile, Test()) + testfile.close() + self.assertEqual(0, len(findings)) def test_parser_many_vulns(self): - testfile = open("unittests/scans/pip_audit/many_vulns.json") - parser = PipAuditParser() - findings = 
parser.get_findings(testfile, Test()) - testfile.close() - self.assertEqual(7, len(findings)) + testfiles = ["unittests/scans/pip_audit/many_vulns.json", + "unittests/scans/pip_audit/many_vulns_new.json"] + for path in testfiles: + testfile = open(path) + parser = PipAuditParser() + findings = parser.get_findings(testfile, Test()) + testfile.close() + self.assertEqual(7, len(findings)) - finding = findings[0] - self.assertEqual('PYSEC-2021-76 in aiohttp:3.6.2', finding.title) - description = 'aiohttp is an asynchronous HTTP client/server framework for asyncio and Python. In aiohttp before version 3.7.4 there is an open redirect vulnerability. A maliciously crafted link to an aiohttp-based web-server could redirect the browser to a different website. It is caused by a bug in the `aiohttp.web_middlewares.normalize_path_middleware` middleware. This security problem has been fixed in 3.7.4. Upgrade your dependency using pip as follows "pip install aiohttp >= 3.7.4". If upgrading is not an option for you, a workaround can be to avoid using `aiohttp.web_middlewares.normalize_path_middleware` in your applications.' - self.assertEqual(description, finding.description) - self.assertEqual(1352, finding.cwe) - vulnerability_ids = finding.unsaved_vulnerability_ids - self.assertEqual(1, len(vulnerability_ids)) - self.assertEqual('PYSEC-2021-76', vulnerability_ids[0]) - self.assertEqual('Medium', finding.severity) - self.assertEqual('Upgrade to version: 3.7.4', finding.mitigation) - self.assertEqual('aiohttp', finding.component_name) - self.assertEqual('3.6.2', finding.component_version) - self.assertEqual('PYSEC-2021-76', finding.vuln_id_from_tool) + finding = findings[0] + self.assertEqual('PYSEC-2021-76 in aiohttp:3.6.2', finding.title) + description = 'aiohttp is an asynchronous HTTP client/server framework for asyncio and Python. In aiohttp before version 3.7.4 there is an open redirect vulnerability. A maliciously crafted link to an aiohttp-based web-server could redirect the browser to a different website. It is caused by a bug in the `aiohttp.web_middlewares.normalize_path_middleware` middleware. This security problem has been fixed in 3.7.4. Upgrade your dependency using pip as follows "pip install aiohttp >= 3.7.4". If upgrading is not an option for you, a workaround can be to avoid using `aiohttp.web_middlewares.normalize_path_middleware` in your applications.' + self.assertEqual(description, finding.description) + self.assertEqual(1395, finding.cwe) + vulnerability_ids = finding.unsaved_vulnerability_ids + self.assertEqual(1, len(vulnerability_ids)) + self.assertEqual('PYSEC-2021-76', vulnerability_ids[0]) + self.assertEqual('Medium', finding.severity) + self.assertEqual('Upgrade to version: 3.7.4', finding.mitigation) + self.assertEqual('aiohttp', finding.component_name) + self.assertEqual('3.6.2', finding.component_version) + self.assertEqual('PYSEC-2021-76', finding.vuln_id_from_tool) - finding = findings[1] - self.assertEqual('PYSEC-2021-439 in django:3.2.9', finding.title) - description = 'In Django 2.2 before 2.2.25, 3.1 before 3.1.14, and 3.2 before 3.2.10, HTTP requests for URLs with trailing newlines could bypass upstream access control based on URL paths.' 
- self.assertEqual(description, finding.description) - vulnerability_ids = finding.unsaved_vulnerability_ids - self.assertEqual(1, len(vulnerability_ids)) - self.assertEqual('PYSEC-2021-439', vulnerability_ids[0]) - self.assertEqual(1352, finding.cwe) - self.assertEqual('Medium', finding.severity) - mitigation = '''Upgrade to version: -- 2.2.25 -- 3.1.14 -- 3.2.10''' - self.assertEqual(mitigation, finding.mitigation) - self.assertEqual('django', finding.component_name) - self.assertEqual('3.2.9', finding.component_version) - self.assertEqual('PYSEC-2021-439', finding.vuln_id_from_tool) + finding = findings[1] + self.assertEqual('PYSEC-2021-439 in django:3.2.9', finding.title) + description = 'In Django 2.2 before 2.2.25, 3.1 before 3.1.14, and 3.2 before 3.2.10, HTTP requests for URLs with trailing newlines could bypass upstream access control based on URL paths.' + self.assertEqual(description, finding.description) + vulnerability_ids = finding.unsaved_vulnerability_ids + self.assertEqual(1, len(vulnerability_ids)) + self.assertEqual('PYSEC-2021-439', vulnerability_ids[0]) + self.assertEqual(1395, finding.cwe) + self.assertEqual('Medium', finding.severity) + self.assertEqual('django', finding.component_name) + self.assertEqual('3.2.9', finding.component_version) + self.assertEqual('PYSEC-2021-439', finding.vuln_id_from_tool) - finding = findings[2] - self.assertEqual('PYSEC-2021-852 in lxml:4.6.4', finding.title) - description = 'lxml is a library for processing XML and HTML in the Python language. Prior to version 4.6.5, the HTML Cleaner in lxml.html lets certain crafted script content pass through, as well as script content in SVG files embedded using data URIs. Users that employ the HTML cleaner in a security relevant context should upgrade to lxml 4.6.5 to receive a patch. There are no known workarounds available.' - self.assertEqual(description, finding.description) - vulnerability_ids = finding.unsaved_vulnerability_ids - self.assertEqual(1, len(vulnerability_ids)) - self.assertEqual('PYSEC-2021-852', vulnerability_ids[0]) - self.assertEqual(1352, finding.cwe) - self.assertEqual('Medium', finding.severity) - self.assertIsNone(finding.mitigation) - self.assertEqual('lxml', finding.component_name) - self.assertEqual('4.6.4', finding.component_version) - self.assertEqual('PYSEC-2021-852', finding.vuln_id_from_tool) + finding = findings[2] + self.assertEqual('PYSEC-2021-852 in lxml:4.6.4', finding.title) + description = 'lxml is a library for processing XML and HTML in the Python language. Prior to version 4.6.5, the HTML Cleaner in lxml.html lets certain crafted script content pass through, as well as script content in SVG files embedded using data URIs. Users that employ the HTML cleaner in a security relevant context should upgrade to lxml 4.6.5 to receive a patch. There are no known workarounds available.' 
+ self.assertEqual(description, finding.description) + vulnerability_ids = finding.unsaved_vulnerability_ids + self.assertEqual(1, len(vulnerability_ids)) + self.assertEqual('PYSEC-2021-852', vulnerability_ids[0]) + self.assertEqual(1395, finding.cwe) + self.assertEqual('Medium', finding.severity) + self.assertEqual('lxml', finding.component_name) + self.assertEqual('4.6.4', finding.component_version) + self.assertEqual('PYSEC-2021-852', finding.vuln_id_from_tool) - finding = findings[3] - self.assertEqual('PYSEC-2019-128 in twisted:18.9.0', finding.title) + finding = findings[3] + self.assertEqual('PYSEC-2019-128 in twisted:18.9.0', finding.title) - finding = findings[4] - self.assertEqual('PYSEC-2020-260 in twisted:18.9.0', finding.title) + finding = findings[4] + self.assertEqual('PYSEC-2020-260 in twisted:18.9.0', finding.title) - finding = findings[5] - self.assertEqual('PYSEC-2019-129 in twisted:18.9.0', finding.title) + finding = findings[5] + self.assertEqual('PYSEC-2019-129 in twisted:18.9.0', finding.title) - finding = findings[6] - self.assertEqual('PYSEC-2020-259 in twisted:18.9.0', finding.title) + finding = findings[6] + self.assertEqual('PYSEC-2020-259 in twisted:18.9.0', finding.title) From c42fde189e66ca2f504c2fd8a8703f19fe5c9d2c Mon Sep 17 00:00:00 2001 From: biennd4 Date: Wed, 13 Mar 2024 10:03:31 +0700 Subject: [PATCH 10/21] update doc and remove unused var --- .../parsers/file/checkmarx_cxflow_sast.md | 14 ++++++++++++++ dojo/tools/checkmarx_cxflow_sast/parser.py | 5 ----- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/docs/content/en/integrations/parsers/file/checkmarx_cxflow_sast.md b/docs/content/en/integrations/parsers/file/checkmarx_cxflow_sast.md index d8eb9f7b3f..b984b7dd69 100644 --- a/docs/content/en/integrations/parsers/file/checkmarx_cxflow_sast.md +++ b/docs/content/en/integrations/parsers/file/checkmarx_cxflow_sast.md @@ -2,6 +2,20 @@ title: "Checkmarx CxFlow SAST" toc_hide: true --- + +CxFlow is a Spring Boot application written by Checkmarx that enables initiation of scans and result orchestration. +CxFlow supports interaction with various Checkmarx products. +This parser supports the JSON report exported by CxFlow's `Json` bug tracker. + +``` +#YAML
cx-flow: + bug-tracker: Json + +#CLI +--cx-flow.bug-tracker=json +``` + - `Checkmarx CxFlow SAST`: JSON report from Checkmarx CxFlow.
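For illustration, here is a minimal sketch of how this parser is exercised, mirroring the unit tests later in this series; the report path is a placeholder, not a file shipped with the project:

```python
from dojo.models import Test
from dojo.tools.checkmarx_cxflow_sast.parser import CheckmarxCXFlowSastParser

# Load a CxFlow JSON report (as produced by the Json bug tracker)
# and convert it into DefectDojo Finding objects.
with open("cxflow_report.json", encoding="utf-8") as report:  # placeholder path
    findings = CheckmarxCXFlowSastParser().get_findings(report, Test())

for finding in findings:
    # Each finding carries the SAST metadata mapped by the parser.
    print(finding.title, finding.severity, finding.file_path)
```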
### Sample Scan Data diff --git a/dojo/tools/checkmarx_cxflow_sast/parser.py b/dojo/tools/checkmarx_cxflow_sast/parser.py index 8a13a6961b..a7a0c45af0 100644 --- a/dojo/tools/checkmarx_cxflow_sast/parser.py +++ b/dojo/tools/checkmarx_cxflow_sast/parser.py @@ -59,7 +59,6 @@ def get_findings(self, file, test): def _get_findings_json(self, file, test): data = json.load(file) findings = [] - # deepLink = data.get("deepLink") additional_details = data.get("additionalDetails") scan_start_date = additional_details.get("scanStartDate") @@ -135,10 +134,6 @@ def _get_findings_json(self, file, test): return findings - def _get_findings_xml(self): - # TODO: move logic from checkmarx to here - pass - def is_verify(self, state): # Confirmed, urgent verifiedStates = ["2", "3"] From d4723239a4d043b2db326bcd3bbcb47a83c07dad Mon Sep 17 00:00:00 2001 From: d3s34 Date: Tue, 13 Aug 2024 00:51:50 +0700 Subject: [PATCH 11/21] update parser --- dojo/settings/settings.dist.py | 2 +- dojo/tools/checkmarx_cxflow_sast/parser.py | 24 +++++++++++++++---- .../test_checkmarx_cxflow_sast_parser.py | 4 ++-- 3 files changed, 23 insertions(+), 7 deletions(-) diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index f452e3056f..eb6da83c12 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1493,7 +1493,7 @@ def saml2_attrib_map_format(dict): "Kubescape JSON Importer": DEDUPE_ALGO_HASH_CODE, "Kiuwan SCA Scan": DEDUPE_ALGO_HASH_CODE, "Rapplex Scan": DEDUPE_ALGO_HASH_CODE, - "Checkmarx CxFlow SAST": DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, + "Checkmarx CxFlow SAST": DEDUPE_ALGO_HASH_CODE, } # Override the hardcoded settings here via the env var diff --git a/dojo/tools/checkmarx_cxflow_sast/parser.py b/dojo/tools/checkmarx_cxflow_sast/parser.py index a7a0c45af0..989edc1985 100644 --- a/dojo/tools/checkmarx_cxflow_sast/parser.py +++ b/dojo/tools/checkmarx_cxflow_sast/parser.py @@ -1,4 +1,6 @@ import json +from enum import Enum + import dateutil.parser import logging @@ -36,6 +38,20 @@ def __init__(self, sink: _PathNode, source: _PathNode, state: str, paths: [_Path self.paths = paths +class _CheckmarxState(Enum): + def __str__(self): + return str(self.value) + + # 0, 1, 2, 3, 4 + # To verify, Not Exploitable, Confirmed, Urgent, Proposed not exploitable + + ToVerify = 0 + NotExploitable = 1 + Confirmed = 2 + Urgent = 3 + ProposedNotExploitable = 4 + + class CheckmarxCXFlowSastParser(object): def __init__(self): pass @@ -53,6 +69,7 @@ def get_findings(self, file, test): if file.name.strip().lower().endswith(".json"): return self._get_findings_json(file, test) else: + # TODO: support CxXML format logger.warning(f"Not supported file format ${file}") return list() @@ -114,7 +131,6 @@ def _get_findings_json(self, file, test): cwe=int(cwe), date=dateutil.parser.parse(scan_start_date), static_finding=True, - unique_id_from_tool=str(similarity_id) + str(detail_key), test=test, sast_source_object=detail.source.node_object if detail.source is not None else None, sast_sink_object=detail.sink.node_object if detail.sink is not None else None, @@ -136,13 +152,13 @@ def _get_findings_json(self, file, test): def is_verify(self, state): # Confirmed, urgent - verifiedStates = ["2", "3"] + verifiedStates = [_CheckmarxState.ToVerify, _CheckmarxState.Urgent] return state in verifiedStates def is_active(self, state): # To verify, Confirmed, Urgent, Proposed not exploitable - activeStates = ["0", "2", "3", "4"] + activeStates = [_CheckmarxState.ToVerify, _CheckmarxState.Confirmed, _CheckmarxState.Urgent, 
_CheckmarxState.ProposedNotExploitable] return state in activeStates def is_not_exploitable(self, state): - return state == "1" + return state == _CheckmarxState.NotExploitable diff --git a/unittests/tools/test_checkmarx_cxflow_sast_parser.py b/unittests/tools/test_checkmarx_cxflow_sast_parser.py index 2d0d56bdaa..c8fd683e6b 100644 --- a/unittests/tools/test_checkmarx_cxflow_sast_parser.py +++ b/unittests/tools/test_checkmarx_cxflow_sast_parser.py @@ -35,7 +35,7 @@ def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_1_finding(s self.assertIn("Reflected XSS All Clients", finding.title) self.assertEqual(79, finding.cwe) self.assertEqual(dateutil.parser.parse("Sunday, January 19, 2020 2:40:11 AM"), finding.date) - self.assertEqual("14660819" + "88", finding.unique_id_from_tool) + self.assertEqual("14660819", finding.unique_id_from_tool) self.assertEqual("getRawParameter", finding.sast_source_object) self.assertEqual("username", finding.sast_sink_object) self.assertEqual("DOS_Login.java", finding.sast_source_file_path) @@ -54,7 +54,7 @@ def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_1_finding(s self.assertEqual(True, finding.active) self.assertEqual(False, finding.verified) - def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_11_finding(self): + def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_4_findings(self): my_file_handle, product, engagement, test = self.init( get_unit_tests_path() + "/scans/checkmarx_cxflow_sast/4-findings.json" ) From c1592332c6d8b1d6da513ca1a7f3732bee755084 Mon Sep 17 00:00:00 2001 From: d3s34 Date: Tue, 13 Aug 2024 01:32:57 +0700 Subject: [PATCH 12/21] update parser test --- dojo/settings/.settings.dist.py.sha256sum | 2 +- dojo/tools/checkmarx_cxflow_sast/parser.py | 20 +++---------------- .../test_checkmarx_cxflow_sast_parser.py | 2 -- 3 files changed, 4 insertions(+), 20 deletions(-) diff --git a/dojo/settings/.settings.dist.py.sha256sum b/dojo/settings/.settings.dist.py.sha256sum index d02f0c2b04..d62c1b7551 100644 --- a/dojo/settings/.settings.dist.py.sha256sum +++ b/dojo/settings/.settings.dist.py.sha256sum @@ -1 +1 @@ -4483a2efbe3cf1bf1c96c333f9bfc65ffe82a2fb20ed86eb4c4f6726cf41fa94 +4243d412ef50b92ce26fbb490603d57d528003fd608cf93ea92ef89072e933b0 diff --git a/dojo/tools/checkmarx_cxflow_sast/parser.py b/dojo/tools/checkmarx_cxflow_sast/parser.py index 989edc1985..3ee53d9886 100644 --- a/dojo/tools/checkmarx_cxflow_sast/parser.py +++ b/dojo/tools/checkmarx_cxflow_sast/parser.py @@ -38,20 +38,6 @@ def __init__(self, sink: _PathNode, source: _PathNode, state: str, paths: [_Path self.paths = paths -class _CheckmarxState(Enum): - def __str__(self): - return str(self.value) - - # 0, 1, 2, 3, 4 - # To verify, Not Exploitable, Confirmed, Urgent, Proposed not exploitable - - ToVerify = 0 - NotExploitable = 1 - Confirmed = 2 - Urgent = 3 - ProposedNotExploitable = 4 - - class CheckmarxCXFlowSastParser(object): def __init__(self): pass @@ -152,13 +138,13 @@ def _get_findings_json(self, file, test): def is_verify(self, state): # Confirmed, urgent - verifiedStates = [_CheckmarxState.ToVerify, _CheckmarxState.Urgent] + verifiedStates = ["2", "3"] return state in verifiedStates def is_active(self, state): # To verify, Confirmed, Urgent, Proposed not exploitable - activeStates = [_CheckmarxState.ToVerify, _CheckmarxState.Confirmed, _CheckmarxState.Urgent, _CheckmarxState.ProposedNotExploitable] + activeStates = ["0", "2", "3", "4"] return state in activeStates def is_not_exploitable(self, state): - 
return state == _CheckmarxState.NotExploitable + return state == "1" diff --git a/unittests/tools/test_checkmarx_cxflow_sast_parser.py b/unittests/tools/test_checkmarx_cxflow_sast_parser.py index c8fd683e6b..aee7883011 100644 --- a/unittests/tools/test_checkmarx_cxflow_sast_parser.py +++ b/unittests/tools/test_checkmarx_cxflow_sast_parser.py @@ -35,7 +35,6 @@ def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_1_finding(s self.assertIn("Reflected XSS All Clients", finding.title) self.assertEqual(79, finding.cwe) self.assertEqual(dateutil.parser.parse("Sunday, January 19, 2020 2:40:11 AM"), finding.date) - self.assertEqual("14660819", finding.unique_id_from_tool) self.assertEqual("getRawParameter", finding.sast_source_object) self.assertEqual("username", finding.sast_sink_object) self.assertEqual("DOS_Login.java", finding.sast_source_file_path) @@ -65,7 +64,6 @@ def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_4_findings( self.assertIsNotNone(finding.title) self.assertIsNotNone(finding.date) self.assertIsNotNone(finding.sast_source_object) - self.assertIsNotNone(finding.unique_id_from_tool) self.assertIsNotNone(finding.sast_sink_object) self.assertIsNotNone(finding.sast_source_file_path) self.assertIsNotNone(finding.sast_source_line) From 38319f854f857227333f0534aee9615e7c9d3e3f Mon Sep 17 00:00:00 2001 From: d3s34 Date: Tue, 13 Aug 2024 01:39:12 +0700 Subject: [PATCH 13/21] Revert "update parser test" This reverts commit c1592332c6d8b1d6da513ca1a7f3732bee755084. --- dojo/settings/.settings.dist.py.sha256sum | 2 +- dojo/tools/checkmarx_cxflow_sast/parser.py | 20 ++++++++++++++++--- .../test_checkmarx_cxflow_sast_parser.py | 2 ++ 3 files changed, 20 insertions(+), 4 deletions(-) diff --git a/dojo/settings/.settings.dist.py.sha256sum b/dojo/settings/.settings.dist.py.sha256sum index d62c1b7551..d02f0c2b04 100644 --- a/dojo/settings/.settings.dist.py.sha256sum +++ b/dojo/settings/.settings.dist.py.sha256sum @@ -1 +1 @@ -4243d412ef50b92ce26fbb490603d57d528003fd608cf93ea92ef89072e933b0 +4483a2efbe3cf1bf1c96c333f9bfc65ffe82a2fb20ed86eb4c4f6726cf41fa94 diff --git a/dojo/tools/checkmarx_cxflow_sast/parser.py b/dojo/tools/checkmarx_cxflow_sast/parser.py index 3ee53d9886..989edc1985 100644 --- a/dojo/tools/checkmarx_cxflow_sast/parser.py +++ b/dojo/tools/checkmarx_cxflow_sast/parser.py @@ -38,6 +38,20 @@ def __init__(self, sink: _PathNode, source: _PathNode, state: str, paths: [_Path self.paths = paths +class _CheckmarxState(Enum): + def __str__(self): + return str(self.value) + + # 0, 1, 2, 3, 4 + # To verify, Not Exploitable, Confirmed, Urgent, Proposed not exploitable + + ToVerify = 0 + NotExploitable = 1 + Confirmed = 2 + Urgent = 3 + ProposedNotExploitable = 4 + + class CheckmarxCXFlowSastParser(object): def __init__(self): pass @@ -138,13 +152,13 @@ def _get_findings_json(self, file, test): def is_verify(self, state): # Confirmed, urgent - verifiedStates = ["2", "3"] + verifiedStates = [_CheckmarxState.ToVerify, _CheckmarxState.Urgent] return state in verifiedStates def is_active(self, state): # To verify, Confirmed, Urgent, Proposed not exploitable - activeStates = ["0", "2", "3", "4"] + activeStates = [_CheckmarxState.ToVerify, _CheckmarxState.Confirmed, _CheckmarxState.Urgent, _CheckmarxState.ProposedNotExploitable] return state in activeStates def is_not_exploitable(self, state): - return state == "1" + return state == _CheckmarxState.NotExploitable diff --git a/unittests/tools/test_checkmarx_cxflow_sast_parser.py 
b/unittests/tools/test_checkmarx_cxflow_sast_parser.py index aee7883011..c8fd683e6b 100644 --- a/unittests/tools/test_checkmarx_cxflow_sast_parser.py +++ b/unittests/tools/test_checkmarx_cxflow_sast_parser.py @@ -35,6 +35,7 @@ def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_1_finding(s self.assertIn("Reflected XSS All Clients", finding.title) self.assertEqual(79, finding.cwe) self.assertEqual(dateutil.parser.parse("Sunday, January 19, 2020 2:40:11 AM"), finding.date) + self.assertEqual("14660819", finding.unique_id_from_tool) self.assertEqual("getRawParameter", finding.sast_source_object) self.assertEqual("username", finding.sast_sink_object) self.assertEqual("DOS_Login.java", finding.sast_source_file_path) @@ -64,6 +65,7 @@ def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_4_findings( self.assertIsNotNone(finding.title) self.assertIsNotNone(finding.date) self.assertIsNotNone(finding.sast_source_object) + self.assertIsNotNone(finding.unique_id_from_tool) self.assertIsNotNone(finding.sast_sink_object) self.assertIsNotNone(finding.sast_source_file_path) self.assertIsNotNone(finding.sast_source_line) From 7584d829286bef88f1cf51ef1b55b9fb198fd02a Mon Sep 17 00:00:00 2001 From: d3s34 Date: Tue, 13 Aug 2024 01:50:11 +0700 Subject: [PATCH 14/21] fix ruff --- dojo/settings/.settings.dist.py.sha256sum | 2 +- dojo/tools/checkmarx_cxflow_sast/parser.py | 36 ++++++------------- .../test_checkmarx_cxflow_sast_parser.py | 2 -- 3 files changed, 12 insertions(+), 28 deletions(-) diff --git a/dojo/settings/.settings.dist.py.sha256sum b/dojo/settings/.settings.dist.py.sha256sum index d02f0c2b04..9ff92e607a 100644 --- a/dojo/settings/.settings.dist.py.sha256sum +++ b/dojo/settings/.settings.dist.py.sha256sum @@ -1 +1 @@ -4483a2efbe3cf1bf1c96c333f9bfc65ffe82a2fb20ed86eb4c4f6726cf41fa94 +9520e62bd6427c3beeb843031c5c75877c1a2e748f009313f942a1e64c03b9f1 diff --git a/dojo/tools/checkmarx_cxflow_sast/parser.py b/dojo/tools/checkmarx_cxflow_sast/parser.py index 989edc1985..fc27007d6f 100644 --- a/dojo/tools/checkmarx_cxflow_sast/parser.py +++ b/dojo/tools/checkmarx_cxflow_sast/parser.py @@ -1,15 +1,14 @@ import json -from enum import Enum +import logging import dateutil.parser -import logging from dojo.models import Finding logger = logging.getLogger(__name__) -class _PathNode(object): +class _PathNode: def __init__(self, file: str, line: str, column: str, node_object: str, length: str, snippet: str): self.file = file self.line = line @@ -30,7 +29,7 @@ def from_json_object(cls, data): ) -class _Path(object): +class _Path: def __init__(self, sink: _PathNode, source: _PathNode, state: str, paths: [_PathNode]): self.sink = sink self.source = source @@ -38,21 +37,7 @@ def __init__(self, sink: _PathNode, source: _PathNode, state: str, paths: [_Path self.paths = paths -class _CheckmarxState(Enum): - def __str__(self): - return str(self.value) - - # 0, 1, 2, 3, 4 - # To verify, Not Exploitable, Confirmed, Urgent, Proposed not exploitable - - ToVerify = 0 - NotExploitable = 1 - Confirmed = 2 - Urgent = 3 - ProposedNotExploitable = 4 - - -class CheckmarxCXFlowSastParser(object): +class CheckmarxCXFlowSastParser: def __init__(self): pass @@ -71,7 +56,7 @@ def get_findings(self, file, test): else: # TODO: support CxXML format logger.warning(f"Not supported file format ${file}") - return list() + return [] def _get_findings_json(self, file, test): data = json.load(file) @@ -106,7 +91,7 @@ def _get_findings_json(self, file, test): sink=_PathNode.from_json_object(result.get("sink")), 
source=_PathNode.from_json_object(result.get("source")), state=result.get("state"), - paths=list([result[k] for k in path_keys]) + paths=[result[k] for k in path_keys] ) map_paths[str(path.source.line)] = path @@ -140,7 +125,8 @@ def _get_findings_json(self, file, test): severity=severity, file_path=filename, line=detail.sink.line, - false_p=issue.get("details")[detail_key].get("falsePositive") or self.is_not_exploitable(detail.state), + false_p=issue.get("details")[detail_key].get("falsePositive") or self.is_not_exploitable( + detail.state), description=finding_detail, verified=self.is_verify(detail.state), active=self.is_active(detail.state) @@ -152,13 +138,13 @@ def _get_findings_json(self, file, test): def is_verify(self, state): # Confirmed, urgent - verifiedStates = [_CheckmarxState.ToVerify, _CheckmarxState.Urgent] + verifiedStates = ["2", "3"] return state in verifiedStates def is_active(self, state): # To verify, Confirmed, Urgent, Proposed not exploitable - activeStates = [_CheckmarxState.ToVerify, _CheckmarxState.Confirmed, _CheckmarxState.Urgent, _CheckmarxState.ProposedNotExploitable] + activeStates = ["0", "2", "3", "4"] return state in activeStates def is_not_exploitable(self, state): - return state == _CheckmarxState.NotExploitable + return state == "1" diff --git a/unittests/tools/test_checkmarx_cxflow_sast_parser.py b/unittests/tools/test_checkmarx_cxflow_sast_parser.py index c8fd683e6b..aee7883011 100644 --- a/unittests/tools/test_checkmarx_cxflow_sast_parser.py +++ b/unittests/tools/test_checkmarx_cxflow_sast_parser.py @@ -35,7 +35,6 @@ def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_1_finding(s self.assertIn("Reflected XSS All Clients", finding.title) self.assertEqual(79, finding.cwe) self.assertEqual(dateutil.parser.parse("Sunday, January 19, 2020 2:40:11 AM"), finding.date) - self.assertEqual("14660819", finding.unique_id_from_tool) self.assertEqual("getRawParameter", finding.sast_source_object) self.assertEqual("username", finding.sast_sink_object) self.assertEqual("DOS_Login.java", finding.sast_source_file_path) @@ -65,7 +64,6 @@ def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_4_findings( self.assertIsNotNone(finding.title) self.assertIsNotNone(finding.date) self.assertIsNotNone(finding.sast_source_object) - self.assertIsNotNone(finding.unique_id_from_tool) self.assertIsNotNone(finding.sast_sink_object) self.assertIsNotNone(finding.sast_source_file_path) self.assertIsNotNone(finding.sast_source_line) From 64acef33b571ec5dee0e875cecdd82430fe82d5d Mon Sep 17 00:00:00 2001 From: Nguyen Dinh Bien <44922242+biennd279@users.noreply.github.com> Date: Tue, 13 Aug 2024 02:01:01 +0700 Subject: [PATCH 15/21] Update .settings.dist.py.sha256sum --- dojo/settings/.settings.dist.py.sha256sum | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dojo/settings/.settings.dist.py.sha256sum b/dojo/settings/.settings.dist.py.sha256sum index 9ff92e607a..d62c1b7551 100644 --- a/dojo/settings/.settings.dist.py.sha256sum +++ b/dojo/settings/.settings.dist.py.sha256sum @@ -1 +1 @@ -9520e62bd6427c3beeb843031c5c75877c1a2e748f009313f942a1e64c03b9f1 +4243d412ef50b92ce26fbb490603d57d528003fd608cf93ea92ef89072e933b0 From 8f5faa8370527db28dc656605d69e5e20b425bed Mon Sep 17 00:00:00 2001 From: d3s34 Date: Fri, 16 Aug 2024 21:33:59 +0700 Subject: [PATCH 16/21] fix ruff --- dojo/settings/.settings.dist.py.sha256sum | 2 +- dojo/settings/settings.dist.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/dojo/settings/.settings.dist.py.sha256sum b/dojo/settings/.settings.dist.py.sha256sum index efc7f51c4f..f3047cd7ee 100644 --- a/dojo/settings/.settings.dist.py.sha256sum +++ b/dojo/settings/.settings.dist.py.sha256sum @@ -1 +1 @@ -6074aab5c4927284f160b4ab0fbb2d4ba6368bca191b28af0af46cd8f90d41ae +d47510ff327c701251e96ef4f6145ed2cdad639180912876bfb2f5d1780e833d diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 31706d9bd0..05aa96f3af 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1272,7 +1272,7 @@ def saml2_attrib_map_format(dict): "Kiuwan SCA Scan": ["description", "severity", "component_name", "component_version", "cwe"], "Rapplex Scan": ["title", "endpoints", "severity"], "AppCheck Web Application Scanner": ["title", "severity"], - "Checkmarx CxFlow SAST": ["vuln_id_from_tool", "file_path", "line"] + "Checkmarx CxFlow SAST": ["vuln_id_from_tool", "file_path", "line"], } # Override the hardcoded settings here via the env var From 3c1128ed27ebd05a24a2b8ee0733a95a1f98c6f9 Mon Sep 17 00:00:00 2001 From: d3s34 Date: Fri, 16 Aug 2024 21:35:19 +0700 Subject: [PATCH 17/21] fix ruff --- dojo/tools/checkmarx_cxflow_sast/parser.py | 6 +++--- .../test_checkmarx_cxflow_sast_parser.py | 20 +++++++++---------- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/dojo/tools/checkmarx_cxflow_sast/parser.py b/dojo/tools/checkmarx_cxflow_sast/parser.py index fc27007d6f..1e18821bc6 100644 --- a/dojo/tools/checkmarx_cxflow_sast/parser.py +++ b/dojo/tools/checkmarx_cxflow_sast/parser.py @@ -25,7 +25,7 @@ def from_json_object(cls, data): data.get("column"), data.get("object"), data.get("length"), - data.get("snippet") + data.get("snippet"), ) @@ -91,7 +91,7 @@ def _get_findings_json(self, file, test): sink=_PathNode.from_json_object(result.get("sink")), source=_PathNode.from_json_object(result.get("source")), state=result.get("state"), - paths=[result[k] for k in path_keys] + paths=[result[k] for k in path_keys], ) map_paths[str(path.source.line)] = path @@ -129,7 +129,7 @@ def _get_findings_json(self, file, test): detail.state), description=finding_detail, verified=self.is_verify(detail.state), - active=self.is_active(detail.state) + active=self.is_active(detail.state), ) findings.append(finding) diff --git a/unittests/tools/test_checkmarx_cxflow_sast_parser.py b/unittests/tools/test_checkmarx_cxflow_sast_parser.py index aee7883011..19626997c5 100644 --- a/unittests/tools/test_checkmarx_cxflow_sast_parser.py +++ b/unittests/tools/test_checkmarx_cxflow_sast_parser.py @@ -1,9 +1,9 @@ -from dojo.models import Product, Engagement, Test -from dojo.tools.checkmarx_cxflow_sast.parser import CheckmarxCXFlowSastParser -from ..dojo_test_case import DojoTestCase, get_unit_tests_path - import dateutil.parser +from dojo.models import Engagement, Product, Test +from dojo.tools.checkmarx_cxflow_sast.parser import CheckmarxCXFlowSastParser +from unittests.dojo_test_case import DojoTestCase, get_unit_tests_path + class TestCheckmarxCxflowSast(DojoTestCase): @@ -17,16 +17,16 @@ def init(self, reportFilename): return my_file_handle, product, engagement, test def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_no_findings(self): - my_file_handle, product, engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx_cxflow_sast/no_finding.json" + my_file_handle, _, _, test = self.init( + get_unit_tests_path() + "/scans/checkmarx_cxflow_sast/no_finding.json", ) parser = CheckmarxCXFlowSastParser() findings = 
parser.get_findings(my_file_handle, test) self.assertEqual(0, len(findings)) def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_1_finding(self): - my_file_handle, product, engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx_cxflow_sast/1-finding.json" + my_file_handle, _, _, test = self.init( + get_unit_tests_path() + "/scans/checkmarx_cxflow_sast/1-finding.json", ) parser = CheckmarxCXFlowSastParser() findings = parser.get_findings(my_file_handle, test) @@ -54,8 +54,8 @@ def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_1_finding(s self.assertEqual(False, finding.verified) def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_4_findings(self): - my_file_handle, product, engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx_cxflow_sast/4-findings.json" + my_file_handle, _, _, test = self.init( + get_unit_tests_path() + "/scans/checkmarx_cxflow_sast/4-findings.json", ) parser = CheckmarxCXFlowSastParser() findings = parser.get_findings(my_file_handle, test) From 4d99d7ef20950d5681283a88eb1c58362fa55e1e Mon Sep 17 00:00:00 2001 From: biennd4 Date: Sat, 14 Sep 2024 21:04:23 +0700 Subject: [PATCH 18/21] fix ruff #n --- unittests/tools/test_checkmarx_cxflow_sast_parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/unittests/tools/test_checkmarx_cxflow_sast_parser.py b/unittests/tools/test_checkmarx_cxflow_sast_parser.py index 19626997c5..7481002e3d 100644 --- a/unittests/tools/test_checkmarx_cxflow_sast_parser.py +++ b/unittests/tools/test_checkmarx_cxflow_sast_parser.py @@ -8,7 +8,7 @@ class TestCheckmarxCxflowSast(DojoTestCase): def init(self, reportFilename): - my_file_handle = open(reportFilename) + my_file_handle = open(reportFilename, encoding="utf-8") product = Product() engagement = Engagement() test = Test() From 1972263d26743645bdce9d7871898f6525ed260c Mon Sep 17 00:00:00 2001 From: biennd4 Date: Sat, 14 Sep 2024 23:38:49 +0700 Subject: [PATCH 19/21] trigger ci From 37658a23d1aea1edbea773402405b3e86b26f585 Mon Sep 17 00:00:00 2001 From: biennd4 Date: Fri, 20 Sep 2024 15:22:08 +0700 Subject: [PATCH 20/21] trigger ci From f2502014b4f6e2cb8b5369537cbea2759b4dd441 Mon Sep 17 00:00:00 2001 From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Mon, 6 Jan 2025 17:05:54 -0600 Subject: [PATCH 21/21] Fix ruff --- dojo/tools/checkmarx_cxflow_sast/parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dojo/tools/checkmarx_cxflow_sast/parser.py b/dojo/tools/checkmarx_cxflow_sast/parser.py index 29283c8933..292bbfc7c5 100644 --- a/dojo/tools/checkmarx_cxflow_sast/parser.py +++ b/dojo/tools/checkmarx_cxflow_sast/parser.py @@ -95,7 +95,7 @@ def _get_findings_json(self, file, test): map_paths[str(path.source.line)] = path - for detail_key in issue.get("details").keys(): + for detail_key in issue.get("details"): if detail_key not in map_paths: logger.warning(f"{detail_key} not found in path, ignore") else:
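A note on the state handling that patches 11-14 flip between an Enum and plain strings: the string form is what ultimately ships, because `detail.state` is read straight out of the CxFlow JSON as a string code, so the Enum members would never compare equal to it (the Enum variant's `is_verify` also listed `ToVerify` where its comment said Confirmed). Below is a minimal sketch of the final mapping, with the state labels taken from the parser's own comments; the `STATE_LABELS` dict and the helper name are illustrative only, not part of the parser:

```python
# Checkmarx "state" codes arrive in the CxFlow JSON as strings.
STATE_LABELS = {
    "0": "To Verify",
    "1": "Not Exploitable",
    "2": "Confirmed",
    "3": "Urgent",
    "4": "Proposed Not Exploitable",
}

def triage_flags(state: str) -> dict:
    """Illustrative helper mirroring is_verify/is_active/is_not_exploitable."""
    return {
        "verified": state in ("2", "3"),          # Confirmed, Urgent
        "active": state in ("0", "2", "3", "4"),  # every state except Not Exploitable
        "false_p": state == "1",                  # Not Exploitable
    }

# A "Not Exploitable" result is inactive and flagged as a false positive.
assert triage_flags("1") == {"verified": False, "active": False, "false_p": True}
```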