From eb5dec88af9cb655b7f832708c4e44d8fe6374a8 Mon Sep 17 00:00:00 2001 From: kiblik <5609770+kiblik@users.noreply.github.com> Date: Sat, 4 Jan 2025 13:17:52 +0100 Subject: [PATCH] Ruff: Fix PTH118, merge PTH11 --- dojo/forms.py | 2 +- dojo/jira_link/helper.py | 5 +- dojo/models.py | 3 +- dojo/settings/settings.dist.py | 4 +- dojo/tools/factory.py | 2 +- dojo/views.py | 5 +- ruff.toml | 2 +- tests/file_test.py | 6 +-- tests/finding_test.py | 4 +- tests/ibm_appscan_test.py | 2 +- unittests/dojo_test_case.py | 8 +-- unittests/test_endpoint_meta_import.py | 2 +- unittests/test_factory.py | 11 ++-- unittests/test_import_reimport.py | 4 +- unittests/test_importers_importer.py | 4 +- unittests/test_jira_config_engagement_epic.py | 2 +- unittests/test_jira_import_and_pushing_api.py | 2 +- unittests/test_parsers.py | 22 ++++---- .../tools/test_anchore_enterprise_parser.py | 9 ++-- unittests/tools/test_api_cobalt_parser.py | 2 +- .../tools/test_api_sonarqube_importer.py | 18 +++---- unittests/tools/test_api_sonarqube_parser.py | 2 +- unittests/tools/test_appspider_parser.py | 3 +- unittests/tools/test_asff_parser.py | 3 +- unittests/tools/test_awssecurityhub_parser.py | 29 +++++------ .../test_blackduck_binary_analysis_parser.py | 6 +-- .../test_blackduck_component_risk_parser.py | 2 +- unittests/tools/test_blackduck_parser.py | 12 ++--- unittests/tools/test_bundler_audit_parser.py | 5 +- unittests/tools/test_burp_api_parser.py | 6 +-- unittests/tools/test_burp_dastardly_parser.py | 3 +- .../tools/test_burp_enterprise_parser.py | 5 +- unittests/tools/test_burp_graphql_parser.py | 13 +++-- unittests/tools/test_burp_parser.py | 11 ++-- unittests/tools/test_checkmarx_osa_parser.py | 16 +++--- unittests/tools/test_checkmarx_parser.py | 48 +++++++++--------- unittests/tools/test_codechecker_parser.py | 8 +-- .../tools/test_crashtest_security_parser.py | 2 +- unittests/tools/test_dawnscanner_parser.py | 3 +- .../tools/test_dependency_check_parser.py | 3 +- 
.../tools/test_dependency_track_parser.py | 16 +++--- unittests/tools/test_dockerbench_parser.py | 6 +-- unittests/tools/test_gitleaks_parser.py | 14 +++--- unittests/tools/test_horusec_parser.py | 13 +++-- unittests/tools/test_huskyci_parser.py | 6 +-- unittests/tools/test_kubebench_parser.py | 8 +-- unittests/tools/test_kubescape_parser.py | 6 +-- unittests/tools/test_legitify_parser.py | 6 +-- unittests/tools/test_mend_parser.py | 2 +- .../test_microfocus_webinspect_parser.py | 8 +-- unittests/tools/test_nancy_parser.py | 7 ++- .../tools/test_neuvector_compliance_parser.py | 7 ++- unittests/tools/test_neuvector_parser.py | 7 ++- .../tools/test_npm_audit_7_plus_parser.py | 9 ++-- unittests/tools/test_npm_audit_parser.py | 19 ++++--- unittests/tools/test_ort_parser.py | 2 +- .../tools/test_ossindex_devaudit_parser.py | 30 +++++------ unittests/tools/test_osv_scanner_parser.py | 7 ++- unittests/tools/test_outpost24_parser.py | 6 +-- .../test_php_symfony_security_check_parser.py | 6 +-- .../test_qualys_hacker_guardian_parser.py | 7 ++- .../test_qualys_infrascan_webgui_parser.py | 8 +-- unittests/tools/test_qualys_parser.py | 16 +++--- unittests/tools/test_qualys_webapp_parser.py | 6 +-- unittests/tools/test_rapplex_parser.py | 7 ++- unittests/tools/test_sarif_parser.py | 50 +++++++++---------- .../tools/test_solar_appscreener_parser.py | 6 +-- unittests/tools/test_sonarqube_parser.py | 42 ++++++++-------- unittests/tools/test_spotbugs_parser.py | 18 +++---- unittests/tools/test_sslyze_parser.py | 27 +++++----- unittests/tools/test_tenable_parser.py | 19 ++++--- .../tools/test_threat_composer_parser.py | 17 ++++--- unittests/tools/test_trivy_operator_parser.py | 3 +- unittests/tools/test_trivy_parser.py | 3 +- unittests/tools/test_trufflehog3_parser.py | 3 +- unittests/tools/test_trufflehog_parser.py | 3 +- .../tools/test_trustwave_fusion_api_parser.py | 4 +- unittests/tools/test_trustwave_parser.py | 3 +- unittests/tools/test_twistlock_parser.py | 20 ++++---- 
unittests/tools/test_xanitizer_parser.py | 2 +- 80 files changed, 358 insertions(+), 390 deletions(-) diff --git a/dojo/forms.py b/dojo/forms.py index 334a958e93..a4306b141e 100644 --- a/dojo/forms.py +++ b/dojo/forms.py @@ -2400,7 +2400,7 @@ def get_jira_issue_template_dir_choices(): for dirname in dirnames: clean_base_dir = base_dir.removeprefix(settings.TEMPLATE_DIR_PREFIX) - template_dir_list.append((os.path.join(clean_base_dir, dirname), dirname)) + template_dir_list.append((str(Path(clean_base_dir) / dirname), dirname)) logger.debug("templates: %s", template_dir_list) return template_dir_list diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py index 308331987a..d9fa17d0b3 100644 --- a/dojo/jira_link/helper.py +++ b/dojo/jira_link/helper.py @@ -1,7 +1,6 @@ import io import json import logging -import os from pathlib import Path from typing import Any @@ -333,8 +332,8 @@ def get_jira_issue_template(obj): template_dir = "issue-trackers/jira_full/" if isinstance(obj, Finding_Group): - return os.path.join(template_dir, "jira-finding-group-description.tpl") - return os.path.join(template_dir, "jira-description.tpl") + return Path(template_dir) / "jira-finding-group-description.tpl" + return Path(template_dir) / "jira-description.tpl" def get_jira_creation(obj): diff --git a/dojo/models.py b/dojo/models.py index 99074a9cf3..53c1d79382 100644 --- a/dojo/models.py +++ b/dojo/models.py @@ -2,7 +2,6 @@ import copy import hashlib import logging -import os import re import warnings from datetime import datetime @@ -149,7 +148,7 @@ def __call__(self, model_instance, filename): filename += ext if self.directory is None: return filename - return os.path.join(now().strftime(self.directory), filename) + return Path(now().strftime(self.directory)) / filename class Regulation(models.Model): diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 4f72fa171c..6315a45628 100644 --- a/dojo/settings/settings.dist.py +++ 
b/dojo/settings/settings.dist.py @@ -442,7 +442,7 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param # Put strings here, like "/home/html/static" or "C:/www/django/static". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. - os.path.join(Path(DOJO_ROOT).parent, "components", "node_modules"), + Path(DOJO_ROOT).parent / "components" / "node_modules", ) # List of finder classes that know how to find static files in @@ -949,7 +949,7 @@ def saml2_attrib_map_format(dict): "entityid": str(SAML2_ENTITY_ID), # directory with attribute mapping - "attribute_map_dir": path.join(BASEDIR, "attribute-maps"), + "attribute_map_dir": Path(BASEDIR) / "attribute-maps", # do now discard attributes not specified in attribute-maps "allow_unknown_attributes": SAML_ALLOW_UNKNOWN_ATTRIBUTES, # this block states what services we provide diff --git a/dojo/tools/factory.py b/dojo/tools/factory.py index b69fea12ac..daddd62f00 100644 --- a/dojo/tools/factory.py +++ b/dojo/tools/factory.py @@ -117,7 +117,7 @@ def requires_tool_type(scan_type): package_dir = str(Path(__file__).resolve().parent) for module_name in os.listdir(package_dir): # noqa: PTH208 # check if it's dir - if Path(os.path.join(package_dir, module_name)).is_dir(): + if (Path(package_dir) / module_name).is_dir(): try: # check if it's a Python module if find_spec(f"dojo.tools.{module_name}.parser"): diff --git a/dojo/views.py b/dojo/views.py index df65be4d6b..8149671618 100644 --- a/dojo/views.py +++ b/dojo/views.py @@ -1,5 +1,4 @@ import logging -import os from pathlib import Path from auditlog.models import LogEntry @@ -151,7 +150,7 @@ def manage_files(request, oid, obj_type): for o in files_formset.deleted_objects: logger.debug("removing file: %s", o.file.name) - Path(os.path.join(settings.MEDIA_ROOT, o.file.name)).unlink() + (Path(settings.MEDIA_ROOT) / o.file.name).unlink() for o in files_formset.new_objects: logger.debug("adding file: %s", 
o.file.name) @@ -162,7 +161,7 @@ def manage_files(request, oid, obj_type): finding__isnull=True) for o in orphan_files: logger.debug("purging orphan file: %s", o.file.name) - Path(os.path.join(settings.MEDIA_ROOT, o.file.name)).unlink() + (Path(settings.MEDIA_ROOT) / o.file.name).unlink() o.delete() messages.add_message( diff --git a/ruff.toml b/ruff.toml index 12b556d5cf..dfda6140c8 100644 --- a/ruff.toml +++ b/ruff.toml @@ -66,7 +66,7 @@ select = [ "TCH", "INT", "ARG003", "ARG004", "ARG005", - "PTH2", "PTH101", "PTH102", "PTH103", "PTH104", "PTH105", "PTH106", "PTH107", "PTH108", "PTH109", "PTH110", "PTH111", "PTH112", "PTH113", "PTH114", "PTH115", "PTH116", "PTH117", "PTH119", "PTH120", "PTH121", "PTH122", "PTH124", + "PTH2", "PTH101", "PTH102", "PTH103", "PTH104", "PTH105", "PTH106", "PTH107", "PTH108", "PTH109", "PTH11", "PTH120", "PTH121", "PTH122", "PTH124", "TD001", "TD004", "TD005", "PD", "PGH", diff --git a/tests/file_test.py b/tests/file_test.py index 686f133e4d..8084fd8daa 100644 --- a/tests/file_test.py +++ b/tests/file_test.py @@ -35,7 +35,7 @@ def test_add_file_finding_level(self): driver.find_element(By.LINK_TEXT, "Manage Files").click() # select first file input field: form-0-image # Set full image path for image file 'strange.png - image_path = os.path.join(dir_path, "finding_image.png") + image_path = str(Path(dir_path) / "finding_image.png") driver.find_element(By.ID, "id_form-0-title").send_keys("Finding Title") driver.find_element(By.ID, "id_form-0-file").send_keys(image_path) # Save uploaded image @@ -76,7 +76,7 @@ def test_add_file_test_level(self): driver.find_element(By.NAME, "Manage Files").click() # select first file input field: form-0-image # Set full image path for image file 'strange.png - image_path = os.path.join(dir_path, "finding_image.png") + image_path = str(Path(dir_path) / "finding_image.png") driver.find_element(By.ID, "id_form-0-title").send_keys("Test Title") driver.find_element(By.ID, "id_form-0-file").send_keys(image_path) # Save 
uploaded image @@ -116,7 +116,7 @@ def test_add_file_engagement_level(self): driver.find_element(By.NAME, "Manage Files").click() # select first file input field: form-0-image # Set full image path for image file 'strange.png - image_path = os.path.join(dir_path, "finding_image.png") + image_path = str(Path(dir_path) / "finding_image.png") driver.find_element(By.ID, "id_form-0-title").send_keys("Engagement Title") driver.find_element(By.ID, "id_form-0-file").send_keys(image_path) # Save uploaded image diff --git a/tests/finding_test.py b/tests/finding_test.py index 4e08744c5e..0adf78f8aa 100644 --- a/tests/finding_test.py +++ b/tests/finding_test.py @@ -146,7 +146,7 @@ def test_add_image(self): driver.find_element(By.LINK_TEXT, "Manage Files").click() # select first file input field: form-0-image # Set full image path for image file 'strange.png - image_path = os.path.join(dir_path, "finding_image.png") + image_path = str(Path(dir_path) / "finding_image.png") driver.find_element(By.ID, "id_form-0-file").send_keys(image_path) driver.find_element(By.ID, "id_form-0-title").send_keys("Image Title") # Save uploaded image @@ -466,7 +466,7 @@ def test_import_scan_result(self): # Select `Default` as the Environment Select(driver.find_element(By.ID, "id_environment")).select_by_visible_text("Development") # upload scan file - file_path = os.path.join(dir_path, "zap_sample.xml") + file_path = str(Path(dir_path) / "zap_sample.xml") driver.find_element(By.NAME, "file").send_keys(file_path) # Click Submit button with WaitForPageLoad(driver, timeout=50): diff --git a/tests/ibm_appscan_test.py b/tests/ibm_appscan_test.py index 451e387db1..f03305bd6f 100644 --- a/tests/ibm_appscan_test.py +++ b/tests/ibm_appscan_test.py @@ -31,7 +31,7 @@ def test_import_ibm_app_scan_result(self): # Select `Default` as the Environment Select(driver.find_element(By.ID, "id_environment")).select_by_visible_text("Development") # Upload Scan result file - scanner_file = os.path.join(dir_path, 
"ibm_appscan_xml_file.xml") + scanner_file = str(Path(dir_path) / "ibm_appscan_xml_file.xml") driver.find_element(By.NAME, "file").send_keys(scanner_file) # click on upload button driver.find_elements(By.CSS_SELECTOR, "button.btn.btn-primary")[1].click() diff --git a/unittests/dojo_test_case.py b/unittests/dojo_test_case.py index d22073e273..f9b6ee4d7c 100644 --- a/unittests/dojo_test_case.py +++ b/unittests/dojo_test_case.py @@ -40,7 +40,7 @@ def get_unit_tests_path(): - return str(Path(os.path.realpath(__file__)).parent) + return Path(os.path.realpath(__file__)).parent def toggle_system_setting_boolean(flag_name, value): @@ -504,7 +504,7 @@ def import_scan_with_params(self, filename, scan_type="ZAP Scan", engagement=1, product_name=None, product_type_name=None, auto_create_context=None, expected_http_status_code=201, test_title=None, scan_date=None, service=None, forceActive=True, forceVerified=True): - with open(get_unit_tests_path() + "/" + filename, encoding="utf-8") as testfile: + with open(get_unit_tests_path() / filename, encoding="utf-8") as testfile: payload = { "minimum_severity": minimum_severity, "active": active, @@ -556,7 +556,7 @@ def import_scan_with_params(self, filename, scan_type="ZAP Scan", engagement=1, def reimport_scan_with_params(self, test_id, filename, scan_type="ZAP Scan", engagement=1, minimum_severity="Low", active=True, verified=False, push_to_jira=None, tags=None, close_old_findings=True, group_by=None, engagement_name=None, scan_date=None, product_name=None, product_type_name=None, auto_create_context=None, expected_http_status_code=201, test_title=None): - with open(get_unit_tests_path() + "/" + filename, encoding="utf-8") as testfile: + with open(get_unit_tests_path() / filename, encoding="utf-8") as testfile: payload = { "minimum_severity": minimum_severity, "active": active, @@ -605,7 +605,7 @@ def reimport_scan_with_params(self, test_id, filename, scan_type="ZAP Scan", eng def endpoint_meta_import_scan_with_params(self, 
filename, product=1, product_name=None, create_endpoints=True, create_tags=True, create_dojo_meta=True, expected_http_status_code=201): - with open(get_unit_tests_path() + "/" + filename, encoding="utf-8") as testfile: + with open(get_unit_tests_path() / filename, encoding="utf-8") as testfile: payload = { "create_endpoints": create_endpoints, "create_tags": create_tags, diff --git a/unittests/test_endpoint_meta_import.py b/unittests/test_endpoint_meta_import.py index d159dbd4f2..deee013055 100644 --- a/unittests/test_endpoint_meta_import.py +++ b/unittests/test_endpoint_meta_import.py @@ -206,7 +206,7 @@ def endpoint_meta_import_ui(self, product, payload): def endpoint_meta_import_scan_with_params_ui(self, filename, product=1, create_endpoints=True, create_tags=True, create_dojo_meta=True, expected_http_status_code=201): - with open(get_unit_tests_path() + "/" + filename, encoding="utf-8") as testfile: + with open(get_unit_tests_path() / filename, encoding="utf-8") as testfile: payload = { "create_endpoints": create_endpoints, "create_tags": create_tags, diff --git a/unittests/test_factory.py b/unittests/test_factory.py index 5d8b4040dd..6b24e5f760 100644 --- a/unittests/test_factory.py +++ b/unittests/test_factory.py @@ -1,5 +1,4 @@ import logging -import os from importlib import import_module from importlib.util import find_spec from inspect import isclass @@ -16,25 +15,25 @@ class TestFactory(DojoTestCase): def test_get_parser(self): with self.subTest(scan_type="Acunetix Scan"): scan_type = "Acunetix Scan" - testfile = open(get_unit_tests_path() + "/scans/acunetix/one_finding.xml", encoding="utf-8") + testfile = open(get_unit_tests_path() / "scans/acunetix/one_finding.xml", encoding="utf-8") parser = get_parser(scan_type) parser.get_findings(testfile, Test()) testfile.close() with self.subTest(scan_type="Anchore Engine Scan"): scan_type = "Anchore Engine Scan" - testfile = open(get_unit_tests_path() + "/scans/anchore_engine/one_vuln.json", 
encoding="utf-8") + testfile = open(get_unit_tests_path() / "scans/anchore_engine/one_vuln.json", encoding="utf-8") parser = get_parser(scan_type) parser.get_findings(testfile, Test()) testfile.close() with self.subTest(scan_type="Tenable Scan"): scan_type = "Tenable Scan" - testfile = open(get_unit_tests_path() + "/scans/tenable/nessus/nessus_v_unknown.xml", encoding="utf-8") + testfile = open(get_unit_tests_path() / "scans/tenable/nessus/nessus_v_unknown.xml", encoding="utf-8") parser = get_parser(scan_type) parser.get_findings(testfile, Test()) testfile.close() with self.subTest(scan_type="ZAP Scan"): scan_type = "ZAP Scan" - testfile = open(get_unit_tests_path() + "/scans/zap/some_2.9.0.xml", encoding="utf-8") + testfile = open(get_unit_tests_path() / "scans/zap/some_2.9.0.xml", encoding="utf-8") parser = get_parser(scan_type) parser.get_findings(testfile, Test()) testfile.close() @@ -73,7 +72,7 @@ def test_parser_name_matches_module(self): for module_name in module_names: if module_name in excluded_parsers: continue - if Path(os.path.join(package_dir, module_name)).is_dir(): + if (Path(package_dir) / module_name).is_dir(): found = False if find_spec(f"dojo.tools.{module_name}.parser"): module = import_module(f"dojo.tools.{module_name}.parser") diff --git a/unittests/test_import_reimport.py b/unittests/test_import_reimport.py index 02548ccb57..bc70330226 100644 --- a/unittests/test_import_reimport.py +++ b/unittests/test_import_reimport.py @@ -1823,7 +1823,7 @@ def import_scan_with_params_ui(self, filename, scan_type="ZAP Scan", engagement= elif not verified: verifiedPayload = "force_to_false" - with open(get_unit_tests_path() + filename, encoding="utf-8") as testfile: + with open(get_unit_tests_path() / filename, encoding="utf-8") as testfile: payload = { "minimum_severity": minimum_severity, "active": activePayload, @@ -1861,7 +1861,7 @@ def reimport_scan_with_params_ui(self, test_id, filename, scan_type="ZAP Scan", if not verified: verifiedPayload = 
"force_to_false" - with open(get_unit_tests_path() + filename, encoding="utf-8") as testfile: + with open(get_unit_tests_path() / filename, encoding="utf-8") as testfile: payload = { "minimum_severity": minimum_severity, "active": activePayload, diff --git a/unittests/test_importers_importer.py b/unittests/test_importers_importer.py index 41baf6d78e..a33249b5a1 100644 --- a/unittests/test_importers_importer.py +++ b/unittests/test_importers_importer.py @@ -39,7 +39,7 @@ class TestDojoDefaultImporter(DojoTestCase): def test_parse_findings(self): - with open(get_unit_tests_path() + "/scans/acunetix/one_finding.xml", encoding="utf-8") as scan: + with open(get_unit_tests_path() / "scans/acunetix/one_finding.xml", encoding="utf-8") as scan: scan_type = "Acunetix Scan" user, _created = User.objects.get_or_create(username="admin") product_type, _created = Product_Type.objects.get_or_create(name="test") @@ -80,7 +80,7 @@ def test_parse_findings(self): self.assertIn(finding.numerical_severity, ["S0", "S1", "S2", "S3", "S4"]) def test_import_scan(self): - with open(get_unit_tests_path() + "/scans/sarif/spotbugs.sarif", encoding="utf-8") as scan: + with open(get_unit_tests_path() / "scans/sarif/spotbugs.sarif", encoding="utf-8") as scan: scan_type = SarifParser().get_scan_types()[0] # SARIF format implement the new method user, _ = User.objects.get_or_create(username="admin") product_type, _ = Product_Type.objects.get_or_create(name="test2") diff --git a/unittests/test_jira_config_engagement_epic.py b/unittests/test_jira_config_engagement_epic.py index 7b6b753416..35153d88e9 100644 --- a/unittests/test_jira_config_engagement_epic.py +++ b/unittests/test_jira_config_engagement_epic.py @@ -27,7 +27,7 @@ def _get_vcr(self, **kwargs): my_vcr.record_mode = "once" my_vcr.path_transformer = VCR.ensure_suffix(".yaml") my_vcr.filter_headers = ["Authorization", "X-Atlassian-Token"] - my_vcr.cassette_library_dir = get_unit_tests_path() + "/vcr/jira/" + my_vcr.cassette_library_dir = 
get_unit_tests_path() / "vcr/jira/" # filters headers doesn't seem to work for cookies, so use callbacks to filter cookies from being recorded my_vcr.before_record_request = self.before_record_request my_vcr.before_record_response = self.before_record_response diff --git a/unittests/test_jira_import_and_pushing_api.py b/unittests/test_jira_import_and_pushing_api.py index eeba03f974..8a9b601970 100644 --- a/unittests/test_jira_import_and_pushing_api.py +++ b/unittests/test_jira_import_and_pushing_api.py @@ -50,7 +50,7 @@ def _get_vcr(self, **kwargs): my_vcr.record_mode = "once" my_vcr.path_transformer = VCR.ensure_suffix(".yaml") my_vcr.filter_headers = ["Authorization", "X-Atlassian-Token"] - my_vcr.cassette_library_dir = get_unit_tests_path() + "/vcr/jira/" + my_vcr.cassette_library_dir = get_unit_tests_path() / "vcr/jira/" # filters headers doesn't seem to work for cookies, so use callbacks to filter cookies from being recorded my_vcr.before_record_request = self.before_record_request my_vcr.before_record_response = self.before_record_response diff --git a/unittests/test_parsers.py b/unittests/test_parsers.py index 2e61c48273..3ce370da93 100644 --- a/unittests/test_parsers.py +++ b/unittests/test_parsers.py @@ -5,13 +5,13 @@ from .dojo_test_case import DojoTestCase, get_unit_tests_path -basedir = os.path.join(get_unit_tests_path(), "..") +basedir = get_unit_tests_path().parent @test_tag("parser-supplement-tests") class TestParsers(DojoTestCase): def test_file_existence(self): - for parser_dir in os.scandir(os.path.join(basedir, "dojo", "tools")): + for parser_dir in os.scandir(Path(basedir) / "dojo" / "tools"): if parser_dir.is_file() or parser_dir.name == "__pycache__": continue # this is not parser dir but some support file @@ -28,7 +28,7 @@ def test_file_existence(self): "wizcli_common_parsers", # common class for other wizcli parsers ]: with self.subTest(parser=parser_dir.name, category="docs"): - doc_file = os.path.join(basedir, "docs", "content", "en", 
"connecting_your_tools", "parsers", category, f"{doc_name}.md") + doc_file = Path(basedir) / "docs" / "content" / "en" / "connecting_your_tools" / "parsers" / category / f"{doc_name}.md" self.assertTrue( Path(doc_file).is_file(), f"Documentation file '{doc_file}' is missing or using different name", @@ -53,7 +53,7 @@ def test_file_existence(self): "wizcli_common_parsers", # common class for other wizcli parsers ]: with self.subTest(parser=parser_dir.name, category="parser"): - parser_test_file = os.path.join(basedir, "unittests", "tools", f"test_{parser_dir.name}_parser.py") + parser_test_file = Path(basedir) / "unittests" / "tools" / f"test_{parser_dir.name}_parser.py" self.assertTrue( Path(parser_test_file).is_file(), f"Unittest of parser '{parser_test_file}' is missing or using different name", @@ -64,7 +64,7 @@ def test_file_existence(self): "wizcli_common_parsers", # common class for other wizcli parsers ]: with self.subTest(parser=parser_dir.name, category="testfiles"): - scan_dir = os.path.join(basedir, "unittests", "scans", parser_dir.name) + scan_dir = Path(basedir) / "unittests" / "scans" / parser_dir.name self.assertTrue( Path(scan_dir).is_dir(), f"Test files for unittest of parser '{scan_dir}' are missing or using different name", @@ -76,14 +76,14 @@ def test_file_existence(self): "api_vulners", # TODO: tests should be implemented also for this parser ]: with self.subTest(parser=parser_dir.name, category="importer"): - importer_test_file = os.path.join(basedir, "unittests", "tools", f"test_{parser_dir.name}_importer.py") + importer_test_file = Path(basedir) / "unittests" / "tools" / f"test_{parser_dir.name}_importer.py" self.assertTrue( Path(importer_test_file).is_file(), f"Unittest of importer '{importer_test_file}' is missing or using different name", ) - for file in os.scandir(os.path.join(basedir, "dojo", "tools", parser_dir.name)): + for file in os.scandir(Path(basedir) / "dojo" / "tools" / parser_dir.name): if file.is_file() and file.name != 
"__pycache__" and file.name != "__init__.py": - f = os.path.join(basedir, "dojo", "tools", parser_dir.name, file.name) + f = Path(basedir) / "dojo" / "tools" / parser_dir.name / file.name read_true = False with open(f, encoding="utf-8") as f: i = 0 @@ -93,7 +93,7 @@ def test_file_existence(self): read_true = False i = 0 elif i > 4: - self.assertTrue(expr=False, msg="In file " + str(os.path.join("dojo", "tools", parser_dir.name, file.name)) + " the test is failing because you don't have utf-8 after .read()") + self.assertTrue(expr=False, msg="In file " + str(Path("dojo") / "tools" / parser_dir.name / file.name) + " the test is failing because you don't have utf-8 after .read()") i = 0 read_true = False else: @@ -103,12 +103,12 @@ def test_file_existence(self): i = 0 def test_parser_existence(self): - for docs in os.scandir(os.path.join(basedir, "docs", "content", "en", "connecting_your_tools", "parsers", "file")): + for docs in os.scandir(Path(basedir) / "docs" / "content" / "en" / "connecting_your_tools" / "parsers" / "file"): if docs.name not in [ "_index.md", "codeql.md", "edgescan.md", ]: with self.subTest(parser=docs.name.split(".md")[0], category="parser"): - parser = os.path.join(basedir, "dojo", "tools", f"{docs.name.split('.md')[0]}", "parser.py") + parser = Path(basedir) / "dojo" / "tools" / f"{docs.name.split('.md')[0]}" / "parser.py" self.assertTrue( Path(parser).is_file(), f"Parser '{parser}' is missing or using different name", diff --git a/unittests/tools/test_anchore_enterprise_parser.py b/unittests/tools/test_anchore_enterprise_parser.py index 6025fb736a..e25a97cf8e 100644 --- a/unittests/tools/test_anchore_enterprise_parser.py +++ b/unittests/tools/test_anchore_enterprise_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Test @@ -8,19 +7,19 @@ class TestAnchoreEnterpriseParser(DojoTestCase): def test_anchore_policy_check_parser_has_no_findings(self): - with open(path.join(Path(__file__).parent, 
"../scans/anchore_enterprise/no_checks.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/anchore_enterprise/no_checks.json", encoding="utf-8") as testfile: parser = AnchoreEnterpriseParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(0, len(findings)) def test_anchore_policy_check_parser_has_one_finding(self): - with open(path.join(Path(__file__).parent, "../scans/anchore_enterprise/one_check.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/anchore_enterprise/one_check.json", encoding="utf-8") as testfile: parser = AnchoreEnterpriseParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) def test_anchore_policy_check_parser_has_multiple_findings(self): - with open(path.join(Path(__file__).parent, "../scans/anchore_enterprise/many_checks.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/anchore_enterprise/many_checks.json", encoding="utf-8") as testfile: parser = AnchoreEnterpriseParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(57, len(findings)) @@ -29,7 +28,7 @@ def test_anchore_policy_check_parser_has_multiple_findings(self): self.assertEqual("CVE-2015-2992", finding.unsaved_vulnerability_ids[0]) def test_anchore_policy_check_parser_invalid_format(self): - with open(path.join(Path(__file__).parent, "../scans/anchore_enterprise/invalid_checks_format.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/anchore_enterprise/invalid_checks_format.json", encoding="utf-8") as testfile: with self.assertRaises(Exception): parser = AnchoreEnterpriseParser() parser.get_findings(testfile, Test()) diff --git a/unittests/tools/test_api_cobalt_parser.py b/unittests/tools/test_api_cobalt_parser.py index afb45d902f..3ed186b782 100644 --- a/unittests/tools/test_api_cobalt_parser.py +++ b/unittests/tools/test_api_cobalt_parser.py @@ -268,7 +268,7 @@ def 
test_cobalt_api_parser_with_wont_fix_finding(self): @patch("dojo.tools.api_cobalt.importer.CobaltApiImporter.get_findings") def test_cobalt_api_parser_with_api(self, mock): - with open(get_unit_tests_path() + "/scans/api_cobalt/cobalt_api_many_vul.json", encoding="utf-8") as api_findings_file: + with open(get_unit_tests_path() / "scans/api_cobalt/cobalt_api_many_vul.json", encoding="utf-8") as api_findings_file: api_findings = json.load(api_findings_file) mock.return_value = api_findings diff --git a/unittests/tools/test_api_sonarqube_importer.py b/unittests/tools/test_api_sonarqube_importer.py index 2c5564fbec..f8ee3870e6 100644 --- a/unittests/tools/test_api_sonarqube_importer.py +++ b/unittests/tools/test_api_sonarqube_importer.py @@ -9,47 +9,47 @@ def dummy_product(self, *args, **kwargs): - with open(get_unit_tests_path() + "/scans/api_sonarqube/product.json", encoding="utf-8") as json_file: + with open(get_unit_tests_path() / "scans/api_sonarqube/product.json", encoding="utf-8") as json_file: return json.load(json_file) def dummy_issues(self, *args, **kwargs): - with open(get_unit_tests_path() + "/scans/api_sonarqube/issues.json", encoding="utf-8") as json_file: + with open(get_unit_tests_path() / "scans/api_sonarqube/issues.json", encoding="utf-8") as json_file: return json.load(json_file) def dummy_rule(self, *args, **kwargs): - with open(get_unit_tests_path() + "/scans/api_sonarqube/rule.json", encoding="utf-8") as json_file: + with open(get_unit_tests_path() / "scans/api_sonarqube/rule.json", encoding="utf-8") as json_file: return json.load(json_file) def dummy_rule_wo_html_desc(self, *args, **kwargs): - with open(get_unit_tests_path() + "/scans/api_sonarqube/rule_wo_html_desc.json", encoding="utf-8") as json_file: + with open(get_unit_tests_path() / "scans/api_sonarqube/rule_wo_html_desc.json", encoding="utf-8") as json_file: return json.load(json_file) def dummy_no_hotspot(self, *args, **kwargs): - with open(get_unit_tests_path() + 
"/scans/api_sonarqube/hotspots/no_vuln.json", encoding="utf-8") as json_file: + with open(get_unit_tests_path() / "scans/api_sonarqube/hotspots/no_vuln.json", encoding="utf-8") as json_file: return json.load(json_file) def dummy_one_hotspot(self, *args, **kwargs): - with open(get_unit_tests_path() + "/scans/api_sonarqube/hotspots/one_vuln.json", encoding="utf-8") as json_file: + with open(get_unit_tests_path() / "scans/api_sonarqube/hotspots/one_vuln.json", encoding="utf-8") as json_file: return json.load(json_file) def dummy_many_hotspots(self, *args, **kwargs): - with open(get_unit_tests_path() + "/scans/api_sonarqube/hotspots/many_vulns.json", encoding="utf-8") as json_file: + with open(get_unit_tests_path() / "scans/api_sonarqube/hotspots/many_vulns.json", encoding="utf-8") as json_file: return json.load(json_file) def dummy_hotspot_rule(self, *args, **kwargs): - with open(get_unit_tests_path() + "/scans/api_sonarqube/hotspots/rule.json", encoding="utf-8") as json_file: + with open(get_unit_tests_path() / "scans/api_sonarqube/hotspots/rule.json", encoding="utf-8") as json_file: return json.load(json_file) def dummy_hotspot_rule_wo_risk_description(self, *args, **kwargs): - with open(get_unit_tests_path() + "/scans/api_sonarqube/hotspots/rule_wo_risk_description.json", encoding="utf-8") as json_file: + with open(get_unit_tests_path() / "scans/api_sonarqube/hotspots/rule_wo_risk_description.json", encoding="utf-8") as json_file: return json.load(json_file) diff --git a/unittests/tools/test_api_sonarqube_parser.py b/unittests/tools/test_api_sonarqube_parser.py index 176219291a..c6fc2734c1 100644 --- a/unittests/tools/test_api_sonarqube_parser.py +++ b/unittests/tools/test_api_sonarqube_parser.py @@ -30,7 +30,7 @@ def dummy_rule(self, *args, **kwargs): def dummy_hotspot_rule(self, *args, **kwargs): - with open(get_unit_tests_path() + "/scans/api_sonarqube/hotspots/rule.json", encoding="utf-8") as json_file: + with open(get_unit_tests_path() / 
"scans/api_sonarqube/hotspots/rule.json", encoding="utf-8") as json_file: return json.load(json_file) diff --git a/unittests/tools/test_appspider_parser.py b/unittests/tools/test_appspider_parser.py index 207db2d2d6..08204a7fa9 100644 --- a/unittests/tools/test_appspider_parser.py +++ b/unittests/tools/test_appspider_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Engagement, Finding, Product, Test @@ -11,7 +10,7 @@ def test_appspider_parser_has_one_finding(self): test = Test() test.engagement = Engagement() test.engagement.product = Product() - testfile = open(path.join(Path(__file__).parent, "../scans/appspider/one_vuln.xml"), encoding="utf-8") + testfile = open(Path(__file__).parent / "../scans/appspider/one_vuln.xml", encoding="utf-8") parser = AppSpiderParser() findings = parser.get_findings(testfile, test) for finding in findings: diff --git a/unittests/tools/test_asff_parser.py b/unittests/tools/test_asff_parser.py index fe01bb06cf..2887050623 100644 --- a/unittests/tools/test_asff_parser.py +++ b/unittests/tools/test_asff_parser.py @@ -1,5 +1,4 @@ import json -import os.path from datetime import datetime from dojo.models import Endpoint, Test @@ -8,7 +7,7 @@ def sample_path(file_name): - return os.path.join(get_unit_tests_path(), "scans/asff", file_name) + return get_unit_tests_path() / "scans/asff" / file_name class TestAsffParser(DojoTestCase): diff --git a/unittests/tools/test_awssecurityhub_parser.py b/unittests/tools/test_awssecurityhub_parser.py index 5885852b34..8ae82f0abf 100644 --- a/unittests/tools/test_awssecurityhub_parser.py +++ b/unittests/tools/test_awssecurityhub_parser.py @@ -1,4 +1,5 @@ -import os.path + +from pathlib import Path from dojo.models import Test from dojo.tools.awssecurityhub.parser import AwsSecurityHubParser @@ -6,13 +7,13 @@ def sample_path(file_name: str): - return os.path.join("/scans/awssecurityhub", file_name) + return Path("scans/awssecurityhub") / file_name class 
TestAwsSecurityHubParser(DojoTestCase): def test_one_finding(self): - with open(get_unit_tests_path() + sample_path("config_one_finding.json"), encoding="utf-8") as test_file: + with open(get_unit_tests_path() / sample_path("config_one_finding.json"), encoding="utf-8") as test_file: parser = AwsSecurityHubParser() findings = parser.get_findings(test_file, Test()) self.assertEqual(1, len(findings)) @@ -23,7 +24,7 @@ def test_one_finding(self): self.assertEqual("https://docs.aws.amazon.com/console/securityhub/IAM.5/remediation", finding.references) def test_one_finding_active(self): - with open(get_unit_tests_path() + sample_path("config_one_finding_active.json"), encoding="utf-8") as test_file: + with open(get_unit_tests_path() / sample_path("config_one_finding_active.json"), encoding="utf-8") as test_file: parser = AwsSecurityHubParser() findings = parser.get_findings(test_file, Test()) self.assertEqual(1, len(findings)) @@ -33,7 +34,7 @@ def test_one_finding_active(self): self.assertTrue(finding.active) def test_many_findings(self): - with open(get_unit_tests_path() + sample_path("config_many_findings.json"), encoding="utf-8") as test_file: + with open(get_unit_tests_path() / sample_path("config_many_findings.json"), encoding="utf-8") as test_file: parser = AwsSecurityHubParser() findings = parser.get_findings(test_file, Test()) self.assertEqual(3, len(findings)) @@ -42,13 +43,13 @@ def test_many_findings(self): self.assertEqual("This is a Security Hub Finding \nThis AWS control checks whether AWS Multi-Factor Authentication (MFA) is enabled for all AWS Identity and Access Management (IAM) users that use a console password.\n**AWS Finding ARN:** arn:aws:securityhub:us-east-1:012345678912:subscription/aws-foundational-security-best-practices/v/1.0.0/IAM.5/finding/de861909-2d26-4e45-bd86-19d2ab6ceef1\n**Resource IDs:** AWS::::Account:012345678912\n**AwsAccountId:** 012345678912\n**Generator ID:** aws-foundational-security-best-practices/v/1.0.0/IAM.5\n", 
finding.description) def test_repeated_findings(self): - with open(get_unit_tests_path() + sample_path("config_repeated_findings.json"), encoding="utf-8") as test_file: + with open(get_unit_tests_path() / sample_path("config_repeated_findings.json"), encoding="utf-8") as test_file: parser = AwsSecurityHubParser() findings = parser.get_findings(test_file, Test()) self.assertEqual(1, len(findings)) def test_unique_id(self): - with open(get_unit_tests_path() + sample_path("config_one_finding.json"), encoding="utf-8") as test_file: + with open(get_unit_tests_path() / sample_path("config_one_finding.json"), encoding="utf-8") as test_file: parser = AwsSecurityHubParser() findings = parser.get_findings(test_file, Test()) self.assertEqual( @@ -57,7 +58,7 @@ def test_unique_id(self): ) def test_inspector_ec2(self): - with open(get_unit_tests_path() + sample_path("inspector_ec2_cve.json"), encoding="utf-8") as test_file: + with open(get_unit_tests_path() / sample_path("inspector_ec2_cve.json"), encoding="utf-8") as test_file: parser = AwsSecurityHubParser() findings = parser.get_findings(test_file, Test()) self.assertEqual(5, len(findings)) @@ -71,7 +72,7 @@ def test_inspector_ec2(self): self.assertEqual("AwsEc2Instance arn:aws:ec2:us-east-1:XXXXXXXXXXXX:i-11111111111111111", endpoint.host) def test_inspector_ec2_with_no_vulnerabilities(self): - with open(get_unit_tests_path() + sample_path("inspector_ec2_cve_no_vulnerabilities.json"), encoding="utf-8") as test_file: + with open(get_unit_tests_path() / sample_path("inspector_ec2_cve_no_vulnerabilities.json"), encoding="utf-8") as test_file: parser = AwsSecurityHubParser() findings = parser.get_findings(test_file, Test()) self.assertEqual(1, len(findings)) @@ -79,7 +80,7 @@ def test_inspector_ec2_with_no_vulnerabilities(self): self.assertEqual(finding.component_name, "AwsEc2Instance") def test_inspector_ec2_ghsa(self): - with open(get_unit_tests_path() + sample_path("inspector_ec2_ghsa.json"), encoding="utf-8") as test_file: 
+ with open(get_unit_tests_path() / sample_path("inspector_ec2_ghsa.json"), encoding="utf-8") as test_file: parser = AwsSecurityHubParser() findings = parser.get_findings(test_file, Test()) self.assertEqual(1, len(findings)) @@ -94,7 +95,7 @@ def test_inspector_ec2_ghsa(self): self.assertEqual("AwsEc2Instance arn:aws:ec2:eu-central-1:012345678912:instance/i-07c11cc535d830123", endpoint.host) def test_inspector_ecr(self): - with open(get_unit_tests_path() + sample_path("inspector_ecr.json"), encoding="utf-8") as test_file: + with open(get_unit_tests_path() / sample_path("inspector_ecr.json"), encoding="utf-8") as test_file: parser = AwsSecurityHubParser() findings = parser.get_findings(test_file, Test()) self.assertEqual(7, len(findings)) @@ -111,7 +112,7 @@ def test_inspector_ecr(self): self.assertEqual("AwsEcrContainerImage arn:aws:ecr:eu-central-1:123456789012:repository/repo-os/sha256:af965ef68c78374a5f987fce98c0ddfa45801df2395bf012c50b863e65978d74", endpoint.host) def test_guardduty(self): - with open(get_unit_tests_path() + sample_path("guardduty.json"), encoding="utf-8") as test_file: + with open(get_unit_tests_path() / sample_path("guardduty.json"), encoding="utf-8") as test_file: parser = AwsSecurityHubParser() findings = parser.get_findings(test_file, Test()) self.assertEqual(4, len(findings)) @@ -128,7 +129,7 @@ def test_guardduty(self): self.assertEqual("This is a GuardDuty Finding\nAPIs commonly used in Discovery tactics were invoked by user AssumedRole : 123123123, under anomalous circumstances. 
Such activity is not typically seen from this user.\n**AWS Finding ARN:** arn:aws:guardduty:us-east-1:123456789012:detector/123456789/finding/2123123123123\n**SourceURL:** [https://us-east-1.console.aws.amazon.com/guardduty/home?region=us-east-1#/findings?macros=current&fId=2123123123123](https://us-east-1.console.aws.amazon.com/guardduty/home?region=us-east-1#/findings?macros=current&fId=2123123123123)\n**AwsAccountId:** 123456789012\n**Region:** us-east-1\n**Generator ID:** arn:aws:guardduty:us-east-1:123456789012:detector/123456789\n", finding.description) def test_issue_10956(self): - with open(get_unit_tests_path() + sample_path("issue_10956.json"), encoding="utf-8") as test_file: + with open(get_unit_tests_path() / sample_path("issue_10956.json"), encoding="utf-8") as test_file: parser = AwsSecurityHubParser() findings = parser.get_findings(test_file, Test()) self.assertEqual(1, len(findings)) @@ -136,7 +137,7 @@ def test_issue_10956(self): self.assertEqual("0.00239", finding.epss_score) def test_missing_account_id(self): - with open(get_unit_tests_path() + sample_path("missing_account_id.json"), encoding="utf-8") as test_file: + with open(get_unit_tests_path() / sample_path("missing_account_id.json"), encoding="utf-8") as test_file: parser = AwsSecurityHubParser() findings = parser.get_findings(test_file, Test()) self.assertEqual(1, len(findings)) diff --git a/unittests/tools/test_blackduck_binary_analysis_parser.py b/unittests/tools/test_blackduck_binary_analysis_parser.py index 92d92c111b..90ccbb7ae3 100644 --- a/unittests/tools/test_blackduck_binary_analysis_parser.py +++ b/unittests/tools/test_blackduck_binary_analysis_parser.py @@ -7,13 +7,13 @@ class TestBlackduckBinaryAnalysisParser(DojoTestCase): def test_parse_no_vulns(self): - testfile = Path(get_unit_tests_path() + "/scans/blackduck_binary_analysis/no_vuln.csv") + testfile = Path(get_unit_tests_path() / "scans/blackduck_binary_analysis/no_vuln.csv") parser = BlackduckBinaryAnalysisParser() 
findings = parser.get_findings(testfile, Test()) self.assertEqual(0, len(findings)) def test_parse_one_vuln(self): - testfile = Path(get_unit_tests_path() + "/scans/blackduck_binary_analysis/one_vuln.csv") + testfile = Path(get_unit_tests_path() / "scans/blackduck_binary_analysis/one_vuln.csv") parser = BlackduckBinaryAnalysisParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -46,7 +46,7 @@ def test_parse_one_vuln(self): self.assertIsNotNone(finding.unique_id_from_tool) def test_parse_many_vulns(self): - testfile = Path(get_unit_tests_path() + "/scans/blackduck_binary_analysis/many_vulns.csv") + testfile = Path(get_unit_tests_path() / "scans/blackduck_binary_analysis/many_vulns.csv") parser = BlackduckBinaryAnalysisParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(5, len(findings)) diff --git a/unittests/tools/test_blackduck_component_risk_parser.py b/unittests/tools/test_blackduck_component_risk_parser.py index ccb613ce9e..335aeaeecb 100644 --- a/unittests/tools/test_blackduck_component_risk_parser.py +++ b/unittests/tools/test_blackduck_component_risk_parser.py @@ -8,7 +8,7 @@ class TestBlackduckComponentRiskParser(DojoTestCase): def test_blackduck_enhanced_zip_upload(self): testfile = Path( - get_unit_tests_path() + "/scans/blackduck_component_risk/" + get_unit_tests_path() / "scans/blackduck_component_risk/" "blackduck_hub_component_risk.zip", ) parser = BlackduckComponentRiskParser() diff --git a/unittests/tools/test_blackduck_parser.py b/unittests/tools/test_blackduck_parser.py index d2d16c6942..30a95ecaaa 100644 --- a/unittests/tools/test_blackduck_parser.py +++ b/unittests/tools/test_blackduck_parser.py @@ -7,19 +7,19 @@ class TestBlackduckHubParser(DojoTestCase): def test_blackduck_csv_parser_has_no_finding(self): - testfile = Path(get_unit_tests_path() + "/scans/blackduck/no_vuln.csv") + testfile = Path(get_unit_tests_path() / "scans/blackduck/no_vuln.csv") parser = BlackduckParser() 
findings = parser.get_findings(testfile, Test()) self.assertEqual(0, len(findings)) def test_blackduck_csv_parser_has_one_finding(self): - testfile = Path(get_unit_tests_path() + "/scans/blackduck/one_vuln.csv") + testfile = Path(get_unit_tests_path() / "scans/blackduck/one_vuln.csv") parser = BlackduckParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) def test_blackduck_csv_parser_has_many_findings(self): - testfile = Path(get_unit_tests_path() + "/scans/blackduck/many_vulns.csv") + testfile = Path(get_unit_tests_path() / "scans/blackduck/many_vulns.csv") parser = BlackduckParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(24, len(findings)) @@ -32,7 +32,7 @@ def test_blackduck_csv_parser_has_many_findings(self): self.assertEqual(findings[2].component_version, "4.5.2") def test_blackduck_csv_parser_new_format_has_many_findings(self): - testfile = Path(get_unit_tests_path() + "/scans/blackduck/many_vulns_new_format.csv") + testfile = Path(get_unit_tests_path() / "scans/blackduck/many_vulns_new_format.csv") parser = BlackduckParser() findings = parser.get_findings(testfile, Test()) findings = list(findings) @@ -44,7 +44,7 @@ def test_blackduck_csv_parser_new_format_has_many_findings(self): def test_blackduck_enhanced_has_many_findings(self): testfile = Path( - get_unit_tests_path() + "/scans/blackduck/blackduck_enhanced_py3_unittest.zip", + get_unit_tests_path() / "scans/blackduck/blackduck_enhanced_py3_unittest.zip", ) parser = BlackduckParser() findings = parser.get_findings(testfile, Test()) @@ -52,7 +52,7 @@ def test_blackduck_enhanced_has_many_findings(self): def test_blackduck_enhanced_zip_upload(self): testfile = Path( - get_unit_tests_path() + "/scans/blackduck/blackduck_enhanced_py3_unittest_v2.zip", + get_unit_tests_path() / "scans/blackduck/blackduck_enhanced_py3_unittest_v2.zip", ) parser = BlackduckParser() findings = parser.get_findings(testfile, Test()) diff --git 
a/unittests/tools/test_bundler_audit_parser.py b/unittests/tools/test_bundler_audit_parser.py index 2fa4b4bc31..1391aa66b9 100644 --- a/unittests/tools/test_bundler_audit_parser.py +++ b/unittests/tools/test_bundler_audit_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Test @@ -8,7 +7,7 @@ class TestBundlerAuditParser(DojoTestCase): def test_get_findings(self): - with open(path.join(Path(__file__).parent, "../scans/bundler_audit/bundler-audit_v0.6.1.txt"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/bundler_audit/bundler-audit_v0.6.1.txt", encoding="utf-8") as testfile: parser = BundlerAuditParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(2, len(findings)) @@ -30,7 +29,7 @@ def test_get_findings(self): self.assertEqual("2.2.3", finding.component_version) def test_get_findings_version9(self): - with open(path.join(Path(__file__).parent, "../scans/bundler_audit/version_9.0.txt"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/bundler_audit/version_9.0.txt", encoding="utf-8") as testfile: parser = BundlerAuditParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(4, len(findings)) diff --git a/unittests/tools/test_burp_api_parser.py b/unittests/tools/test_burp_api_parser.py index af34ecac7a..0b806c4e48 100644 --- a/unittests/tools/test_burp_api_parser.py +++ b/unittests/tools/test_burp_api_parser.py @@ -6,7 +6,7 @@ class TestParser(DojoTestCase): def test_example_report(self): - testfile = get_unit_tests_path() + "/scans/burp_api/example.json" + testfile = get_unit_tests_path() / "scans/burp_api/example.json" with open(testfile, encoding="utf-8") as f: parser = BurpApiParser() findings = parser.get_findings(f, Test()) @@ -24,7 +24,7 @@ def test_example_report(self): self.assertIsNotNone(item.impact) def test_validate_more(self): - testfile = get_unit_tests_path() + "/scans/burp_api/many_vulns.json" + testfile = 
get_unit_tests_path() / "scans/burp_api/many_vulns.json" with open(testfile, encoding="utf-8") as f: parser = BurpApiParser() findings = parser.get_findings(f, Test()) @@ -61,7 +61,7 @@ def test_convert_confidence(self): self.assertIsNone(convert_confidence({})) def test_fix_issue_9128(self): - testfile = get_unit_tests_path() + "/scans/burp_api/fix_issue_9128.json" + testfile = get_unit_tests_path() / "scans/burp_api/fix_issue_9128.json" with open(testfile, encoding="utf-8") as f: parser = BurpApiParser() findings = parser.get_findings(f, Test()) diff --git a/unittests/tools/test_burp_dastardly_parser.py b/unittests/tools/test_burp_dastardly_parser.py index 8bf959b23c..eb8c2c9876 100644 --- a/unittests/tools/test_burp_dastardly_parser.py +++ b/unittests/tools/test_burp_dastardly_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Test @@ -9,7 +8,7 @@ class TestBurpParser(DojoTestCase): def test_burp_dastardly_multiple_findings(self): - with open(path.join(Path(__file__).parent, "../scans/burp_dastardly/many_findings.xml"), encoding="utf-8") as test_file: + with open(Path(__file__).parent / "../scans/burp_dastardly/many_findings.xml", encoding="utf-8") as test_file: parser = BurpDastardlyParser() findings = parser.get_findings(test_file, Test()) for finding in findings: diff --git a/unittests/tools/test_burp_enterprise_parser.py b/unittests/tools/test_burp_enterprise_parser.py index cc0ce744e0..6ed2c2a4b9 100644 --- a/unittests/tools/test_burp_enterprise_parser.py +++ b/unittests/tools/test_burp_enterprise_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Test @@ -9,7 +8,7 @@ class TestBurpEnterpriseParser(DojoTestCase): def test_burp_enterprise_with_multiple_vulns(self): - with open(path.join(Path(__file__).parent, "../scans/burp_enterprise/many_vulns.html"), encoding="utf-8") as test_file: + with open(Path(__file__).parent / "../scans/burp_enterprise/many_vulns.html", 
encoding="utf-8") as test_file: parser = BurpEnterpriseParser() findings = parser.get_findings(test_file, Test()) for finding in findings: @@ -38,7 +37,7 @@ def test_burp_enterprise_with_multiple_vulns(self): self.assertIn("**Issue detail**:\nFingerprint Details:\n\nWAF Type : redacted\nWAF tech. details : Cloud-based CDN, WAF & DDoS prevention", finding.description) def test_burp_enterprise_with_multiple_vulns_newer_format(self): - with open(path.join(Path(__file__).parent, "../scans/burp_enterprise/many_vulns_updated_format.html"), encoding="utf-8") as test_file: + with open(Path(__file__).parent / "../scans/burp_enterprise/many_vulns_updated_format.html", encoding="utf-8") as test_file: parser = BurpEnterpriseParser() findings = parser.get_findings(test_file, Test()) for finding in findings: diff --git a/unittests/tools/test_burp_graphql_parser.py b/unittests/tools/test_burp_graphql_parser.py index e163c52fea..c4736caf8c 100644 --- a/unittests/tools/test_burp_graphql_parser.py +++ b/unittests/tools/test_burp_graphql_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Test @@ -9,7 +8,7 @@ class TestBurpGraphQLParser(DojoTestCase): def test_burp_one_finding(self): - with open(path.join(Path(__file__).parent, "../scans/burp_graphql/one_finding.json"), encoding="utf-8") as test_file: + with open(Path(__file__).parent / "../scans/burp_graphql/one_finding.json", encoding="utf-8") as test_file: parser = BurpGraphQLParser() findings = parser.get_findings(test_file, Test()) for finding in findings: @@ -34,7 +33,7 @@ def test_burp_one_finding(self): self.assertIn("CWE-79", findings[0].references) def test_burp_two_findings(self): - with open(path.join(Path(__file__).parent, "../scans/burp_graphql/two_findings.json"), encoding="utf-8") as test_file: + with open(Path(__file__).parent / "../scans/burp_graphql/two_findings.json", encoding="utf-8") as test_file: parser = BurpGraphQLParser() findings = parser.get_findings(test_file, 
Test()) for finding in findings: @@ -50,27 +49,27 @@ def test_burp_two_findings(self): self.assertIn("description 3", findings[1].description) def test_burp_no_findings(self): - with open(path.join(Path(__file__).parent, "../scans/burp_graphql/no_findings.json"), encoding="utf-8") as test_file: + with open(Path(__file__).parent / "../scans/burp_graphql/no_findings.json", encoding="utf-8") as test_file: parser = BurpGraphQLParser() findings = parser.get_findings(test_file, Test()) self.assertEqual(0, len(findings)) def test_burp_null_title(self): - with open(path.join(Path(__file__).parent, "../scans/burp_graphql/null_title.json"), encoding="utf-8") as test_file: + with open(Path(__file__).parent / "../scans/burp_graphql/null_title.json", encoding="utf-8") as test_file: with self.assertRaises(ValueError): parser = BurpGraphQLParser() parser.get_findings(test_file, Test()) def test_burp_null_request_segments(self): - with open(path.join(Path(__file__).parent, "../scans/burp_graphql/null_request_segments.json"), encoding="utf-8") as test_file: + with open(Path(__file__).parent / "../scans/burp_graphql/null_request_segments.json", encoding="utf-8") as test_file: parser = BurpGraphQLParser() findings = parser.get_findings(test_file, Test()) self.assertEqual(1, len(findings)) def test_burp_null_data(self): - with open(path.join(Path(__file__).parent, "../scans/burp_graphql/null_data.json"), encoding="utf-8") as test_file: + with open(Path(__file__).parent / "../scans/burp_graphql/null_data.json", encoding="utf-8") as test_file: parser = BurpGraphQLParser() findings = parser.get_findings(test_file, Test()) for finding in findings: diff --git a/unittests/tools/test_burp_parser.py b/unittests/tools/test_burp_parser.py index a75ea4f914..55289c2e74 100644 --- a/unittests/tools/test_burp_parser.py +++ b/unittests/tools/test_burp_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Test @@ -9,7 +8,7 @@ class 
TestBurpParser(DojoTestCase): def test_burp_with_one_vuln_has_one_finding(self): - with open(path.join(Path(__file__).parent, "../scans/burp/one_finding.xml"), encoding="utf-8") as test_file: + with open(Path(__file__).parent / "../scans/burp/one_finding.xml", encoding="utf-8") as test_file: parser = BurpParser() findings = parser.get_findings(test_file, Test()) for finding in findings: @@ -21,7 +20,7 @@ def test_burp_with_one_vuln_has_one_finding(self): self.assertEqual(3, len(findings[0].unsaved_endpoints)) def test_burp_with_multiple_vulns_has_multiple_findings(self): - with open(path.join(Path(__file__).parent, "../scans/burp/seven_findings.xml"), encoding="utf-8") as test_file: + with open(Path(__file__).parent / "../scans/burp/seven_findings.xml", encoding="utf-8") as test_file: parser = BurpParser() findings = parser.get_findings(test_file, Test()) for finding in findings: @@ -35,7 +34,7 @@ def test_burp_with_multiple_vulns_has_multiple_findings(self): self.assertEqual("Frameable response (potential Clickjacking)", finding.title) def test_burp_with_one_vuln_with_blank_response(self): - with open(path.join(Path(__file__).parent, "../scans/burp/one_finding_with_blank_response.xml"), encoding="utf-8") as test_file: + with open(Path(__file__).parent / "../scans/burp/one_finding_with_blank_response.xml", encoding="utf-8") as test_file: parser = BurpParser() findings = parser.get_findings(test_file, Test()) for finding in findings: @@ -51,7 +50,7 @@ def test_burp_with_one_vuln_with_blank_response(self): self.assertEqual("High", findings[0].severity) def test_burp_with_one_vuln_with_cwe(self): - with open(path.join(Path(__file__).parent, "../scans/burp/one_finding_with_cwe.xml"), encoding="utf-8") as test_file: + with open(Path(__file__).parent / "../scans/burp/one_finding_with_cwe.xml", encoding="utf-8") as test_file: parser = BurpParser() findings = parser.get_findings(test_file, Test()) for finding in findings: @@ -67,7 +66,7 @@ def 
test_burp_with_one_vuln_with_cwe(self): self.assertEqual("Info", findings[0].severity) def test_burp_issue4399(self): - with open(path.join(Path(__file__).parent, "../scans/burp/issue4399.xml"), encoding="utf-8") as test_file: + with open(Path(__file__).parent / "../scans/burp/issue4399.xml", encoding="utf-8") as test_file: parser = BurpParser() findings = parser.get_findings(test_file, Test()) for finding in findings: diff --git a/unittests/tools/test_checkmarx_osa_parser.py b/unittests/tools/test_checkmarx_osa_parser.py index 74592b5124..f05483d28d 100644 --- a/unittests/tools/test_checkmarx_osa_parser.py +++ b/unittests/tools/test_checkmarx_osa_parser.py @@ -28,7 +28,7 @@ def test_checkmarx_osa_parse_file_with_no_vulnerabilities_has_no_findings( self, ): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx_osa/no_finding.json", + get_unit_tests_path() / "scans/checkmarx_osa/no_finding.json", ) parser = CheckmarxOsaParser() findings = parser.get_findings(my_file_handle, test) @@ -42,7 +42,7 @@ def test_checkmarx_osa_parse_file_with_single_vulnerability_has_single_finding( self, ): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx_osa/single_finding.json", + get_unit_tests_path() / "scans/checkmarx_osa/single_finding.json", ) parser = CheckmarxOsaParser() findings = parser.get_findings(my_file_handle, test) @@ -94,7 +94,7 @@ def test_checkmarx_osa_parse_file_with_false_positive_is_false_positive( self, ): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx_osa/single_finding_false_positive.json", + get_unit_tests_path() / "scans/checkmarx_osa/single_finding_false_positive.json", ) parser = CheckmarxOsaParser() findings = parser.get_findings(my_file_handle, test) @@ -115,7 +115,7 @@ def test_checkmarx_osa_parse_file_with_confirmed_is_verified( self, ): my_file_handle, _product, _engagement, test = self.init( - 
get_unit_tests_path() + "/scans/checkmarx_osa/single_finding_confirmed.json", + get_unit_tests_path() / "scans/checkmarx_osa/single_finding_confirmed.json", ) parser = CheckmarxOsaParser() findings = parser.get_findings(my_file_handle, test) @@ -136,7 +136,7 @@ def test_checkmarx_osa_parse_file_with_multiple_findings( self, ): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx_osa/multiple_findings.json", + get_unit_tests_path() / "scans/checkmarx_osa/multiple_findings.json", ) parser = CheckmarxOsaParser() findings = parser.get_findings(my_file_handle, test) @@ -150,7 +150,7 @@ def test_checkmarx_osa_parse_file_with_no_score( self, ): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx_osa/single_finding_no_score.json", + get_unit_tests_path() / "scans/checkmarx_osa/single_finding_no_score.json", ) parser = CheckmarxOsaParser() findings = parser.get_findings(my_file_handle, test) @@ -166,7 +166,7 @@ def test_checkmarx_osa_parse_file_with_no_url( self, ): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx_osa/single_finding_no_url.json", + get_unit_tests_path() / "scans/checkmarx_osa/single_finding_no_url.json", ) parser = CheckmarxOsaParser() findings = parser.get_findings(my_file_handle, test) @@ -183,7 +183,7 @@ def test_checkmarx_osa_parse_file_with_no_libraryId_raises_ValueError( ): with self.assertRaises(ValueError) as context: my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx_osa/single_finding_no_libraryId.json", + get_unit_tests_path() / "scans/checkmarx_osa/single_finding_no_libraryId.json", ) with my_file_handle: parser = CheckmarxOsaParser() diff --git a/unittests/tools/test_checkmarx_parser.py b/unittests/tools/test_checkmarx_parser.py index 322b28faa3..a75010d23b 100644 --- a/unittests/tools/test_checkmarx_parser.py +++ 
b/unittests/tools/test_checkmarx_parser.py @@ -30,7 +30,7 @@ def teardown(self, my_file_handle): @patch("dojo.tools.checkmarx.parser.add_language") def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_no_findings(self, mock): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/no_finding.xml", + get_unit_tests_path() / "scans/checkmarx/no_finding.xml", ) parser = CheckmarxParser() findings = parser.get_findings(my_file_handle, test) @@ -41,7 +41,7 @@ def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_no_findings def test_detailed_parse_file_with_no_vulnerabilities_has_no_findings(self, mock): """Checkmarx detailed scanner, with all vulnerabilities from checkmarx""" my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/no_finding.xml", + get_unit_tests_path() / "scans/checkmarx/no_finding.xml", ) parser = CheckmarxParser() parser.set_mode("detailed") @@ -52,7 +52,7 @@ def test_detailed_parse_file_with_no_vulnerabilities_has_no_findings(self, mock) @patch("dojo.tools.checkmarx.parser.add_language") def test_file_name_aggregated_parse_file_with_single_vulnerability_has_single_finding(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/single_finding.xml", + get_unit_tests_path() / "scans/checkmarx/single_finding.xml", ) parser = CheckmarxParser() findings = parser.get_findings(my_file_handle, test) @@ -80,7 +80,7 @@ def test_file_name_aggregated_parse_file_with_single_vulnerability_has_single_fi @patch("dojo.tools.checkmarx.parser.add_language") def test_detailed_parse_file_with_single_vulnerability_has_single_finding(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/single_finding.xml", + get_unit_tests_path() / "scans/checkmarx/single_finding.xml", ) parser = CheckmarxParser() parser.set_mode("detailed") @@ -214,7 
+214,7 @@ def check_parse_file_with_single_vulnerability_has_single_finding(self, findings @patch("dojo.tools.checkmarx.parser.add_language") def test_file_name_aggregated_parse_file_with_false_positive_is_false_positive(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/single_finding_false_positive.xml", + get_unit_tests_path() / "scans/checkmarx/single_finding_false_positive.xml", ) parser = CheckmarxParser() findings = parser.get_findings(my_file_handle, test) @@ -226,7 +226,7 @@ def test_file_name_aggregated_parse_file_with_false_positive_is_false_positive(s @patch("dojo.tools.checkmarx.parser.add_language") def test_detailed_parse_file_with_false_positive_is_false_positive(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/single_finding_false_positive.xml", + get_unit_tests_path() / "scans/checkmarx/single_finding_false_positive.xml", ) parser = CheckmarxParser() parser.set_mode("detailed") @@ -255,7 +255,7 @@ def check_parse_file_with_false_positive_is_false_positive(self, findings): @patch("dojo.tools.checkmarx.parser.add_language") def test_file_name_aggregated_parse_file_with_two_aggregated_findings_one_is_false_p(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/two_aggregated_findings_one_is_false_positive.xml", + get_unit_tests_path() / "scans/checkmarx/two_aggregated_findings_one_is_false_positive.xml", ) parser = CheckmarxParser() findings = parser.get_findings(my_file_handle, test) @@ -281,7 +281,7 @@ def test_file_name_aggregated_parse_file_with_two_aggregated_findings_one_is_fal @patch("dojo.tools.checkmarx.parser.add_language") def test_file_name_aggregated_parse_file_with_multiple_vulnerabilities_has_multiple_findings(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/multiple_findings.xml", + 
get_unit_tests_path() / "scans/checkmarx/multiple_findings.xml", ) parser = CheckmarxParser() findings = parser.get_findings(my_file_handle, test) @@ -300,7 +300,7 @@ def test_file_name_aggregated_parse_file_with_multiple_vulnerabilities_has_multi @patch("dojo.tools.checkmarx.parser.add_language") def test_detailed_parse_file_with_multiple_vulnerabilities_has_multiple_findings(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/multiple_findings.xml", + get_unit_tests_path() / "scans/checkmarx/multiple_findings.xml", ) parser = CheckmarxParser() parser.set_mode("detailed") @@ -323,7 +323,7 @@ def test_detailed_parse_file_with_multiple_vulnerabilities_has_multiple_findings @patch("dojo.tools.checkmarx.parser.add_language") def test_file_name_aggregated_parse_file_with_different_sourceFilename_same_sinkFilename_is_aggregated(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/multiple_findings_different_sourceFilename_same_sinkFilename.xml", + get_unit_tests_path() / "scans/checkmarx/multiple_findings_different_sourceFilename_same_sinkFilename.xml", ) parser = CheckmarxParser() findings = parser.get_findings(my_file_handle, test) @@ -337,7 +337,7 @@ def test_file_name_aggregated_parse_file_with_different_sourceFilename_same_sink @patch("dojo.tools.checkmarx.parser.add_language") def test_detailed_parse_file_with_different_sourceFilename_same_sinkFilename_is_not_aggregated(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/multiple_findings_different_sourceFilename_same_sinkFilename.xml", + get_unit_tests_path() / "scans/checkmarx/multiple_findings_different_sourceFilename_same_sinkFilename.xml", ) parser = CheckmarxParser() parser.set_mode("detailed") @@ -354,7 +354,7 @@ def test_detailed_parse_file_with_different_sourceFilename_same_sinkFilename_is_ 
@patch("dojo.tools.checkmarx.parser.add_language") def test_file_name_aggregated_parse_file_with_same_sourceFilename_different_sinkFilename_is_not_aggregated(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/multiple_findings_same_sourceFilename_different_sinkFilename.xml", + get_unit_tests_path() / "scans/checkmarx/multiple_findings_same_sourceFilename_different_sinkFilename.xml", ) parser = CheckmarxParser() findings = parser.get_findings(my_file_handle, test) @@ -366,7 +366,7 @@ def test_file_name_aggregated_parse_file_with_same_sourceFilename_different_sink @patch("dojo.tools.checkmarx.parser.add_language") def test_detailed_parse_file_with_same_sourceFilename_different_sinkFilename_is_not_aggregated(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/multiple_findings_same_sourceFilename_different_sinkFilename.xml", + get_unit_tests_path() / "scans/checkmarx/multiple_findings_same_sourceFilename_different_sinkFilename.xml", ) parser = CheckmarxParser() parser.set_mode("detailed") @@ -381,7 +381,7 @@ def test_detailed_parse_file_with_same_sourceFilename_different_sinkFilename_is_ @patch("dojo.tools.checkmarx.parser.add_language") def test_file_name_aggregated_parse_file_with_utf8_replacement_char(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/utf8_replacement_char.xml", + get_unit_tests_path() / "scans/checkmarx/utf8_replacement_char.xml", ) parser = CheckmarxParser() findings = parser.get_findings(my_file_handle, test) @@ -409,7 +409,7 @@ def test_file_name_aggregated_parse_file_with_utf8_replacement_char(self, mock): @patch("dojo.tools.checkmarx.parser.add_language") def test_detailed_parse_file_with_utf8_replacement_char(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/utf8_replacement_char.xml", + 
get_unit_tests_path() / "scans/checkmarx/utf8_replacement_char.xml", ) parser = CheckmarxParser() parser.set_mode("detailed") @@ -527,7 +527,7 @@ def check_parse_file_with_utf8_replacement_char(self, findings): @patch("dojo.tools.checkmarx.parser.add_language") def test_file_name_aggregated_parse_file_with_utf8_various_non_ascii_char(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/utf8_various_non_ascii_char.xml", + get_unit_tests_path() / "scans/checkmarx/utf8_various_non_ascii_char.xml", ) parser = CheckmarxParser() findings = parser.get_findings(my_file_handle, test) @@ -555,7 +555,7 @@ def test_file_name_aggregated_parse_file_with_utf8_various_non_ascii_char(self, @patch("dojo.tools.checkmarx.parser.add_language") def test_detailed_parse_file_with_utf8_various_non_ascii_char(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/utf8_various_non_ascii_char.xml", + get_unit_tests_path() / "scans/checkmarx/utf8_various_non_ascii_char.xml", ) parser = CheckmarxParser() parser.set_mode("detailed") @@ -673,7 +673,7 @@ def check_parse_file_with_utf8_various_non_ascii_char(self, findings): @patch("dojo.tools.checkmarx.parser.add_language") def test_file_with_multiple_findings_is_aggregated_with_query_id(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/multiple_findings_same_query_id.xml", + get_unit_tests_path() / "scans/checkmarx/multiple_findings_same_query_id.xml", ) parser = CheckmarxParser() findings = parser.get_findings(my_file_handle, test) @@ -693,7 +693,7 @@ def test_file_with_multiple_findings_is_aggregated_with_query_id(self, mock): @patch("dojo.tools.checkmarx.parser.add_language") def test_file_with_empty_filename(self, mock): my_file_handle, product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/single_no_filename.xml", + 
get_unit_tests_path() / "scans/checkmarx/single_no_filename.xml", ) parser = CheckmarxParser() findings = parser.get_findings(my_file_handle, test) @@ -713,7 +713,7 @@ def test_file_with_empty_filename(self, mock): @patch("dojo.tools.checkmarx.parser.add_language") def test_file_with_many_aggregated_findings(self, mock): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/many_aggregated_findings.xml", + get_unit_tests_path() / "scans/checkmarx/many_aggregated_findings.xml", ) parser = CheckmarxParser() findings = parser.get_findings(my_file_handle, test) @@ -730,7 +730,7 @@ def test_file_with_many_aggregated_findings(self, mock): @patch("dojo.tools.checkmarx.parser.add_language") def test_file_with_many_findings_json(self, mock): my_file_handle, _product, _engagement, _test = self.init( - get_unit_tests_path() + "/scans/checkmarx/multiple_findings.json", + get_unit_tests_path() / "scans/checkmarx/multiple_findings.json", ) parser = CheckmarxParser() findings = parser.get_findings(my_file_handle, Test()) @@ -764,7 +764,7 @@ def test_file_with_many_findings_json(self, mock): @patch("dojo.tools.checkmarx.parser.add_language") def test_file_issue6956(self, mock): my_file_handle, _product, _engagement, _test = self.init( - get_unit_tests_path() + "/scans/checkmarx/sample_report.json", + get_unit_tests_path() / "scans/checkmarx/sample_report.json", ) parser = CheckmarxParser() findings = parser.get_findings(my_file_handle, Test()) @@ -827,7 +827,7 @@ def test_file_issue6956(self, mock): @patch("dojo.tools.checkmarx.parser.add_language") def test_finding_date_should_be_date_xml(self, mock): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/single_finding.xml", + get_unit_tests_path() / "scans/checkmarx/single_finding.xml", ) parser = CheckmarxParser() parser.set_mode("detailed") @@ -838,7 +838,7 @@ def test_finding_date_should_be_date_xml(self, mock): 
@patch("dojo.tools.checkmarx.parser.add_language") def test_finding_date_should_be_date_json(self, mock): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/checkmarx/multiple_findings.json", + get_unit_tests_path() / "scans/checkmarx/multiple_findings.json", ) parser = CheckmarxParser() parser.set_mode("detailed") diff --git a/unittests/tools/test_codechecker_parser.py b/unittests/tools/test_codechecker_parser.py index 4f81bca457..6576e4e0b1 100644 --- a/unittests/tools/test_codechecker_parser.py +++ b/unittests/tools/test_codechecker_parser.py @@ -7,7 +7,7 @@ class TestCodeCheckerParser(DojoTestCase): def test_parse_file_with_no_vuln_has_no_findings(self): with open( - get_unit_tests_path() + "/scans/codechecker/cc-report-0-vuln.json", encoding="utf-8", + get_unit_tests_path() / "scans/codechecker/cc-report-0-vuln.json", encoding="utf-8", ) as testfile: parser = CodeCheckerParser() findings = parser.get_findings(testfile, Test()) @@ -15,7 +15,7 @@ def test_parse_file_with_no_vuln_has_no_findings(self): def test_parse_file_with_one_vuln_has_one_finding(self): with open( - get_unit_tests_path() + "/scans/codechecker/cc-report-1-vuln.json", encoding="utf-8", + get_unit_tests_path() / "scans/codechecker/cc-report-1-vuln.json", encoding="utf-8", ) as testfile: parser = CodeCheckerParser() findings = parser.get_findings(testfile, Test()) @@ -33,7 +33,7 @@ def test_parse_file_with_one_vuln_has_one_finding(self): def test_parse_file_with_multiple_vuln_has_multiple_findings(self): with open( - get_unit_tests_path() + "/scans/codechecker/cc-report-many-vuln.json", encoding="utf-8", + get_unit_tests_path() / "scans/codechecker/cc-report-many-vuln.json", encoding="utf-8", ) as testfile: parser = CodeCheckerParser() findings = parser.get_findings(testfile, Test()) @@ -60,7 +60,7 @@ def test_parse_file_with_multiple_vuln_has_multiple_findings(self): def test_parse_file_with_various_review_statuses(self): with open( - 
get_unit_tests_path() + "/scans/codechecker/cc-report-review-status.json", encoding="utf-8", + get_unit_tests_path() / "scans/codechecker/cc-report-review-status.json", encoding="utf-8", ) as testfile: parser = CodeCheckerParser() findings = parser.get_findings(testfile, Test()) diff --git a/unittests/tools/test_crashtest_security_parser.py b/unittests/tools/test_crashtest_security_parser.py index 88aa859cad..b912032413 100644 --- a/unittests/tools/test_crashtest_security_parser.py +++ b/unittests/tools/test_crashtest_security_parser.py @@ -25,7 +25,7 @@ def test_crashtest_security_json_parser_full_file_has_many_findings(self): def test_crashtest_security_json_parser_extracted_data_file_has_many_findings(self): testfile = open( - get_unit_tests_path() + "/scans/crashtest_security/data_extracted.json", encoding="utf-8", + get_unit_tests_path() / "scans/crashtest_security/data_extracted.json", encoding="utf-8", ) parser = CrashtestSecurityParser() findings = parser.get_findings(testfile, Test()) diff --git a/unittests/tools/test_dawnscanner_parser.py b/unittests/tools/test_dawnscanner_parser.py index 5b7a161cc9..4bf585f982 100644 --- a/unittests/tools/test_dawnscanner_parser.py +++ b/unittests/tools/test_dawnscanner_parser.py @@ -1,5 +1,4 @@ import datetime -from os import path from pathlib import Path from dojo.models import Test @@ -9,7 +8,7 @@ class TestDawnScannerParser(DojoTestCase): def test_burp_with_one_vuln_has_one_finding(self): - with open(path.join(Path(__file__).parent, "../scans/dawnscanner/dawnscanner_v1.6.9.json"), encoding="utf-8") as test_file: + with open(Path(__file__).parent / "../scans/dawnscanner/dawnscanner_v1.6.9.json", encoding="utf-8") as test_file: parser = DawnScannerParser() findings = parser.get_findings(test_file, Test()) for finding in findings: diff --git a/unittests/tools/test_dependency_check_parser.py b/unittests/tools/test_dependency_check_parser.py index 4ad945a39c..88c402cd5f 100644 --- 
a/unittests/tools/test_dependency_check_parser.py +++ b/unittests/tools/test_dependency_check_parser.py @@ -1,6 +1,5 @@ import logging from datetime import UTC, datetime -from os import path from pathlib import Path from dateutil.tz import tzlocal, tzoffset @@ -256,7 +255,7 @@ def test_parse_file_with_multiple_vulnerabilities_has_multiple_findings(self): def test_parse_java_6_5_3(self): """Test with version 6.5.3""" - with open(path.join(Path(__file__).parent, "../scans/dependency_check/version-6.5.3.xml"), encoding="utf-8") as test_file: + with open(Path(__file__).parent / "../scans/dependency_check/version-6.5.3.xml", encoding="utf-8") as test_file: parser = DependencyCheckParser() findings = parser.get_findings(test_file, Test()) items = findings diff --git a/unittests/tools/test_dependency_track_parser.py b/unittests/tools/test_dependency_track_parser.py index 60db4cedc3..010c2b1fe2 100644 --- a/unittests/tools/test_dependency_track_parser.py +++ b/unittests/tools/test_dependency_track_parser.py @@ -7,7 +7,7 @@ class TestDependencyTrackParser(DojoTestCase): def test_dependency_track_parser_with_empty_list_for_findings_key_has_no_findings(self): with open( - get_unit_tests_path() + "/scans/dependency_track/no_findings_because_findings_key_is_empty_list.json", encoding="utf-8", + get_unit_tests_path() / "scans/dependency_track/no_findings_because_findings_key_is_empty_list.json", encoding="utf-8", ) as testfile: parser = DependencyTrackParser() findings = parser.get_findings(testfile, Test()) @@ -15,7 +15,7 @@ def test_dependency_track_parser_with_empty_list_for_findings_key_has_no_finding def test_dependency_track_parser_with_missing_findings_key_has_no_findings(self): with open( - get_unit_tests_path() + "/scans/dependency_track/no_findings_because_findings_key_is_missing.json", encoding="utf-8", + get_unit_tests_path() / "scans/dependency_track/no_findings_because_findings_key_is_missing.json", encoding="utf-8", ) as testfile: parser = DependencyTrackParser() 
findings = parser.get_findings(testfile, Test()) @@ -23,7 +23,7 @@ def test_dependency_track_parser_with_missing_findings_key_has_no_findings(self) def test_dependency_track_parser_with_null_findings_key_has_no_findings(self): with open( - get_unit_tests_path() + "/scans/dependency_track/no_findings_because_findings_key_is_null.json", encoding="utf-8", + get_unit_tests_path() / "scans/dependency_track/no_findings_because_findings_key_is_null.json", encoding="utf-8", ) as testfile: parser = DependencyTrackParser() findings = parser.get_findings(testfile, Test()) @@ -31,7 +31,7 @@ def test_dependency_track_parser_with_null_findings_key_has_no_findings(self): def test_dependency_track_parser_has_many_findings(self): with open( - get_unit_tests_path() + "/scans/dependency_track/many_findings.json", encoding="utf-8", + get_unit_tests_path() / "scans/dependency_track/many_findings.json", encoding="utf-8", ) as testfile: parser = DependencyTrackParser() findings = parser.get_findings(testfile, Test()) @@ -49,7 +49,7 @@ def test_dependency_track_parser_has_many_findings(self): def test_dependency_track_parser_has_one_finding(self): with open( - get_unit_tests_path() + "/scans/dependency_track/one_finding.json", encoding="utf-8", + get_unit_tests_path() / "scans/dependency_track/one_finding.json", encoding="utf-8", ) as testfile: parser = DependencyTrackParser() findings = parser.get_findings(testfile, Test()) @@ -57,7 +57,7 @@ def test_dependency_track_parser_has_one_finding(self): def test_dependency_track_parser_v3_8_0(self): with open( - get_unit_tests_path() + "/scans/dependency_track/dependency_track_3.8.0_2021-01-18.json", encoding="utf-8", + get_unit_tests_path() / "scans/dependency_track/dependency_track_3.8.0_2021-01-18.json", encoding="utf-8", ) as testfile: parser = DependencyTrackParser() findings = parser.get_findings(testfile, Test()) @@ -67,7 +67,7 @@ def test_dependency_track_parser_v3_8_0(self): def test_dependency_track_parser_findings_with_alias(self): 
with open( - get_unit_tests_path() + "/scans/dependency_track/many_findings_with_alias.json", encoding="utf-8", + get_unit_tests_path() / "scans/dependency_track/many_findings_with_alias.json", encoding="utf-8", ) as testfile: parser = DependencyTrackParser() findings = parser.get_findings(testfile, Test()) @@ -79,7 +79,7 @@ def test_dependency_track_parser_findings_with_alias(self): def test_dependency_track_parser_findings_with_empty_alias(self): with open( - get_unit_tests_path() + "/scans/dependency_track/many_findings_with_empty_alias.json", encoding="utf-8", + get_unit_tests_path() / "scans/dependency_track/many_findings_with_empty_alias.json", encoding="utf-8", ) as testfile: parser = DependencyTrackParser() findings = parser.get_findings(testfile, Test()) diff --git a/unittests/tools/test_dockerbench_parser.py b/unittests/tools/test_dockerbench_parser.py index 22c0d66f28..55fe81eb2b 100644 --- a/unittests/tools/test_dockerbench_parser.py +++ b/unittests/tools/test_dockerbench_parser.py @@ -7,7 +7,7 @@ class TestDockerBenchParser(DojoTestCase): def test_parse_file_with_no_vuln_has_no_findings(self): with open( - get_unit_tests_path() + "/scans/dockerbench/docker-bench-report-zero-vulns.json", encoding="utf-8", + get_unit_tests_path() / "scans/dockerbench/docker-bench-report-zero-vulns.json", encoding="utf-8", ) as testfile: parser = DockerBenchParser() findings = parser.get_findings(testfile, Test()) @@ -15,7 +15,7 @@ def test_parse_file_with_no_vuln_has_no_findings(self): def test_parse_file_with_one_vuln_has_one_finding(self): with open( - get_unit_tests_path() + "/scans/dockerbench/docker-bench-report-single-vuln.json", encoding="utf-8", + get_unit_tests_path() / "scans/dockerbench/docker-bench-report-single-vuln.json", encoding="utf-8", ) as testfile: parser = DockerBenchParser() findings = parser.get_findings(testfile, Test()) @@ -29,7 +29,7 @@ def test_parse_file_with_one_vuln_has_one_finding(self): def 
test_parse_file_with_multiple_vuln_has_multiple_findings(self): with open( - get_unit_tests_path() + "/scans/dockerbench/docker-bench-report-many-vulns.json", encoding="utf-8", + get_unit_tests_path() / "scans/dockerbench/docker-bench-report-many-vulns.json", encoding="utf-8", ) as testfile: parser = DockerBenchParser() findings = parser.get_findings(testfile, Test()) diff --git a/unittests/tools/test_gitleaks_parser.py b/unittests/tools/test_gitleaks_parser.py index 1b70f854dd..c2762ceb78 100644 --- a/unittests/tools/test_gitleaks_parser.py +++ b/unittests/tools/test_gitleaks_parser.py @@ -6,13 +6,13 @@ class TestGitleaksParser(DojoTestCase): def test_parse_file_legacy_with_no_findings(self): - with open(get_unit_tests_path() + "/scans/gitleaks/no_findings.json", encoding="utf-8") as testfile: + with open(get_unit_tests_path() / "scans/gitleaks/no_findings.json", encoding="utf-8") as testfile: parser = GitleaksParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(0, len(findings)) def test_parse_file_legacy_with_one_finding(self): - with open(get_unit_tests_path() + "/scans/gitleaks/data_one.json", encoding="utf-8") as testfile: + with open(get_unit_tests_path() / "scans/gitleaks/data_one.json", encoding="utf-8") as testfile: parser = GitleaksParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -24,7 +24,7 @@ def test_parse_file_legacy_with_one_finding(self): self.assertIn("AsymmetricPrivateKey", finding.unsaved_tags) def test_parse_file_legacy_with_multiple_finding(self): - with open(get_unit_tests_path() + "/scans/gitleaks/data_many.json", encoding="utf-8") as testfile: + with open(get_unit_tests_path() / "scans/gitleaks/data_many.json", encoding="utf-8") as testfile: parser = GitleaksParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(2, len(findings)) @@ -36,13 +36,13 @@ def test_parse_file_legacy_with_multiple_finding(self): self.assertIn("Github", finding.unsaved_tags) def 
test_parse_file_legacy_with_multiple_redacted_finding(self): - with open(get_unit_tests_path() + "/scans/gitleaks/redacted_data_many.json", encoding="utf-8") as testfile: + with open(get_unit_tests_path() / "scans/gitleaks/redacted_data_many.json", encoding="utf-8") as testfile: parser = GitleaksParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(6, len(findings)) def test_parse_file_legacy_from_issue4336(self): - with open(get_unit_tests_path() + "/scans/gitleaks/issue4336.json", encoding="utf-8") as testfile: + with open(get_unit_tests_path() / "scans/gitleaks/issue4336.json", encoding="utf-8") as testfile: parser = GitleaksParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -53,7 +53,7 @@ def test_parse_file_legacy_from_issue4336(self): self.assertEqual(23, finding.line) def test_parse_file_from_version_7_5_0(self): - with open(get_unit_tests_path() + "/scans/gitleaks/version_7.5.0.json", encoding="utf-8") as testfile: + with open(get_unit_tests_path() / "scans/gitleaks/version_7.5.0.json", encoding="utf-8") as testfile: parser = GitleaksParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(4, len(findings)) @@ -84,7 +84,7 @@ def test_parse_file_from_version_7_5_0(self): self.assertIn("AWS", finding.unsaved_tags) def test_parse_file_from_version_8(self): - with open(get_unit_tests_path() + "/scans/gitleaks/gitleaks8_many.json", encoding="utf-8") as testfile: + with open(get_unit_tests_path() / "scans/gitleaks/gitleaks8_many.json", encoding="utf-8") as testfile: parser = GitleaksParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(3, len(findings)) diff --git a/unittests/tools/test_horusec_parser.py b/unittests/tools/test_horusec_parser.py index b2a39b75ca..45d4c603da 100644 --- a/unittests/tools/test_horusec_parser.py +++ b/unittests/tools/test_horusec_parser.py @@ -1,5 +1,4 @@ import datetime -from os import path from pathlib import Path from 
dojo.models import Test @@ -10,7 +9,7 @@ class TestHorusecParser(DojoTestCase): def test_get_findings(self): """Version 2.6.3 with big project in Python""" - with open(path.join(Path(__file__).parent, "../scans/horusec/version_2.6.3.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/horusec/version_2.6.3.json", encoding="utf-8") as testfile: parser = HorusecParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(267, len(findings)) @@ -18,7 +17,7 @@ def test_get_findings(self): def test_get_tests(self): """Version 2.6.3 with big project in Python""" - with open(path.join(Path(__file__).parent, "../scans/horusec/version_2.6.3.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/horusec/version_2.6.3.json", encoding="utf-8") as testfile: parser = HorusecParser() tests = parser.get_tests("Horusec Scan", testfile) self.assertEqual(1, len(tests)) @@ -50,7 +49,7 @@ def test_get_tests(self): def test_get_tests_ok(self): """Version 2.6.3 with big project in Python""" - with open(path.join(Path(__file__).parent, "../scans/horusec/horres3.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/horusec/horres3.json", encoding="utf-8") as testfile: parser = HorusecParser() tests = parser.get_tests("Horusec Scan", testfile) self.assertEqual(1, len(tests)) @@ -82,7 +81,7 @@ def test_get_tests_ok(self): def test_get_tests_issue_6258(self): """""" - with open(path.join(Path(__file__).parent, "../scans/horusec/issue_6258.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/horusec/issue_6258.json", encoding="utf-8") as testfile: parser = HorusecParser() tests = parser.get_tests("Horusec Scan", testfile) self.assertEqual(1, len(tests)) @@ -118,7 +117,7 @@ def test_get_tests_issue_6258(self): def test_get_tests_pr_6563(self): """""" - with open(path.join(Path(__file__).parent, "../scans/horusec/pr_6563.json"), encoding="utf-8") as testfile: 
+ with open(Path(__file__).parent / "../scans/horusec/pr_6563.json", encoding="utf-8") as testfile: parser = HorusecParser() tests = parser.get_tests("Horusec Scan", testfile) self.assertEqual(1, len(tests)) @@ -137,7 +136,7 @@ def test_get_tests_pr_6563(self): def test_issue_9939(self): """""" - with open(path.join(Path(__file__).parent, "../scans/horusec/issue_9939.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/horusec/issue_9939.json", encoding="utf-8") as testfile: parser = HorusecParser() tests = parser.get_tests("Horusec Scan", testfile) self.assertEqual(1, len(tests)) diff --git a/unittests/tools/test_huskyci_parser.py b/unittests/tools/test_huskyci_parser.py index 7c7bb1ad59..272f87e7a2 100644 --- a/unittests/tools/test_huskyci_parser.py +++ b/unittests/tools/test_huskyci_parser.py @@ -13,7 +13,7 @@ def test_parse_file_no_finding(self): def test_parse_file_has_one_finding_one_tool(self): with open( - get_unit_tests_path() + "/scans/huskyci/huskyci_report_one_finding_one_tool.json", encoding="utf-8", + get_unit_tests_path() / "scans/huskyci/huskyci_report_one_finding_one_tool.json", encoding="utf-8", ) as testfile: parser = HuskyCIParser() findings = parser.get_findings(testfile, Test()) @@ -21,7 +21,7 @@ def test_parse_file_has_one_finding_one_tool(self): def test_parse_file_has_many_finding_one_tool(self): with open( - get_unit_tests_path() + "/scans/huskyci/huskyci_report_many_finding_one_tool.json", encoding="utf-8", + get_unit_tests_path() / "scans/huskyci/huskyci_report_many_finding_one_tool.json", encoding="utf-8", ) as testfile: parser = HuskyCIParser() findings = parser.get_findings(testfile, Test()) @@ -29,7 +29,7 @@ def test_parse_file_has_many_finding_one_tool(self): def test_parse_file_has_many_finding_two_tools(self): with open( - get_unit_tests_path() + "/scans/huskyci/huskyci_report_many_finding_two_tools.json", encoding="utf-8", + get_unit_tests_path() / 
"scans/huskyci/huskyci_report_many_finding_two_tools.json", encoding="utf-8", ) as testfile: parser = HuskyCIParser() findings = parser.get_findings(testfile, Test()) diff --git a/unittests/tools/test_kubebench_parser.py b/unittests/tools/test_kubebench_parser.py index 25b77faaf7..4f32377226 100644 --- a/unittests/tools/test_kubebench_parser.py +++ b/unittests/tools/test_kubebench_parser.py @@ -7,7 +7,7 @@ class TestKubeBenchParser(DojoTestCase): def test_parse_file_with_no_vuln_has_no_findings(self): with open( - get_unit_tests_path() + "/scans/kubebench/kube-bench-report-zero-vuln.json", encoding="utf-8", + get_unit_tests_path() / "scans/kubebench/kube-bench-report-zero-vuln.json", encoding="utf-8", ) as testfile: parser = KubeBenchParser() findings = parser.get_findings(testfile, Test()) @@ -15,7 +15,7 @@ def test_parse_file_with_no_vuln_has_no_findings(self): def test_parse_file_with_one_vuln_has_one_finding(self): with open( - get_unit_tests_path() + "/scans/kubebench/kube-bench-report-one-vuln.json", encoding="utf-8", + get_unit_tests_path() / "scans/kubebench/kube-bench-report-one-vuln.json", encoding="utf-8", ) as testfile: parser = KubeBenchParser() findings = parser.get_findings(testfile, Test()) @@ -23,7 +23,7 @@ def test_parse_file_with_one_vuln_has_one_finding(self): def test_parse_file_with_multiple_vuln_has_multiple_findings(self): with open( - get_unit_tests_path() + "/scans/kubebench/kube-bench-report-many-vuln.json", encoding="utf-8", + get_unit_tests_path() / "scans/kubebench/kube-bench-report-many-vuln.json", encoding="utf-8", ) as testfile: parser = KubeBenchParser() findings = parser.get_findings(testfile, Test()) @@ -33,7 +33,7 @@ def test_parse_file_with_controls_tag(self): # The testfile has been derived from https://github.com/kubernetes-sigs/wg-policy-prototypes/blob/master/policy-report/kube-bench-adapter/samples/kube-bench-output.json with open( - get_unit_tests_path() + "/scans/kubebench/kube-bench-controls.json", encoding="utf-8", + 
get_unit_tests_path() / "scans/kubebench/kube-bench-controls.json", encoding="utf-8", ) as testfile: parser = KubeBenchParser() findings = parser.get_findings(testfile, Test()) diff --git a/unittests/tools/test_kubescape_parser.py b/unittests/tools/test_kubescape_parser.py index 346cda0401..4cbbf5f283 100644 --- a/unittests/tools/test_kubescape_parser.py +++ b/unittests/tools/test_kubescape_parser.py @@ -5,19 +5,19 @@ class TestKubescapeParser(DojoTestCase): def test_parse_file_has_many_findings(self): - with open(get_unit_tests_path() + "/scans/kubescape/many_findings.json", encoding="utf-8") as testfile: + with open(get_unit_tests_path() / "scans/kubescape/many_findings.json", encoding="utf-8") as testfile: parser = KubescapeParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(349, len(findings)) def test_parse_file_has_many_results(self): - with open(get_unit_tests_path() + "/scans/kubescape/results.json", encoding="utf-8") as testfile: + with open(get_unit_tests_path() / "scans/kubescape/results.json", encoding="utf-8") as testfile: parser = KubescapeParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(0, len(findings)) def test_parse_file_with_a_failure(self): - with open(get_unit_tests_path() + "/scans/kubescape/with_a_failure.json", encoding="utf-8") as testfile: + with open(get_unit_tests_path() / "scans/kubescape/with_a_failure.json", encoding="utf-8") as testfile: parser = KubescapeParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(3, len(findings)) diff --git a/unittests/tools/test_legitify_parser.py b/unittests/tools/test_legitify_parser.py index 66f803258b..b2ad651a71 100644 --- a/unittests/tools/test_legitify_parser.py +++ b/unittests/tools/test_legitify_parser.py @@ -5,7 +5,7 @@ class TestLegitifyParser(DojoTestCase): def test_parse_file_with_many_findings(self): - with open(get_unit_tests_path() + "/scans/legitify/legitify_many_findings.json", encoding="utf-8") as testfile: + with 
open(get_unit_tests_path() / "scans/legitify/legitify_many_findings.json", encoding="utf-8") as testfile: parser = LegitifyParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(16, len(findings)) @@ -21,7 +21,7 @@ def test_parse_file_with_many_findings(self): endpoint.clean() def test_parse_file_with_one_finding(self): - with open(get_unit_tests_path() + "/scans/legitify/legitify_one_finding.json", encoding="utf-8") as testfile: + with open(get_unit_tests_path() / "scans/legitify/legitify_one_finding.json", encoding="utf-8") as testfile: parser = LegitifyParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -37,7 +37,7 @@ def test_parse_file_with_one_finding(self): endpoint.clean() def test_parse_file_with_no_findings(self): - with open(get_unit_tests_path() + "/scans/legitify/legitify_no_findings.json", encoding="utf-8") as testfile: + with open(get_unit_tests_path() / "scans/legitify/legitify_no_findings.json", encoding="utf-8") as testfile: parser = LegitifyParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(0, len(findings)) diff --git a/unittests/tools/test_mend_parser.py b/unittests/tools/test_mend_parser.py index cd544d503c..8dc8a2b475 100644 --- a/unittests/tools/test_mend_parser.py +++ b/unittests/tools/test_mend_parser.py @@ -30,7 +30,7 @@ def test_parse_file_with_multiple_vuln_has_multiple_finding(self): def test_parse_file_with_multiple_vuln_cli_output(self): with open( - get_unit_tests_path() + "/scans/mend/cli_generated_many_vulns.json", encoding="utf-8", + get_unit_tests_path() / "scans/mend/cli_generated_many_vulns.json", encoding="utf-8", ) as testfile: parser = MendParser() findings = parser.get_findings(testfile, Test()) diff --git a/unittests/tools/test_microfocus_webinspect_parser.py b/unittests/tools/test_microfocus_webinspect_parser.py index 07f43bca93..aea571d941 100644 --- a/unittests/tools/test_microfocus_webinspect_parser.py +++ 
b/unittests/tools/test_microfocus_webinspect_parser.py @@ -10,7 +10,7 @@ def test_parse_file_with_no_vuln_has_no_findings(self): test.engagement = Engagement() test.engagement.product = Product() with open( - get_unit_tests_path() + "/scans/microfocus_webinspect/Webinspect_no_vuln.xml", encoding="utf-8", + get_unit_tests_path() / "scans/microfocus_webinspect/Webinspect_no_vuln.xml", encoding="utf-8", ) as testfile: parser = MicrofocusWebinspectParser() findings = parser.get_findings(testfile, test) @@ -21,7 +21,7 @@ def test_parse_file_with_one_vuln_has_one_findings(self): test.engagement = Engagement() test.engagement.product = Product() with open( - get_unit_tests_path() + "/scans/microfocus_webinspect/Webinspect_one_vuln.xml", encoding="utf-8", + get_unit_tests_path() / "scans/microfocus_webinspect/Webinspect_one_vuln.xml", encoding="utf-8", ) as testfile: parser = MicrofocusWebinspectParser() findings = parser.get_findings(testfile, test) @@ -42,7 +42,7 @@ def test_parse_file_with_multiple_vuln_has_multiple_finding(self): test.engagement = Engagement() test.engagement.product = Product() with open( - get_unit_tests_path() + "/scans/microfocus_webinspect/Webinspect_many_vuln.xml", encoding="utf-8", + get_unit_tests_path() / "scans/microfocus_webinspect/Webinspect_many_vuln.xml", encoding="utf-8", )as testfile: parser = MicrofocusWebinspectParser() findings = parser.get_findings(testfile, test) @@ -121,7 +121,7 @@ def test_parse_file_issue7690(self): test.engagement = Engagement() test.engagement.product = Product() with open( - get_unit_tests_path() + "/scans/microfocus_webinspect/issue_7690.xml", encoding="utf-8", + get_unit_tests_path() / "scans/microfocus_webinspect/issue_7690.xml", encoding="utf-8", ) as testfile: parser = MicrofocusWebinspectParser() findings = parser.get_findings(testfile, test) diff --git a/unittests/tools/test_nancy_parser.py b/unittests/tools/test_nancy_parser.py index 5de57ddd2e..a015946c59 100644 --- 
a/unittests/tools/test_nancy_parser.py +++ b/unittests/tools/test_nancy_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Test @@ -8,13 +7,13 @@ class TestNancyParser(DojoTestCase): def test_nancy_parser_with_no_vuln_has_no_findings(self): - with open(path.join(Path(__file__).parent, "../scans/nancy/nancy_no_findings.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/nancy/nancy_no_findings.json", encoding="utf-8") as testfile: parser = NancyParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(0, len(findings)) def test_nancy_parser_with_one_vuln_has_one_findings(self): - with open(path.join(Path(__file__).parent, "../scans/nancy/nancy_one_findings.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/nancy/nancy_one_findings.json", encoding="utf-8") as testfile: parser = NancyParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -28,7 +27,7 @@ def test_nancy_parser_with_one_vuln_has_one_findings(self): self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:N/UI:R/S:C/C:L/I:L/A:N", finding.cvssv3) def test_nancy_plus_parser_with_many_vuln_has_many_findings(self): - with open(path.join(Path(__file__).parent, "../scans/nancy/nancy_many_findings.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/nancy/nancy_many_findings.json", encoding="utf-8") as testfile: parser = NancyParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(13, len(findings)) diff --git a/unittests/tools/test_neuvector_compliance_parser.py b/unittests/tools/test_neuvector_compliance_parser.py index 1370d61143..51b92b8880 100644 --- a/unittests/tools/test_neuvector_compliance_parser.py +++ b/unittests/tools/test_neuvector_compliance_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Test @@ -8,14 +7,14 @@ class 
TestNeuVectorComplianceParser(DojoTestCase): def test_parse_file_with_no_vuln(self): - testfile = open(path.join(Path(__file__).parent, "../scans/neuvector_compliance/no_vuln.json"), encoding="utf-8") + testfile = open(Path(__file__).parent / "../scans/neuvector_compliance/no_vuln.json", encoding="utf-8") parser = NeuVectorComplianceParser() findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(0, len(findings)) def test_parse_file_with_one_vuln(self): - testfile = open(path.join(Path(__file__).parent, "../scans/neuvector_compliance/one_vuln.json"), encoding="utf-8") + testfile = open(Path(__file__).parent / "../scans/neuvector_compliance/one_vuln.json", encoding="utf-8") parser = NeuVectorComplianceParser() findings = parser.get_findings(testfile, Test()) testfile.close() @@ -23,7 +22,7 @@ def test_parse_file_with_one_vuln(self): self.assertEqual("docker_D.1.1.11", findings[0].vuln_id_from_tool) def test_parse_file_with_many_vulns(self): - testfile = open(path.join(Path(__file__).parent, "../scans/neuvector_compliance/many_vulns.json"), encoding="utf-8") + testfile = open(Path(__file__).parent / "../scans/neuvector_compliance/many_vulns.json", encoding="utf-8") parser = NeuVectorComplianceParser() findings = parser.get_findings(testfile, Test()) testfile.close() diff --git a/unittests/tools/test_neuvector_parser.py b/unittests/tools/test_neuvector_parser.py index ed4507dd9d..cc2925a1bd 100644 --- a/unittests/tools/test_neuvector_parser.py +++ b/unittests/tools/test_neuvector_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Test @@ -8,14 +7,14 @@ class TestNeuVectorParser(DojoTestCase): def test_parse_file_with_no_vuln(self): - testfile = open(path.join(Path(__file__).parent, "../scans/neuvector/no_vuln.json"), encoding="utf-8") + testfile = open(Path(__file__).parent / "../scans/neuvector/no_vuln.json", encoding="utf-8") parser = NeuVectorParser() findings = parser.get_findings(testfile, 
Test()) testfile.close() self.assertEqual(0, len(findings)) def test_parse_file_with_one_vuln(self): - testfile = open(path.join(Path(__file__).parent, "../scans/neuvector/one_vuln.json"), encoding="utf-8") + testfile = open(Path(__file__).parent / "../scans/neuvector/one_vuln.json", encoding="utf-8") parser = NeuVectorParser() findings = parser.get_findings(testfile, Test()) testfile.close() @@ -24,7 +23,7 @@ def test_parse_file_with_one_vuln(self): self.assertEqual("CVE-2015-8356", findings[0].unsaved_vulnerability_ids[0]) def test_parse_file_with_many_vulns(self): - testfile = open(path.join(Path(__file__).parent, "../scans/neuvector/many_vulns.json"), encoding="utf-8") + testfile = open(Path(__file__).parent / "../scans/neuvector/many_vulns.json", encoding="utf-8") parser = NeuVectorParser() findings = parser.get_findings(testfile, Test()) testfile.close() diff --git a/unittests/tools/test_npm_audit_7_plus_parser.py b/unittests/tools/test_npm_audit_7_plus_parser.py index 0d937584c8..2fad71a169 100644 --- a/unittests/tools/test_npm_audit_7_plus_parser.py +++ b/unittests/tools/test_npm_audit_7_plus_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Test @@ -8,14 +7,14 @@ class TestNpmAudit7PlusParser(DojoTestCase): def test_npm_audit_7_plus_parser_with_no_vuln_has_no_findings(self): - testfile = open(path.join(Path(__file__).parent, "../scans/npm_audit_7_plus/no_vuln.json"), encoding="utf-8") + testfile = open(Path(__file__).parent / "../scans/npm_audit_7_plus/no_vuln.json", encoding="utf-8") parser = NpmAudit7PlusParser() findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(0, len(findings)) def test_npm_audit_7_plus_parser_with_one_vuln_has_one_findings(self): - testfile = open(path.join(Path(__file__).parent, "../scans/npm_audit_7_plus/one_vuln.json"), encoding="utf-8") + testfile = open(Path(__file__).parent / "../scans/npm_audit_7_plus/one_vuln.json", encoding="utf-8") parser = 
NpmAudit7PlusParser() findings = parser.get_findings(testfile, Test()) testfile.close() @@ -29,7 +28,7 @@ def test_npm_audit_7_plus_parser_with_one_vuln_has_one_findings(self): self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:L", finding.cvssv3) def test_npm_audit_7_plus_parser_with_many_vuln_has_many_findings(self): - testfile = open(path.join(Path(__file__).parent, "../scans/npm_audit_7_plus/many_vulns.json"), encoding="utf-8") + testfile = open(Path(__file__).parent / "../scans/npm_audit_7_plus/many_vulns.json", encoding="utf-8") parser = NpmAudit7PlusParser() findings = parser.get_findings(testfile, Test()) testfile.close() @@ -43,7 +42,7 @@ def test_npm_audit_7_plus_parser_with_many_vuln_has_many_findings(self): self.assertEqual("@vercel/fun", finding.title) def test_npm_audit_7_plus_parser_issue_10801(self): - testfile = open(path.join(Path(__file__).parent, "../scans/npm_audit_7_plus/issue_10801.json"), encoding="utf-8") + testfile = open(Path(__file__).parent / "../scans/npm_audit_7_plus/issue_10801.json", encoding="utf-8") parser = NpmAudit7PlusParser() findings = parser.get_findings(testfile, Test()) testfile.close() diff --git a/unittests/tools/test_npm_audit_parser.py b/unittests/tools/test_npm_audit_parser.py index c15da91da7..6d03160ef3 100644 --- a/unittests/tools/test_npm_audit_parser.py +++ b/unittests/tools/test_npm_audit_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Test @@ -8,13 +7,13 @@ class TestNpmAuditParser(DojoTestCase): def test_npm_audit_parser_with_no_vuln_has_no_findings(self): - with open(path.join(Path(__file__).parent, "../scans/npm_audit/no_vuln.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/npm_audit/no_vuln.json", encoding="utf-8") as testfile: parser = NpmAuditParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(0, len(findings)) def test_npm_audit_parser_with_one_criticle_vuln_has_one_findings(self): - with 
open(path.join(Path(__file__).parent, "../scans/npm_audit/one_vuln.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/npm_audit/one_vuln.json", encoding="utf-8") as testfile: parser = NpmAuditParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -23,7 +22,7 @@ def test_npm_audit_parser_with_one_criticle_vuln_has_one_findings(self): self.assertEqual("1.9.2", findings[0].component_version) def test_npm_audit_parser_with_many_vuln_has_many_findings(self): - with open(path.join(Path(__file__).parent, "../scans/npm_audit/many_vuln.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/npm_audit/many_vuln.json", encoding="utf-8") as testfile: parser = NpmAuditParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(5, len(findings)) @@ -40,7 +39,7 @@ def test_npm_audit_parser_with_many_vuln_has_many_findings(self): def test_npm_audit_parser_multiple_cwes_per_finding(self): # cwes formatted as escaped list: "cwe": "[\"CWE-346\",\"CWE-453\"]", - with open(path.join(Path(__file__).parent, "../scans/npm_audit/multiple_cwes.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/npm_audit/multiple_cwes.json", encoding="utf-8") as testfile: parser = NpmAuditParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(41, len(findings)) @@ -49,14 +48,14 @@ def test_npm_audit_parser_multiple_cwes_per_finding(self): def test_npm_audit_parser_multiple_cwes_per_finding_list(self): # cwes formatted as proper list: "cwe": ["CWE-918","CWE-1333"], - with open(path.join(Path(__file__).parent, "../scans/npm_audit/multiple_cwes2.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/npm_audit/multiple_cwes2.json", encoding="utf-8") as testfile: parser = NpmAuditParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(6, len(findings)) self.assertEqual(918, findings[0].cwe) def 
test_npm_audit_parser_with_one_criticle_vuln_has_null_as_cwe(self): - with open(path.join(Path(__file__).parent, "../scans/npm_audit/cwe_null.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/npm_audit/cwe_null.json", encoding="utf-8") as testfile: parser = NpmAuditParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -66,7 +65,7 @@ def test_npm_audit_parser_with_one_criticle_vuln_has_null_as_cwe(self): def test_npm_audit_parser_empty_with_error(self): with self.assertRaises(ValueError) as context: - with open(path.join(Path(__file__).parent, "../scans/npm_audit/empty_with_error.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/npm_audit/empty_with_error.json", encoding="utf-8") as testfile: parser = NpmAuditParser() parser.get_findings(testfile, Test()) @@ -75,7 +74,7 @@ def test_npm_audit_parser_empty_with_error(self): def test_npm_audit_parser_many_vuln_npm7(self): with self.assertRaises(ValueError) as context: - with open(path.join(Path(__file__).parent, "../scans/npm_audit/many_vuln_npm7.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/npm_audit/many_vuln_npm7.json", encoding="utf-8") as testfile: parser = NpmAuditParser() parser.get_findings(testfile, Test()) @@ -91,7 +90,7 @@ def test_npm_audit_censored_hash(self): self.assertEqual(censored_path, "censored_by_npm_audit>censored_by_npm_audit>lodash") def test_npm_audit_parser_issue_7897(self): - with open(path.join(Path(__file__).parent, "../scans/npm_audit/issue_7897.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/npm_audit/issue_7897.json", encoding="utf-8") as testfile: parser = NpmAuditParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(5, len(findings)) diff --git a/unittests/tools/test_ort_parser.py b/unittests/tools/test_ort_parser.py index d42098d184..b222f900ad 100644 --- 
a/unittests/tools/test_ort_parser.py +++ b/unittests/tools/test_ort_parser.py @@ -11,7 +11,7 @@ def test_parse_without_file_has_no_finding(self): def test_parse_file_has_many_finding_one_tool(self): testfile = open( - get_unit_tests_path() + "/scans/ort/evaluated-model-reporter-test-output.json", encoding="utf-8", + get_unit_tests_path() / "scans/ort/evaluated-model-reporter-test-output.json", encoding="utf-8", ) parser = OrtParser() findings = parser.get_findings(testfile, Test()) diff --git a/unittests/tools/test_ossindex_devaudit_parser.py b/unittests/tools/test_ossindex_devaudit_parser.py index 9b11e19cee..13d5246857 100644 --- a/unittests/tools/test_ossindex_devaudit_parser.py +++ b/unittests/tools/test_ossindex_devaudit_parser.py @@ -7,7 +7,7 @@ class TestOssIndexDevauditParser(DojoTestCase): def test_ossindex_devaudit_parser_with_no_vulns_has_no_findings(self): testfile = open( - get_unit_tests_path() + "/scans/ossindex_devaudit/ossindex_devaudit_no_vuln.json", encoding="utf-8", + get_unit_tests_path() / "scans/ossindex_devaudit/ossindex_devaudit_no_vuln.json", encoding="utf-8", ) parser = OssIndexDevauditParser() findings = parser.get_findings(testfile, Test()) @@ -16,7 +16,7 @@ def test_ossindex_devaudit_parser_with_no_vulns_has_no_findings(self): def test_ossindex_devaudit_parser_with_one_critical_vuln_has_one_finding(self): testfile = open( - get_unit_tests_path() + "/scans/ossindex_devaudit/ossindex_devaudit_one_vuln.json", encoding="utf-8", + get_unit_tests_path() / "scans/ossindex_devaudit/ossindex_devaudit_one_vuln.json", encoding="utf-8", ) parser = OssIndexDevauditParser() findings = parser.get_findings(testfile, Test()) @@ -25,7 +25,7 @@ def test_ossindex_devaudit_parser_with_one_critical_vuln_has_one_finding(self): def test_ossindex_devaudit_parser_with_multiple_vulns_has_multiple_finding(self): testfile = open( - get_unit_tests_path() + "/scans/ossindex_devaudit/ossindex_devaudit_multiple_vulns.json", encoding="utf-8", + get_unit_tests_path() / 
"scans/ossindex_devaudit/ossindex_devaudit_multiple_vulns.json", encoding="utf-8", ) parser = OssIndexDevauditParser() findings = parser.get_findings(testfile, Test()) @@ -34,7 +34,7 @@ def test_ossindex_devaudit_parser_with_multiple_vulns_has_multiple_finding(self) def test_ossindex_devaudit_parser_with_no_cve_returns_info_severity(self): testfile = open( - get_unit_tests_path() + "/scans/ossindex_devaudit/ossindex_devaudit_vuln_no_cvssscore.json", encoding="utf-8", + get_unit_tests_path() / "scans/ossindex_devaudit/ossindex_devaudit_vuln_no_cvssscore.json", encoding="utf-8", ) parser = OssIndexDevauditParser() findings = parser.get_findings(testfile, Test()) @@ -43,7 +43,7 @@ def test_ossindex_devaudit_parser_with_no_cve_returns_info_severity(self): def test_ossindex_devaudit_parser_with_reference_shows_reference(self): testfile = open( - get_unit_tests_path() + "/scans/ossindex_devaudit/ossindex_devaudit_one_vuln.json", encoding="utf-8", + get_unit_tests_path() / "scans/ossindex_devaudit/ossindex_devaudit_one_vuln.json", encoding="utf-8", ) parser = OssIndexDevauditParser() findings = parser.get_findings(testfile, Test()) @@ -55,7 +55,7 @@ def test_ossindex_devaudit_parser_with_reference_shows_reference(self): def test_ossindex_devaudit_parser_with_empty_reference_shows_empty_reference(self): testfile = open( - get_unit_tests_path() + "/scans/ossindex_devaudit/ossindex_devaudit_empty_reference.json", encoding="utf-8", + get_unit_tests_path() / "scans/ossindex_devaudit/ossindex_devaudit_empty_reference.json", encoding="utf-8", ) parser = OssIndexDevauditParser() findings = parser.get_findings(testfile, Test()) @@ -66,7 +66,7 @@ def test_ossindex_devaudit_parser_with_empty_reference_shows_empty_reference(sel def test_ossindex_devaudit_parser_with_missing_reference_shows_empty(self): testfile = open( - get_unit_tests_path() + "/scans/ossindex_devaudit/ossindex_devaudit_missing_reference.json", encoding="utf-8", + get_unit_tests_path() / 
"scans/ossindex_devaudit/ossindex_devaudit_missing_reference.json", encoding="utf-8", ) parser = OssIndexDevauditParser() findings = parser.get_findings(testfile, Test()) @@ -77,7 +77,7 @@ def test_ossindex_devaudit_parser_with_missing_reference_shows_empty(self): def test_ossindex_devaudit_parser_with_missing_cwe_shows_1035(self): testfile = open( - get_unit_tests_path() + "/scans/ossindex_devaudit/ossindex_devaudit_missing_cwe.json", encoding="utf-8", + get_unit_tests_path() / "scans/ossindex_devaudit/ossindex_devaudit_missing_cwe.json", encoding="utf-8", ) parser = OssIndexDevauditParser() findings = parser.get_findings(testfile, Test()) @@ -88,7 +88,7 @@ def test_ossindex_devaudit_parser_with_missing_cwe_shows_1035(self): def test_ossindex_devaudit_parser_with_null_cwe_shows_1035(self): testfile = open( - get_unit_tests_path() + "/scans/ossindex_devaudit/ossindex_devaudit_null_cwe.json", encoding="utf-8", + get_unit_tests_path() / "scans/ossindex_devaudit/ossindex_devaudit_null_cwe.json", encoding="utf-8", ) parser = OssIndexDevauditParser() findings = parser.get_findings(testfile, Test()) @@ -99,7 +99,7 @@ def test_ossindex_devaudit_parser_with_null_cwe_shows_1035(self): def test_ossindex_devaudit_parser_with_empty_cwe_shows_1035(self): testfile = open( - get_unit_tests_path() + "/scans/ossindex_devaudit/ossindex_devaudit_empty_cwe.json", encoding="utf-8", + get_unit_tests_path() / "scans/ossindex_devaudit/ossindex_devaudit_empty_cwe.json", encoding="utf-8", ) parser = OssIndexDevauditParser() findings = parser.get_findings(testfile, Test()) @@ -110,7 +110,7 @@ def test_ossindex_devaudit_parser_with_empty_cwe_shows_1035(self): def test_ossindex_devaudit_parser_get_severity_shows_info(self): testfile = open( - get_unit_tests_path() + "/scans/ossindex_devaudit/ossindex_devaudit_severity_info.json", encoding="utf-8", + get_unit_tests_path() / "scans/ossindex_devaudit/ossindex_devaudit_severity_info.json", encoding="utf-8", ) parser = OssIndexDevauditParser() 
findings = parser.get_findings(testfile, Test()) @@ -121,7 +121,7 @@ def test_ossindex_devaudit_parser_get_severity_shows_info(self): def test_ossindex_devaudit_parser_get_severity_shows_critical(self): testfile = open( - get_unit_tests_path() + "/scans/ossindex_devaudit/ossindex_devaudit_severity_critical.json", encoding="utf-8", + get_unit_tests_path() / "scans/ossindex_devaudit/ossindex_devaudit_severity_critical.json", encoding="utf-8", ) parser = OssIndexDevauditParser() findings = parser.get_findings(testfile, Test()) @@ -132,7 +132,7 @@ def test_ossindex_devaudit_parser_get_severity_shows_critical(self): def test_ossindex_devaudit_parser_get_severity_shows_high(self): testfile = open( - get_unit_tests_path() + "/scans/ossindex_devaudit/ossindex_devaudit_severity_high.json", encoding="utf-8", + get_unit_tests_path() / "scans/ossindex_devaudit/ossindex_devaudit_severity_high.json", encoding="utf-8", ) parser = OssIndexDevauditParser() findings = parser.get_findings(testfile, Test()) @@ -143,7 +143,7 @@ def test_ossindex_devaudit_parser_get_severity_shows_high(self): def test_ossindex_devaudit_parser_get_severity_shows_medium(self): testfile = open( - get_unit_tests_path() + "/scans/ossindex_devaudit/ossindex_devaudit_severity_medium.json", encoding="utf-8", + get_unit_tests_path() / "scans/ossindex_devaudit/ossindex_devaudit_severity_medium.json", encoding="utf-8", ) parser = OssIndexDevauditParser() findings = parser.get_findings(testfile, Test()) @@ -154,7 +154,7 @@ def test_ossindex_devaudit_parser_get_severity_shows_medium(self): def test_ossindex_devaudit_parser_get_severity_shows_low(self): testfile = open( - get_unit_tests_path() + "/scans/ossindex_devaudit/ossindex_devaudit_severity_low.json", encoding="utf-8", + get_unit_tests_path() / "scans/ossindex_devaudit/ossindex_devaudit_severity_low.json", encoding="utf-8", ) parser = OssIndexDevauditParser() findings = parser.get_findings(testfile, Test()) diff --git 
a/unittests/tools/test_osv_scanner_parser.py b/unittests/tools/test_osv_scanner_parser.py index 196ff09336..119c22006d 100644 --- a/unittests/tools/test_osv_scanner_parser.py +++ b/unittests/tools/test_osv_scanner_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Test @@ -8,13 +7,13 @@ class TestOSVScannerParser(DojoTestCase): def test_no_findings(self): - with open(path.join(Path(__file__).parent, "../scans/osv_scanner/no_findings.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/osv_scanner/no_findings.json", encoding="utf-8") as testfile: parser = OSVScannerParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(0, len(findings)) def test_some_findings(self): - with open(path.join(Path(__file__).parent, "../scans/osv_scanner/some_findings.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/osv_scanner/some_findings.json", encoding="utf-8") as testfile: parser = OSVScannerParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -26,7 +25,7 @@ def test_some_findings(self): self.assertEqual(finding.severity, "Low") def test_many_findings(self): - with open(path.join(Path(__file__).parent, "../scans/osv_scanner/many_findings.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/osv_scanner/many_findings.json", encoding="utf-8") as testfile: parser = OSVScannerParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(66, len(findings)) diff --git a/unittests/tools/test_outpost24_parser.py b/unittests/tools/test_outpost24_parser.py index fd132e649b..3e66f9d252 100644 --- a/unittests/tools/test_outpost24_parser.py +++ b/unittests/tools/test_outpost24_parser.py @@ -21,10 +21,10 @@ def assert_file_has_n_items(self, filename, item_count): self.assertEqual("CVE-2019-9315", findings[0].unsaved_vulnerability_ids[0]) def test_parser_no_items(self): - 
self.assert_file_has_n_items(get_unit_tests_path() + "/scans/outpost24/none.xml", 0) + self.assert_file_has_n_items(get_unit_tests_path() / "scans/outpost24/none.xml", 0) def test_parser_one_item(self): - self.assert_file_has_n_items(get_unit_tests_path() + "/scans/outpost24/one.xml", 1) + self.assert_file_has_n_items(get_unit_tests_path() / "scans/outpost24/one.xml", 1) def test_parser_sample_items(self): - self.assert_file_has_n_items(get_unit_tests_path() + "/scans/outpost24/sample.xml", 24) + self.assert_file_has_n_items(get_unit_tests_path() / "scans/outpost24/sample.xml", 24) diff --git a/unittests/tools/test_php_symfony_security_check_parser.py b/unittests/tools/test_php_symfony_security_check_parser.py index 6786d54b9c..697a146db3 100644 --- a/unittests/tools/test_php_symfony_security_check_parser.py +++ b/unittests/tools/test_php_symfony_security_check_parser.py @@ -7,7 +7,7 @@ class TestPhpSymfonySecurityCheckerParser(DojoTestCase): def test_php_symfony_security_check_parser_with_no_vuln_has_no_findings(self): testfile = open( - get_unit_tests_path() + "/scans/php_symfony_security_check/php_symfony_no_vuln.json", encoding="utf-8", + get_unit_tests_path() / "scans/php_symfony_security_check/php_symfony_no_vuln.json", encoding="utf-8", ) parser = PhpSymfonySecurityCheckParser() findings = parser.get_findings(testfile, Test()) @@ -19,7 +19,7 @@ def test_php_symfony_security_check_parser_with_one_criticle_vuln_has_one_findin self, ): testfile = open( - get_unit_tests_path() + "/scans/php_symfony_security_check/php_symfony_one_vuln.json", encoding="utf-8", + get_unit_tests_path() / "scans/php_symfony_security_check/php_symfony_one_vuln.json", encoding="utf-8", ) parser = PhpSymfonySecurityCheckParser() findings = parser.get_findings(testfile, Test()) @@ -28,7 +28,7 @@ def test_php_symfony_security_check_parser_with_one_criticle_vuln_has_one_findin def test_php_symfony_security_check_parser_with_many_vuln_has_many_findings(self): testfile = open( - 
get_unit_tests_path() + "/scans/php_symfony_security_check/php_symfony_many_vuln.json", encoding="utf-8", + get_unit_tests_path() / "scans/php_symfony_security_check/php_symfony_many_vuln.json", encoding="utf-8", ) parser = PhpSymfonySecurityCheckParser() findings = parser.get_findings(testfile, Test()) diff --git a/unittests/tools/test_qualys_hacker_guardian_parser.py b/unittests/tools/test_qualys_hacker_guardian_parser.py index 47bd820ffb..2fa2e89e40 100644 --- a/unittests/tools/test_qualys_hacker_guardian_parser.py +++ b/unittests/tools/test_qualys_hacker_guardian_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Test @@ -9,13 +8,13 @@ class TestQualysHackerGuardianParser(DojoTestCase): def test_qualys_hacker_guardian_parser_with_no_findings(self): - with open(path.join(Path(__file__).parent, "../scans/qualys_hacker_guardian/zero_finding.csv"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/qualys_hacker_guardian/zero_finding.csv", encoding="utf-8") as testfile: parser = QualysHackerGuardianParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(0, len(findings)) def test_qualys_hacker_guardian_parser_with_one_findings(self): - with open(path.join(Path(__file__).parent, "../scans/qualys_hacker_guardian/one_finding.csv"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/qualys_hacker_guardian/one_finding.csv", encoding="utf-8") as testfile: parser = QualysHackerGuardianParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -26,7 +25,7 @@ def test_qualys_hacker_guardian_parser_with_one_findings(self): self.assertEqual(len(finding.unsaved_endpoints), 2) def test_qualys_hacker_guardian_parser_with_many_findings(self): - with open(path.join(Path(__file__).parent, "../scans/qualys_hacker_guardian/many_finding.csv"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / 
"../scans/qualys_hacker_guardian/many_finding.csv", encoding="utf-8") as testfile: parser = QualysHackerGuardianParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(3, len(findings)) diff --git a/unittests/tools/test_qualys_infrascan_webgui_parser.py b/unittests/tools/test_qualys_infrascan_webgui_parser.py index b76aeba84d..b44a016859 100644 --- a/unittests/tools/test_qualys_infrascan_webgui_parser.py +++ b/unittests/tools/test_qualys_infrascan_webgui_parser.py @@ -11,7 +11,7 @@ class TestQualysInfrascanWebguiParser(DojoTestCase): def test_parse_file_with_no_vuln_has_no_findings(self): with open( - get_unit_tests_path() + "/scans/qualys_infrascan_webgui/qualys_infrascan_webgui_0.xml", encoding="utf-8", + get_unit_tests_path() / "scans/qualys_infrascan_webgui/qualys_infrascan_webgui_0.xml", encoding="utf-8", ) as testfile: parser = QualysInfrascanWebguiParser() findings = parser.get_findings(testfile, Test()) @@ -21,7 +21,7 @@ def test_parse_file_with_no_vuln_has_no_findings(self): # + also verify data with one test def test_parse_file_with_one_vuln_has_one_findings(self): with open( - get_unit_tests_path() + "/scans/qualys_infrascan_webgui/qualys_infrascan_webgui_1.xml", encoding="utf-8", + get_unit_tests_path() / "scans/qualys_infrascan_webgui/qualys_infrascan_webgui_1.xml", encoding="utf-8", ) as testfile: parser = QualysInfrascanWebguiParser() findings = parser.get_findings(testfile, Test()) @@ -38,7 +38,7 @@ def test_parse_file_with_one_vuln_has_one_findings(self): # Sample with Multiple Test def test_parse_file_with_multiple_vuln_has_multiple_findings(self): with open( - get_unit_tests_path() + "/scans/qualys_infrascan_webgui/qualys_infrascan_webgui_multiple.xml", encoding="utf-8", + get_unit_tests_path() / "scans/qualys_infrascan_webgui/qualys_infrascan_webgui_multiple.xml", encoding="utf-8", ) as testfile: parser = QualysInfrascanWebguiParser() findings = parser.get_findings(testfile, Test()) @@ -61,7 +61,7 @@ def 
test_parse_file_with_multiple_vuln_has_multiple_findings(self): # Sample with Multiple Test def test_parse_file_with_finding_no_dns(self): with open( - get_unit_tests_path() + "/scans/qualys_infrascan_webgui/qualys_infrascan_webgui_3.xml", encoding="utf-8", + get_unit_tests_path() / "scans/qualys_infrascan_webgui/qualys_infrascan_webgui_3.xml", encoding="utf-8", ) as testfile: parser = QualysInfrascanWebguiParser() findings = parser.get_findings(testfile, Test()) diff --git a/unittests/tools/test_qualys_parser.py b/unittests/tools/test_qualys_parser.py index 15840f8561..b1120de895 100644 --- a/unittests/tools/test_qualys_parser.py +++ b/unittests/tools/test_qualys_parser.py @@ -18,7 +18,7 @@ def test_parse_file_with_no_vuln_has_no_findings(self): def parse_file_with_no_vuln_has_no_findings(self): with open( - get_unit_tests_path() + "/scans/qualys/empty.xml", encoding="utf-8", + get_unit_tests_path() / "scans/qualys/empty.xml", encoding="utf-8", ) as testfile: parser = QualysParser() findings = parser.get_findings(testfile, Test()) @@ -35,7 +35,7 @@ def test_parse_file_with_multiple_vuln_has_multiple_findings(self): def parse_file_with_multiple_vuln_has_multiple_findings(self): with open( - get_unit_tests_path() + "/scans/qualys/Qualys_Sample_Report.xml", encoding="utf-8", + get_unit_tests_path() / "scans/qualys/Qualys_Sample_Report.xml", encoding="utf-8", ) as testfile: parser = QualysParser() findings = parser.get_findings(testfile, Test()) @@ -82,7 +82,7 @@ def test_parse_file_with_no_vuln_has_no_findings_csv(self): def parse_file_with_no_vuln_has_no_findings_csv(self): with open( - get_unit_tests_path() + "/scans/qualys/empty.csv", encoding="utf-8", + get_unit_tests_path() / "scans/qualys/empty.csv", encoding="utf-8", ) as testfile: parser = QualysParser() findings = parser.get_findings(testfile, Test()) @@ -99,7 +99,7 @@ def test_parse_file_with_multiple_vuln_has_multiple_findings_csv(self): def parse_file_with_multiple_vuln_has_multiple_findings_csv(self): 
with open( - get_unit_tests_path() + "/scans/qualys/Qualys_Sample_Report.csv", encoding="utf-8", + get_unit_tests_path() / "scans/qualys/Qualys_Sample_Report.csv", encoding="utf-8", ) as testfile: parser = QualysParser() findings = parser.get_findings(testfile, Test()) @@ -136,7 +136,7 @@ def parse_file_with_multiple_vuln_has_multiple_findings_csv(self): def test_parse_file_monthly_pci_issue6932(self): with open( - get_unit_tests_path() + "/scans/qualys/monthly_pci_issue6932.csv", encoding="utf-8", + get_unit_tests_path() / "scans/qualys/monthly_pci_issue6932.csv", encoding="utf-8", ) as testfile: parser = QualysParser() findings = parser.get_findings(testfile, Test()) @@ -144,7 +144,7 @@ def test_parse_file_monthly_pci_issue6932(self): def test_parse_file_with_cvss_values_and_scores(self): with open( - get_unit_tests_path() + "/scans/qualys/Qualys_Sample_Report.xml", encoding="utf-8", + get_unit_tests_path() / "scans/qualys/Qualys_Sample_Report.xml", encoding="utf-8", ) as testfile: parser = QualysParser() findings = parser.get_findings(testfile, Test()) @@ -179,7 +179,7 @@ def test_parse_file_with_cvss_values_and_scores(self): ) def test_get_severity_legacy(self): - with open(get_unit_tests_path() + "/scans/qualys/Qualys_Sample_Report.xml", encoding="utf-8") as testfile: + with open(get_unit_tests_path() / "scans/qualys/Qualys_Sample_Report.xml", encoding="utf-8") as testfile: parser = QualysParser() findings = parser.get_findings(testfile, Test()) counts = {} @@ -197,7 +197,7 @@ def test_get_severity_legacy(self): @override_settings(USE_QUALYS_LEGACY_SEVERITY_PARSING=False) def test_get_severity(self): - with open(get_unit_tests_path() + "/scans/qualys/Qualys_Sample_Report.xml", encoding="utf-8") as testfile: + with open(get_unit_tests_path() / "scans/qualys/Qualys_Sample_Report.xml", encoding="utf-8") as testfile: parser = QualysParser() findings = parser.get_findings(testfile, Test()) counts = {} diff --git a/unittests/tools/test_qualys_webapp_parser.py 
b/unittests/tools/test_qualys_webapp_parser.py index 71bd295634..5c8f369ef3 100644 --- a/unittests/tools/test_qualys_webapp_parser.py +++ b/unittests/tools/test_qualys_webapp_parser.py @@ -31,7 +31,7 @@ def test_qualys_webapp_parser_with_one_criticle_vuln_has_one_findings(self): def test_qualys_webapp_parser_with_many_vuln_has_many_findings(self): testfile = open( - get_unit_tests_path() + "/scans/qualys_webapp/qualys_webapp_many_vuln.xml", encoding="utf-8", + get_unit_tests_path() / "scans/qualys_webapp/qualys_webapp_many_vuln.xml", encoding="utf-8", ) parser = QualysWebAppParser() findings = parser.get_findings(testfile, Test()) @@ -45,7 +45,7 @@ def test_qualys_webapp_parser_with_many_vuln_has_many_findings(self): def test_qualys_webapp_parser_info_is_vuln(self): testfile = open( - get_unit_tests_path() + "/scans/qualys_webapp/qualys_webapp_many_vuln.xml", encoding="utf-8", + get_unit_tests_path() / "scans/qualys_webapp/qualys_webapp_many_vuln.xml", encoding="utf-8", ) parser = QualysWebAppParser() findings = parser.get_findings(testfile, Test(), enable_weakness=True) @@ -59,7 +59,7 @@ def test_qualys_webapp_parser_info_is_vuln(self): def test_discussion_10239(self): testfile = open( - get_unit_tests_path() + "/scans/qualys_webapp/discussion_10239.xml", encoding="utf-8", + get_unit_tests_path() / "scans/qualys_webapp/discussion_10239.xml", encoding="utf-8", ) parser = QualysWebAppParser() findings = parser.get_findings(testfile, Test(), enable_weakness=True) diff --git a/unittests/tools/test_rapplex_parser.py b/unittests/tools/test_rapplex_parser.py index 97937cbd4d..ecbe8c2f56 100644 --- a/unittests/tools/test_rapplex_parser.py +++ b/unittests/tools/test_rapplex_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Test @@ -9,13 +8,13 @@ class TestRapplexParser(DojoTestCase): def test_rapplex_parser_with_no_findings(self): - with open(path.join(Path(__file__).parent, "../scans/rapplex/rapplex_zero_vul.json"), 
encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/rapplex/rapplex_zero_vul.json", encoding="utf-8") as testfile: parser = RapplexParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(0, len(findings)) def test_rapplex_parser_with_one_findings(self): - with open(path.join(Path(__file__).parent, "../scans/rapplex/rapplex_one_vul.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/rapplex/rapplex_one_vul.json", encoding="utf-8") as testfile: parser = RapplexParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -27,7 +26,7 @@ def test_rapplex_parser_with_one_findings(self): self.assertIsNotNone(finding.references) def test_rapplex_parser_with_many_findings(self): - with open(path.join(Path(__file__).parent, "../scans/rapplex/rapplex_many_vul.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/rapplex/rapplex_many_vul.json", encoding="utf-8") as testfile: parser = RapplexParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(8, len(findings)) diff --git a/unittests/tools/test_sarif_parser.py b/unittests/tools/test_sarif_parser.py index 4b63b2e348..bdc21285cf 100644 --- a/unittests/tools/test_sarif_parser.py +++ b/unittests/tools/test_sarif_parser.py @@ -1,5 +1,4 @@ import datetime -from os import path from pathlib import Path from dojo.models import Finding, Test @@ -18,9 +17,8 @@ def common_checks(self, finding): def test_example_report(self): with open( - path.join( - get_unit_tests_path() + "/scans/sarif/DefectDojo_django-DefectDojo__2020-12-11_13 42 10__export.sarif", - ), encoding="utf-8", + get_unit_tests_path() / "scans/sarif/DefectDojo_django-DefectDojo__2020-12-11_13 42 10__export.sarif", + encoding="utf-8", )as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) @@ -30,7 +28,7 @@ def test_example_report(self): def test_suppression_report(self): """Test 
report file having different suppression definitions""" - with open(path.join(Path(__file__).parent, "../scans/sarif/suppression_test.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/suppression_test.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) for finding in findings: @@ -42,7 +40,7 @@ def test_suppression_report(self): self.assertEqual(True, finding.active) def test_example2_report(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/appendix_k.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/appendix_k.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -70,13 +68,13 @@ def test_example2_report(self): self.common_checks(finding) def test_example_k1_report(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/appendix_k1.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/appendix_k1.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(0, len(findings)) def test_example_k2_report(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/appendix_k2.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/appendix_k2.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -91,7 +89,7 @@ def test_example_k2_report(self): self.common_checks(finding) def test_example_k3_report(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/appendix_k3.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/appendix_k3.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = 
parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -101,7 +99,7 @@ def test_example_k3_report(self): self.common_checks(finding) def test_example_k4_report_mitigation(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/appendix_k4.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/appendix_k4.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -118,7 +116,7 @@ def test_example_k4_report_mitigation(self): def test_example_report_ms(self): """Report file come from Microsoft SARIF sdk on GitHub""" - with open(path.join(Path(__file__).parent, "../scans/sarif/SuppressionTestCurrent.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/SuppressionTestCurrent.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(4, len(findings)) @@ -128,7 +126,7 @@ def test_example_report_ms(self): self.common_checks(finding) def test_example_report_semgrep(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/semgrepowasp-benchmark-sample.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/semgrepowasp-benchmark-sample.sarif", encoding="utf-8") as testfile: test = Test() parser = SarifParser() findings = parser.get_findings(testfile, test) @@ -142,7 +140,7 @@ def test_example_report_semgrep(self): self.common_checks(finding) def test_example_report_scanlift_dependency_check(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/dependency_check.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/dependency_check.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(13, len(findings)) @@ -165,7 +163,7 @@ def 
test_example_report_scanlift_dependency_check(self): self.common_checks(finding) def test_example_report_scanlift_bash(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/bash-report.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/bash-report.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(27, len(findings)) @@ -194,7 +192,7 @@ def test_example_report_scanlift_bash(self): self.common_checks(finding) def test_example_report_taint_python(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/taint-python-report.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/taint-python-report.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(11, len(findings)) @@ -236,7 +234,7 @@ def test_example_report_taint_python(self): def test_njsscan(self): """Generated with opensecurity/njsscan (https://github.com/ajinabraham/njsscan)""" - with open(path.join(Path(__file__).parent, "../scans/sarif/njsscan.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/njsscan.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(3, len(findings)) @@ -263,7 +261,7 @@ def test_njsscan(self): def test_dockle(self): """Generated with goodwithtech/dockle (https://github.com/goodwithtech/dockle)""" - with open(path.join(Path(__file__).parent, "../scans/sarif/dockle_0_3_15.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/dockle_0_3_15.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(4, len(findings)) @@ -311,7 +309,7 @@ def test_dockle(self): ) def test_mobsfscan(self): - with 
open(path.join(Path(__file__).parent, "../scans/sarif/mobsfscan.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/mobsfscan.json", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(18, len(findings)) @@ -319,7 +317,7 @@ def test_mobsfscan(self): self.common_checks(finding) def test_gitleaks(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/gitleaks_7.5.0.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/gitleaks_7.5.0.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(8, len(findings)) @@ -369,7 +367,7 @@ def test_gitleaks(self): self.assertEqual(37, finding.line) def test_flawfinder(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/flawfinder.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/flawfinder.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(53, len(findings)) @@ -445,7 +443,7 @@ def test_flawfinder(self): self.assertEqual("https://cwe.mitre.org/data/definitions/120.html", finding.references) def test_flawfinder_interfacev2(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/flawfinder.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/flawfinder.sarif", encoding="utf-8") as testfile: parser = SarifParser() tests = parser.get_tests(parser.get_scan_types()[0], testfile) self.assertEqual(1, len(tests)) @@ -514,7 +512,7 @@ def test_flawfinder_interfacev2(self): self.assertEqual("https://cwe.mitre.org/data/definitions/120.html", finding.references) def test_appendix_k1_double_interfacev2(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/appendix_k1_double.sarif"), encoding="utf-8") as testfile: + with 
open(Path(__file__).parent / "../scans/sarif/appendix_k1_double.sarif", encoding="utf-8") as testfile: parser = SarifParser() tests = parser.get_tests(parser.get_scan_types()[0], testfile) self.assertEqual(2, len(tests)) @@ -530,7 +528,7 @@ def test_appendix_k1_double_interfacev2(self): self.assertEqual(0, len(findings)) def test_codeql_snippet_report(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/codeQL-output.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/codeQL-output.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(72, len(findings)) @@ -556,7 +554,7 @@ def test_codeql_snippet_report(self): self.common_checks(finding) def test_severity_cvss_from_grype(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/cxf-3.4.6.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/cxf-3.4.6.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(22, len(findings)) @@ -585,14 +583,14 @@ def test_get_fingerprints_hashes(self): ) def test_tags_from_result_properties(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/taint-python-report.sarif"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/taint-python-report.sarif", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) item = findings[0] self.assertEqual(["Scan"], item.tags) def test_severity_in_properties(self): - with open(path.join(Path(__file__).parent, "../scans/sarif/issue_10191.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sarif/issue_10191.json", encoding="utf-8") as testfile: parser = SarifParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(77, len(findings)) diff --git 
a/unittests/tools/test_solar_appscreener_parser.py b/unittests/tools/test_solar_appscreener_parser.py index b6e327c184..8f10a43c7f 100644 --- a/unittests/tools/test_solar_appscreener_parser.py +++ b/unittests/tools/test_solar_appscreener_parser.py @@ -7,7 +7,7 @@ class TestSolarAppscreenerParser(DojoTestCase): def test_solar_appscreener_parser_with_no_vuln_has_no_findings(self): testfile = open( - get_unit_tests_path() + "/scans/solar_appscreener/solar_appscreener_zero_vul.csv", encoding="utf-8") + get_unit_tests_path() / "scans/solar_appscreener/solar_appscreener_zero_vul.csv", encoding="utf-8") parser = SolarAppscreenerParser() findings = parser.get_findings(testfile, Test()) testfile.close() @@ -15,7 +15,7 @@ def test_solar_appscreener_parser_with_no_vuln_has_no_findings(self): def test_solar_appscreener_parser_with_one_criticle_vuln_has_one_findings(self): testfile = open( - get_unit_tests_path() + "/scans/solar_appscreener/solar_appscreener_one_vul.csv", encoding="utf-8") + get_unit_tests_path() / "scans/solar_appscreener/solar_appscreener_one_vul.csv", encoding="utf-8") parser = SolarAppscreenerParser() findings = parser.get_findings(testfile, Test()) testfile.close() @@ -33,7 +33,7 @@ def test_solar_appscreener_parser_with_one_criticle_vuln_has_one_findings(self): def test_solar_appscreener_parser_with_many_vuln_has_many_findings(self): testfile = open( - get_unit_tests_path() + "/scans/solar_appscreener/solar_appscreener_many_vul.csv", encoding="utf-8") + get_unit_tests_path() / "scans/solar_appscreener/solar_appscreener_many_vul.csv", encoding="utf-8") parser = SolarAppscreenerParser() findings = parser.get_findings(testfile, Test()) testfile.close() diff --git a/unittests/tools/test_sonarqube_parser.py b/unittests/tools/test_sonarqube_parser.py index ef4912510b..fcdf4b972e 100644 --- a/unittests/tools/test_sonarqube_parser.py +++ b/unittests/tools/test_sonarqube_parser.py @@ -21,7 +21,7 @@ def 
test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_no_findings self, ): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/sonar-no-finding.html", + get_unit_tests_path() / "scans/sonarqube/sonar-no-finding.html", ) parser = SonarQubeParser() findings = parser.get_findings(my_file_handle, test) @@ -31,7 +31,7 @@ def test_file_name_aggregated_parse_file_with_no_vulnerabilities_has_no_findings # SonarQube Scan detailed - no finding def test_detailed_parse_file_with_no_vulnerabilities_has_no_findings(self): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/sonar-no-finding.html", + get_unit_tests_path() / "scans/sonarqube/sonar-no-finding.html", ) parser = SonarQubeParser() parser.set_mode("detailed") @@ -44,7 +44,7 @@ def test_file_name_aggregated_parse_file_with_single_vulnerability_has_single_fi self, ): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/sonar-single-finding.html", + get_unit_tests_path() / "scans/sonarqube/sonar-single-finding.html", ) parser = SonarQubeParser() findings = parser.get_findings(my_file_handle, test) @@ -93,7 +93,7 @@ def test_file_name_aggregated_parse_file_with_single_vulnerability_has_single_fi def test_detailed_parse_file_with_single_vulnerability_has_single_finding(self): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/sonar-single-finding.html", + get_unit_tests_path() / "scans/sonarqube/sonar-single-finding.html", ) parser = SonarQubeParser() parser.set_mode("detailed") @@ -141,7 +141,7 @@ def test_detailed_parse_file_with_multiple_vulnerabilities_has_multiple_findings self, ): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/sonar-6-findings.html", + get_unit_tests_path() / "scans/sonarqube/sonar-6-findings.html", ) parser = SonarQubeParser() 
parser.set_mode("detailed") @@ -154,7 +154,7 @@ def test_file_name_aggregated_parse_file_with_multiple_vulnerabilities_has_multi self, ): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/sonar-6-findings.html", + get_unit_tests_path() / "scans/sonarqube/sonar-6-findings.html", ) parser = SonarQubeParser() parser.set_mode("detailed") @@ -167,7 +167,7 @@ def test_file_name_aggregated_parse_file_with_multiple_vulnerabilities_has_multi def test_detailed_parse_file_with_table_in_table(self): """Test parsing when the vulnerability details include a table, with tr and td that should be ignored when looking for list of rules""" my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/sonar-table-in-table.html", + get_unit_tests_path() / "scans/sonarqube/sonar-table-in-table.html", ) parser = SonarQubeParser() parser.set_mode("detailed") @@ -247,7 +247,7 @@ def test_detailed_parse_file_with_table_in_table(self): def test_detailed_parse_file_with_rule_undefined(self): """The vulnerability's rule is not in the list of rules""" my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/sonar-rule-undefined.html", + get_unit_tests_path() / "scans/sonarqube/sonar-rule-undefined.html", ) parser = SonarQubeParser() parser.set_mode("detailed") @@ -293,7 +293,7 @@ def test_detailed_parse_file_with_rule_undefined(self): # SonarQube Scan - report with aggregations to be made def test_file_name_aggregated_parse_file_with_vuln_on_same_filename(self): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/sonar-4-findings-3-to-aggregate.html", + get_unit_tests_path() / "scans/sonarqube/sonar-4-findings-3-to-aggregate.html", ) parser = SonarQubeParser() findings = parser.get_findings(my_file_handle, test) @@ -356,7 +356,7 @@ def test_file_name_aggregated_parse_file_with_vuln_on_same_filename(self): # 
SonarQube Scan detailed - report with aggregations to be made def test_detailed_parse_file_with_vuln_on_same_filename(self): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/sonar-4-findings-3-to-aggregate.html", + get_unit_tests_path() / "scans/sonarqube/sonar-4-findings-3-to-aggregate.html", ) parser = SonarQubeParser() parser.set_mode("detailed") @@ -370,7 +370,7 @@ def test_detailed_parse_file_with_vuln_issue_3725(self): SonarQube Scan detailed - report that crash see: https://github.com/DefectDojo/django-DefectDojo/issues/3725 """ - my_file_handle, _product, _engagement, test = self.init(get_unit_tests_path() + "/scans/sonarqube/sonar.html") + my_file_handle, _product, _engagement, test = self.init(get_unit_tests_path() / "scans/sonarqube/sonar.html") parser = SonarQubeParser() parser.set_mode("detailed") findings = parser.get_findings(my_file_handle, test) @@ -385,7 +385,7 @@ def test_detailed_parse_file_table_has_whitespace(self): Data table will have some whitespaces, parser should strip it before compare or use these properties. 
""" my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/sonar-table-in-table-with-whitespace.html", + get_unit_tests_path() / "scans/sonarqube/sonar-table-in-table-with-whitespace.html", ) parser = SonarQubeParser() parser.set_mode("detailed") @@ -464,7 +464,7 @@ def test_detailed_parse_file_table_has_whitespace(self): def test_detailed_parse_json_file_with_no_vulnerabilities_has_no_findings(self): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/sonar-no-finding.json", + get_unit_tests_path() / "scans/sonarqube/sonar-no-finding.json", ) parser = SonarQubeParser() parser.set_mode("detailed") @@ -474,7 +474,7 @@ def test_detailed_parse_json_file_with_no_vulnerabilities_has_no_findings(self): def test_detailed_parse_json_file_with_single_vulnerability_has_single_finding(self): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/sonar-single-finding.json", + get_unit_tests_path() / "scans/sonarqube/sonar-single-finding.json", ) parser = SonarQubeParser() parser.set_mode("detailed") @@ -553,7 +553,7 @@ def test_detailed_parse_json_file_with_single_vulnerability_has_single_finding(s def test_detailed_parse_json_file_with_multiple_vulnerabilities_has_multiple_findings(self): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/sonar-6-findings.json", + get_unit_tests_path() / "scans/sonarqube/sonar-6-findings.json", ) parser = SonarQubeParser() parser.set_mode("detailed") @@ -565,7 +565,7 @@ def test_detailed_parse_json_file_with_multiple_vulnerabilities_has_multiple_fin def test_parse_json_file_from_api_with_multiple_findings_json(self): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/findings_over_api.json", + get_unit_tests_path() / "scans/sonarqube/findings_over_api.json", ) parser = SonarQubeParser() findings = 
parser.get_findings(my_file_handle, test) @@ -597,7 +597,7 @@ def test_parse_json_file_from_api_with_multiple_findings_json(self): def test_parse_json_file_from_api_with_multiple_findings_hotspots_json(self): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/findings_over_api_hotspots.json", + get_unit_tests_path() / "scans/sonarqube/findings_over_api_hotspots.json", ) parser = SonarQubeParser() findings = parser.get_findings(my_file_handle, test) @@ -616,7 +616,7 @@ def test_parse_json_file_from_api_with_multiple_findings_hotspots_json(self): def test_parse_json_file_from_api_with_empty_json(self): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/findings_over_api_empty.json", + get_unit_tests_path() / "scans/sonarqube/findings_over_api_empty.json", ) parser = SonarQubeParser() findings = parser.get_findings(my_file_handle, test) @@ -625,7 +625,7 @@ def test_parse_json_file_from_api_with_empty_json(self): def test_parse_json_file_from_api_with_emppty_zip(self): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/empty_zip.zip", + get_unit_tests_path() / "scans/sonarqube/empty_zip.zip", ) parser = SonarQubeParser() findings = parser.get_findings(my_file_handle, test) @@ -634,7 +634,7 @@ def test_parse_json_file_from_api_with_emppty_zip(self): def test_parse_json_file_from_api_with_multiple_findings_zip(self): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + "/scans/sonarqube/findings_over_api.zip", + get_unit_tests_path() / "scans/sonarqube/findings_over_api.zip", ) parser = SonarQubeParser() findings = parser.get_findings(my_file_handle, test) @@ -654,7 +654,7 @@ def test_parse_json_file_from_api_with_multiple_findings_zip(self): def test_parse_json_file_issue_10150(self): my_file_handle, _product, _engagement, test = self.init( - get_unit_tests_path() + 
"/scans/sonarqube/issue_10150.json", + get_unit_tests_path() / "scans/sonarqube/issue_10150.json", ) parser = SonarQubeParser() findings = parser.get_findings(my_file_handle, test) diff --git a/unittests/tools/test_spotbugs_parser.py b/unittests/tools/test_spotbugs_parser.py index 7a549f3639..629b9d4ea0 100644 --- a/unittests/tools/test_spotbugs_parser.py +++ b/unittests/tools/test_spotbugs_parser.py @@ -6,35 +6,35 @@ class TestSpotbugsParser(DojoTestCase): def test_no_findings(self): parser = SpotbugsParser() - findings = parser.get_findings(get_unit_tests_path() + "/scans/spotbugs/no_finding.xml", Test()) + findings = parser.get_findings(get_unit_tests_path() / "scans/spotbugs/no_finding.xml", Test()) self.assertEqual(0, len(findings)) def test_parse_many_finding(self): parser = SpotbugsParser() - findings = parser.get_findings(get_unit_tests_path() + "/scans/spotbugs/many_findings.xml", Test()) + findings = parser.get_findings(get_unit_tests_path() / "scans/spotbugs/many_findings.xml", Test()) self.assertEqual(81, len(findings)) def test_find_sast_source_line(self): parser = SpotbugsParser() - findings = parser.get_findings(get_unit_tests_path() + "/scans/spotbugs/many_findings.xml", Test()) + findings = parser.get_findings(get_unit_tests_path() / "scans/spotbugs/many_findings.xml", Test()) test_finding = findings[0] self.assertEqual(95, test_finding.sast_source_line) def test_find_sast_source_path(self): parser = SpotbugsParser() - findings = parser.get_findings(get_unit_tests_path() + "/scans/spotbugs/many_findings.xml", Test()) + findings = parser.get_findings(get_unit_tests_path() / "scans/spotbugs/many_findings.xml", Test()) test_finding = findings[0] self.assertEqual("securitytest/command/IdentityFunctionCommandInjection.kt", test_finding.sast_source_file_path) def test_find_source_line(self): parser = SpotbugsParser() - findings = parser.get_findings(get_unit_tests_path() + "/scans/spotbugs/many_findings.xml", Test()) + findings = 
parser.get_findings(get_unit_tests_path() / "scans/spotbugs/many_findings.xml", Test()) test_finding = findings[0] self.assertEqual(95, test_finding.line) def test_find_file_path(self): parser = SpotbugsParser() - findings = parser.get_findings(get_unit_tests_path() + "/scans/spotbugs/many_findings.xml", Test()) + findings = parser.get_findings(get_unit_tests_path() / "scans/spotbugs/many_findings.xml", Test()) test_finding = findings[0] self.assertEqual("securitytest/command/IdentityFunctionCommandInjection.kt", test_finding.file_path) @@ -71,7 +71,7 @@ def test_file(self): def test_description(self): parser = SpotbugsParser() - findings = parser.get_findings(get_unit_tests_path() + "/scans/spotbugs/many_findings.xml", Test()) + findings = parser.get_findings(get_unit_tests_path() / "scans/spotbugs/many_findings.xml", Test()) test_finding = findings[0] # Test if line 13 is correct self.assertEqual( @@ -80,14 +80,14 @@ def test_description(self): def test_mitigation(self): parser = SpotbugsParser() - findings = parser.get_findings(get_unit_tests_path() + "/scans/spotbugs/many_findings.xml", Test()) + findings = parser.get_findings(get_unit_tests_path() / "scans/spotbugs/many_findings.xml", Test()) test_finding = findings[0] # Test if line 8 is correct self.assertEqual("#### Example", test_finding.mitigation.splitlines()[7]) def test_references(self): parser = SpotbugsParser() - findings = parser.get_findings(get_unit_tests_path() + "/scans/spotbugs/many_findings.xml", Test()) + findings = parser.get_findings(get_unit_tests_path() / "scans/spotbugs/many_findings.xml", Test()) test_finding = findings[0] # Test if line 2 is correct self.assertEqual( diff --git a/unittests/tools/test_sslyze_parser.py b/unittests/tools/test_sslyze_parser.py index 05349de67a..814304cab0 100644 --- a/unittests/tools/test_sslyze_parser.py +++ b/unittests/tools/test_sslyze_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Test @@ -8,19 +7,19 @@ 
class TestSslyzeJSONParser(DojoTestCase): def test_parse_json_file_with_one_target_has_zero_vuln_old(self): - with open(path.join(Path(__file__).parent, "../scans/sslyze/one_target_zero_vuln_old.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sslyze/one_target_zero_vuln_old.json", encoding="utf-8") as testfile: parser = SslyzeParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(0, len(findings)) def test_parse_json_file_issue_9848(self): - with open(path.join(Path(__file__).parent, "../scans/sslyze/issue_9848.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sslyze/issue_9848.json", encoding="utf-8") as testfile: parser = SslyzeParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(3, len(findings)) def test_parse_json_file_with_one_target_has_one_vuln_old(self): - with open(path.join(Path(__file__).parent, "../scans/sslyze/one_target_one_vuln_old.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sslyze/one_target_one_vuln_old.json", encoding="utf-8") as testfile: parser = SslyzeParser() findings = parser.get_findings(testfile, Test()) @@ -42,7 +41,7 @@ def test_parse_json_file_with_one_target_has_one_vuln_old(self): self.assertEqual(443, endpoint.port) def test_parse_json_file_with_one_target_has_four_vuln_old(self): - with open(path.join(Path(__file__).parent, "../scans/sslyze/one_target_many_vuln_old.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sslyze/one_target_many_vuln_old.json", encoding="utf-8") as testfile: parser = SslyzeParser() findings = parser.get_findings(testfile, Test()) @@ -55,20 +54,20 @@ def test_parse_json_file_with_one_target_has_four_vuln_old(self): self.assertEqual("CVE-2014-0224", findings[1].unsaved_vulnerability_ids[0]) def test_parse_json_file_with_two_target_has_many_vuln_old(self): - with open(path.join(Path(__file__).parent, 
"../scans/sslyze/two_targets_two_vuln_old.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sslyze/two_targets_two_vuln_old.json", encoding="utf-8") as testfile: parser = SslyzeParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(2, len(findings)) def test_parse_json_file_with_one_target_has_zero_vuln_new(self): - with open(path.join(Path(__file__).parent, "../scans/sslyze/one_target_zero_vuln_new.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sslyze/one_target_zero_vuln_new.json", encoding="utf-8") as testfile: parser = SslyzeParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(0, len(findings)) def test_parse_json_file_with_one_target_has_one_vuln_new(self): - with open(path.join(Path(__file__).parent, "../scans/sslyze/one_target_one_vuln_new.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sslyze/one_target_one_vuln_new.json", encoding="utf-8") as testfile: parser = SslyzeParser() findings = parser.get_findings(testfile, Test()) @@ -105,13 +104,13 @@ def test_parse_json_file_with_one_target_has_one_vuln_new(self): self.assertEqual(443, endpoint.port) def test_parse_json_file_with_one_target_has_three_vuln_new(self): - with open(path.join(Path(__file__).parent, "../scans/sslyze/one_target_many_vuln_new.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sslyze/one_target_many_vuln_new.json", encoding="utf-8") as testfile: parser = SslyzeParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(3, len(findings)) def test_parse_json_file_with_two_target_has_many_vuln_new(self): - with open(path.join(Path(__file__).parent, "../scans/sslyze/two_targets_many_vuln_new.json"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sslyze/two_targets_many_vuln_new.json", encoding="utf-8") as testfile: parser = SslyzeParser() findings = 
parser.get_findings(testfile, Test()) self.assertEqual(5, len(findings)) @@ -160,7 +159,7 @@ def test_parse_json_file_with_two_target_has_many_vuln_new(self): class TestSSLyzeXMLParser(DojoTestCase): def test_parse_file_with_one_target_has_three_vuln(self): - with open(path.join(Path(__file__).parent, "../scans/sslyze/report_one_target_three_vuln.xml"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sslyze/report_one_target_three_vuln.xml", encoding="utf-8") as testfile: parser = SslyzeParser() findings = parser.get_findings(testfile, Test()) for finding in findings: @@ -169,7 +168,7 @@ def test_parse_file_with_one_target_has_three_vuln(self): self.assertEqual(3, len(findings)) def test_parse_xml_file_with_one_target_has_one_vuln(self): - with open(path.join(Path(__file__).parent, "../scans/sslyze/report_one_target_one_vuln.xml"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sslyze/report_one_target_one_vuln.xml", encoding="utf-8") as testfile: parser = SslyzeParser() findings = parser.get_findings(testfile, Test()) for finding in findings: @@ -178,7 +177,7 @@ def test_parse_xml_file_with_one_target_has_one_vuln(self): self.assertEqual(1, len(findings)) def test_parse_xml_file_with_one_target_has_three_vuln(self): - with open(path.join(Path(__file__).parent, "../scans/sslyze/report_one_target_three_vuln.xml"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/sslyze/report_one_target_three_vuln.xml", encoding="utf-8") as testfile: parser = SslyzeParser() findings = parser.get_findings(testfile, Test()) for finding in findings: @@ -187,7 +186,7 @@ def test_parse_xml_file_with_one_target_has_three_vuln(self): self.assertEqual(3, len(findings)) def test_parse_xml_file_with_two_target_has_many_vuln(self): - with open(path.join(Path(__file__).parent, "../scans/sslyze/report_two_target_many_vuln.xml"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / 
"../scans/sslyze/report_two_target_many_vuln.xml", encoding="utf-8") as testfile: parser = SslyzeParser() findings = parser.get_findings(testfile, Test()) for finding in findings: diff --git a/unittests/tools/test_tenable_parser.py b/unittests/tools/test_tenable_parser.py index 449510393c..f342754888 100644 --- a/unittests/tools/test_tenable_parser.py +++ b/unittests/tools/test_tenable_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Engagement, Finding, Product, Test @@ -14,7 +13,7 @@ def create_test(self): return test def test_parse_some_findings_nessus_legacy(self): - with open(path.join(Path(__file__).parent, "../scans/tenable/nessus/nessus_many_vuln.xml"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/tenable/nessus/nessus_many_vuln.xml", encoding="utf-8") as testfile: parser = TenableParser() findings = parser.get_findings(testfile, self.create_test()) for finding in findings: @@ -31,7 +30,7 @@ def test_parse_some_findings_nessus_legacy(self): def test_parse_some_findings_csv_nessus_legacy(self): """Test one report provided by a user""" - with open(path.join(Path(__file__).parent, "../scans/tenable/nessus/nessus_many_vuln.csv"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/tenable/nessus/nessus_many_vuln.csv", encoding="utf-8") as testfile: parser = TenableParser() findings = parser.get_findings(testfile, self.create_test()) for finding in findings: @@ -61,7 +60,7 @@ def test_parse_some_findings_csv_nessus_legacy(self): def test_parse_some_findings_csv2_nessus_legacy(self): """Test that use default columns of Nessus Pro 8.13.1 (#257)""" - with open(path.join(Path(__file__).parent, "../scans/tenable/nessus/nessus_many_vuln2-default.csv"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/tenable/nessus/nessus_many_vuln2-default.csv", encoding="utf-8") as testfile: parser = TenableParser() findings = 
parser.get_findings(testfile, self.create_test()) for finding in findings: @@ -83,7 +82,7 @@ def test_parse_some_findings_csv2_nessus_legacy(self): def test_parse_some_findings_csv2_all_nessus_legacy(self): """Test that use a report with all columns of Nessus Pro 8.13.1 (#257)""" - with open(path.join(Path(__file__).parent, "../scans/tenable/nessus/nessus_many_vuln2-all.csv"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/tenable/nessus/nessus_many_vuln2-all.csv", encoding="utf-8") as testfile: parser = TenableParser() findings = parser.get_findings(testfile, self.create_test()) for finding in findings: @@ -105,19 +104,19 @@ def test_parse_some_findings_csv2_all_nessus_legacy(self): def test_parse_some_findings_csv_bytes_nessus_legacy(self): """This tests is designed to test the parser with different read modes""" - with open(path.join(Path(__file__).parent, "../scans/tenable/nessus/nessus_many_vuln2-all.csv"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/tenable/nessus/nessus_many_vuln2-all.csv", encoding="utf-8") as testfile: parser = TenableParser() findings = parser.get_findings(testfile, self.create_test()) for finding in findings: for endpoint in finding.unsaved_endpoints: endpoint.clean() - with open(path.join(Path(__file__).parent, "../scans/tenable/nessus/nessus_many_vuln2-all.csv"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/tenable/nessus/nessus_many_vuln2-all.csv", encoding="utf-8") as testfile: parser = TenableParser() findings = parser.get_findings(testfile, self.create_test()) for finding in findings: for endpoint in finding.unsaved_endpoints: endpoint.clean() - with open(path.join(Path(__file__).parent, "../scans/tenable/nessus/nessus_many_vuln2-all.csv"), "rb") as testfile: + with open(Path(__file__).parent / "../scans/tenable/nessus/nessus_many_vuln2-all.csv", "rb") as testfile: parser = TenableParser() findings = parser.get_findings(testfile, 
self.create_test()) for finding in findings: @@ -126,7 +125,7 @@ def test_parse_some_findings_csv_bytes_nessus_legacy(self): def test_parse_some_findings_samples_nessus_legacy(self): """Test that come from samples repo""" - with open(path.join(Path(__file__).parent, "../scans/tenable/nessus/nessus_v_unknown.xml"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/tenable/nessus/nessus_v_unknown.xml", encoding="utf-8") as testfile: parser = TenableParser() findings = parser.get_findings(testfile, self.create_test()) for finding in findings: @@ -157,7 +156,7 @@ def test_parse_some_findings_samples_nessus_legacy(self): def test_parse_some_findings_with_cvssv3_nessus_legacy(self): """Test with cvssv3""" - with open(path.join(Path(__file__).parent, "../scans/tenable/nessus/nessus_with_cvssv3.nessus"), encoding="utf-8") as testfile: + with open(Path(__file__).parent / "../scans/tenable/nessus/nessus_with_cvssv3.nessus", encoding="utf-8") as testfile: parser = TenableParser() findings = parser.get_findings(testfile, self.create_test()) for finding in findings: diff --git a/unittests/tools/test_threat_composer_parser.py b/unittests/tools/test_threat_composer_parser.py index 9dfbf524c6..2d76354c34 100644 --- a/unittests/tools/test_threat_composer_parser.py +++ b/unittests/tools/test_threat_composer_parser.py @@ -1,4 +1,5 @@ -import os + +from pathlib import Path from dojo.models import Test from dojo.tools.threat_composer.parser import ThreatComposerParser @@ -6,19 +7,19 @@ def sample_path(file_name: str): - return os.path.join("/scans/threat_composer", file_name) + return Path("scans/threat_composer") / file_name class TestThreatComposerParser(DojoTestCase): def test_threat_composer_parser_with_no_threat_has_no_findings(self): - with open(get_unit_tests_path() + sample_path("threat_composer_zero_threats.json"), encoding="utf-8") as testfile: + with open(get_unit_tests_path() / sample_path("threat_composer_zero_threats.json"), encoding="utf-8")
as testfile: parser = ThreatComposerParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(0, len(findings)) def test_threat_composer_parser_with_one_threat_has_one_finding(self): - with open(get_unit_tests_path() + sample_path("threat_composer_one_threat.json"), encoding="utf-8") as testfile: + with open(get_unit_tests_path() / sample_path("threat_composer_one_threat.json"), encoding="utf-8") as testfile: parser = ThreatComposerParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(1, len(findings)) @@ -38,28 +39,28 @@ def test_threat_composer_parser_with_one_threat_has_one_finding(self): self.assertFalse(finding.verified) def test_threat_composer_parser_with_many_threats_has_many_findings(self): - with open(get_unit_tests_path() + sample_path("threat_composer_many_threats.json"), encoding="utf-8") as testfile: + with open(get_unit_tests_path() / sample_path("threat_composer_many_threats.json"), encoding="utf-8") as testfile: parser = ThreatComposerParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(21, len(findings)) def test_threat_composer_parser_empty_with_error(self): with self.assertRaises(ValueError) as context: - with open(get_unit_tests_path() + sample_path("threat_composer_no_threats_with_error.json"), encoding="utf-8") as testfile: + with open(get_unit_tests_path() / sample_path("threat_composer_no_threats_with_error.json"), encoding="utf-8") as testfile: parser = ThreatComposerParser() parser.get_findings(testfile, Test()) self.assertNotIn("No threats found in the JSON file", str(context.exception)) def test_threat_composer_parser_with_one_threat_has_not_assumptions(self): - with open(get_unit_tests_path() + sample_path("threat_composer_broken_assumptions.json"), encoding="utf-8") as testfile: + with open(get_unit_tests_path() / sample_path("threat_composer_broken_assumptions.json"), encoding="utf-8") as testfile: parser = ThreatComposerParser() findings = parser.get_findings(testfile, 
Test()) finding = findings[0] self.assertNotIn("Assumption", str(finding.description)) def test_threat_composer_parser_with_one_threat_has_not_mitigations(self): - with open(get_unit_tests_path() + sample_path("threat_composer_broken_mitigations.json"), encoding="utf-8") as testfile: + with open(get_unit_tests_path() / sample_path("threat_composer_broken_mitigations.json"), encoding="utf-8") as testfile: parser = ThreatComposerParser() findings = parser.get_findings(testfile, Test()) finding = findings[0] diff --git a/unittests/tools/test_trivy_operator_parser.py b/unittests/tools/test_trivy_operator_parser.py index 2c657d5bae..83ad17e959 100644 --- a/unittests/tools/test_trivy_operator_parser.py +++ b/unittests/tools/test_trivy_operator_parser.py @@ -1,4 +1,3 @@ -import os.path from dojo.models import Test from dojo.tools.trivy_operator.parser import TrivyOperatorParser @@ -6,7 +5,7 @@ def sample_path(file_name): - return os.path.join(get_unit_tests_path() + "/scans/trivy_operator", file_name) + return get_unit_tests_path() / "scans/trivy_operator" / file_name class TestTrivyOperatorParser(DojoTestCase): diff --git a/unittests/tools/test_trivy_parser.py b/unittests/tools/test_trivy_parser.py index 0201368798..7a7ffe5a3a 100644 --- a/unittests/tools/test_trivy_parser.py +++ b/unittests/tools/test_trivy_parser.py @@ -1,4 +1,3 @@ -import os.path import re from dojo.models import Test @@ -7,7 +6,7 @@ def sample_path(file_name): - return os.path.join(get_unit_tests_path() + "/scans/trivy", file_name) + return get_unit_tests_path() / "scans/trivy" / file_name class TestTrivyParser(DojoTestCase): diff --git a/unittests/tools/test_trufflehog3_parser.py b/unittests/tools/test_trufflehog3_parser.py index 2e8a8523c2..5dad7fac21 100644 --- a/unittests/tools/test_trufflehog3_parser.py +++ b/unittests/tools/test_trufflehog3_parser.py @@ -1,5 +1,4 @@ import datetime -import os.path from dojo.models import Test from dojo.tools.trufflehog3.parser import TruffleHog3Parser @@ -7,7 
+6,7 @@ def sample_path(file_name): - return os.path.join(get_unit_tests_path() + "/scans/trufflehog3", file_name) + return get_unit_tests_path() / "scans/trufflehog3" / file_name class TestTruffleHog3Parser(DojoTestCase): diff --git a/unittests/tools/test_trufflehog_parser.py b/unittests/tools/test_trufflehog_parser.py index cfb7a6f86e..899f33fcae 100644 --- a/unittests/tools/test_trufflehog_parser.py +++ b/unittests/tools/test_trufflehog_parser.py @@ -1,4 +1,3 @@ -import os.path from dojo.models import Test from dojo.tools.trufflehog.parser import TruffleHogParser @@ -6,7 +5,7 @@ def sample_path(file_name): - return os.path.join(get_unit_tests_path() + "/scans/trufflehog", file_name) + return get_unit_tests_path() / "scans/trufflehog" / file_name class TestTruffleHogParser(DojoTestCase): diff --git a/unittests/tools/test_trustwave_fusion_api_parser.py b/unittests/tools/test_trustwave_fusion_api_parser.py index c11c1eeb68..93663eeca0 100644 --- a/unittests/tools/test_trustwave_fusion_api_parser.py +++ b/unittests/tools/test_trustwave_fusion_api_parser.py @@ -6,7 +6,7 @@ class TestTrustwaveFusionAPIParser(DojoTestCase): def test_parse_file_with_no_vuln_has_no_findings(self): with open( - get_unit_tests_path() + "/scans/trustwave_fusion_api/trustwave_fusion_api_zero_vul.json", encoding="utf-8", + get_unit_tests_path() / "scans/trustwave_fusion_api/trustwave_fusion_api_zero_vul.json", encoding="utf-8", ) as testfile: parser = TrustwaveFusionAPIParser() findings = parser.get_findings(testfile, Test()) @@ -42,7 +42,7 @@ def test_vuln_with_valid_cve(self): def test_parse_file_with_multiple_vuln_has_multiple_findings(self): with open( - get_unit_tests_path() + "/scans/trustwave_fusion_api/trustwave_fusion_api_many_vul.json", encoding="utf-8", + get_unit_tests_path() / "scans/trustwave_fusion_api/trustwave_fusion_api_many_vul.json", encoding="utf-8", ) as testfile: parser = TrustwaveFusionAPIParser() findings = parser.get_findings(testfile, Test()) diff --git 
a/unittests/tools/test_trustwave_parser.py b/unittests/tools/test_trustwave_parser.py index 8f8d7150eb..b6bc69b6d7 100644 --- a/unittests/tools/test_trustwave_parser.py +++ b/unittests/tools/test_trustwave_parser.py @@ -1,4 +1,3 @@ -import os.path from dojo.models import Engagement, Product, Test from dojo.tools.trustwave.parser import TrustwaveParser @@ -6,7 +5,7 @@ def sample_path(file_name): - return os.path.join(get_unit_tests_path() + "/scans/trustwave", file_name) + return get_unit_tests_path() / "scans/trustwave" / file_name class TestTrustwaveParser(DojoTestCase): diff --git a/unittests/tools/test_twistlock_parser.py b/unittests/tools/test_twistlock_parser.py index b774c70462..724de4550f 100644 --- a/unittests/tools/test_twistlock_parser.py +++ b/unittests/tools/test_twistlock_parser.py @@ -1,4 +1,3 @@ -from os import path from pathlib import Path from dojo.models import Test @@ -8,14 +7,14 @@ class TestTwistlockParser(DojoTestCase): def test_parse_file_with_no_vuln(self): - testfile = open(path.join(Path(__file__).parent, "../scans/twistlock/no_vuln.json"), encoding="utf-8") + testfile = open(Path(__file__).parent / "../scans/twistlock/no_vuln.json", encoding="utf-8") parser = TwistlockParser() findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(0, len(findings)) def test_parse_file_with_one_vuln(self): - testfile = open(path.join(Path(__file__).parent, "../scans/twistlock/one_vuln.json"), encoding="utf-8") + testfile = open(Path(__file__).parent / "../scans/twistlock/one_vuln.json", encoding="utf-8") parser = TwistlockParser() findings = parser.get_findings(testfile, Test()) testfile.close() @@ -24,7 +23,7 @@ def test_parse_file_with_one_vuln(self): self.assertEqual("CVE-2013-7459", findings[0].unsaved_vulnerability_ids[0]) def test_parse_file_with_no_link(self): - testfile = open(path.join(Path(__file__).parent, "../scans/twistlock/one_vuln_no_link.json"), encoding="utf-8") + testfile = open(Path(__file__).parent / 
"../scans/twistlock/one_vuln_no_link.json", encoding="utf-8") parser = TwistlockParser() findings = parser.get_findings(testfile, Test()) testfile.close() @@ -33,14 +32,14 @@ def test_parse_file_with_no_link(self): self.assertEqual("PRISMA-2021-0013", findings[0].unsaved_vulnerability_ids[0]) def test_parse_file_with_many_vulns(self): - testfile = open(path.join(Path(__file__).parent, "../scans/twistlock/many_vulns.json"), encoding="utf-8") + testfile = open(Path(__file__).parent / "../scans/twistlock/many_vulns.json", encoding="utf-8") parser = TwistlockParser() findings = parser.get_findings(testfile, Test()) testfile.close() self.assertEqual(5, len(findings)) def test_parse_file_which_contain_packages_info(self): - testfile = open(path.join(Path(__file__).parent, "../scans/twistlock/findings_include_packages.json"), encoding="utf-8") + testfile = open(Path(__file__).parent / "../scans/twistlock/findings_include_packages.json", encoding="utf-8") parser = TwistlockParser() findings = parser.get_findings(testfile, Test()) testfile.close() @@ -48,7 +47,7 @@ def test_parse_file_which_contain_packages_info(self): def test_parse_file_prisma_twistlock_images_no_vuln(self): testfile = open( - path.join(Path(__file__).parent, "../scans/twistlock/scan_report_prisma_twistlock_images_no_vuln.csv"), encoding="utf-8", + Path(__file__).parent / "../scans/twistlock/scan_report_prisma_twistlock_images_no_vuln.csv", encoding="utf-8", ) parser = TwistlockParser() findings = parser.get_findings(testfile, Test()) @@ -57,7 +56,7 @@ def test_parse_file_prisma_twistlock_images_no_vuln(self): def test_parse_file_prisma_twistlock_images_four_vulns(self): testfile = open( - path.join(Path(__file__).parent, "../scans/twistlock/scan_report_prisma_twistlock_images_four_vulns.csv"), encoding="utf-8", + Path(__file__).parent / "../scans/twistlock/scan_report_prisma_twistlock_images_four_vulns.csv", encoding="utf-8", ) parser = TwistlockParser() findings = parser.get_findings(testfile, Test()) 
@@ -68,9 +67,8 @@ def test_parse_file_prisma_twistlock_images_four_vulns(self): def test_parse_file_prisma_twistlock_images_long_package_name(self): testfile = open( - path.join( - Path(__file__).parent, "../scans/twistlock/scan_report_prisma_twistlock_images_long_package_name.csv", - ), encoding="utf-8", + Path(__file__).parent / "../scans/twistlock/scan_report_prisma_twistlock_images_long_package_name.csv", + encoding="utf-8", ) parser = TwistlockParser() findings = parser.get_findings(testfile, Test()) diff --git a/unittests/tools/test_xanitizer_parser.py b/unittests/tools/test_xanitizer_parser.py index 22b15010c9..6acf9b0f77 100644 --- a/unittests/tools/test_xanitizer_parser.py +++ b/unittests/tools/test_xanitizer_parser.py @@ -27,7 +27,7 @@ def test_parse_file_with_multiple_findings(self): self.assertEqual("CVE-2015-5211", finding.unsaved_vulnerability_ids[0]) def test_parse_file_with_multiple_findings_no_details(self): - with open(get_unit_tests_path() + "/scans/xanitizer/multiple-findings-no-details.xml", encoding="utf-8") as testfile: + with open(get_unit_tests_path() / "scans/xanitizer/multiple-findings-no-details.xml", encoding="utf-8") as testfile: parser = XanitizerParser() findings = parser.get_findings(testfile, Test()) self.assertEqual(9, len(findings))