
Commit

Revert "feat(general): add rustworkx (#5511)"
This reverts commit ef1433e.
rotemavni committed Sep 13, 2023
1 parent 5721569 · commit cd6a206
Showing 78 changed files with 199 additions and 674 deletions.
2 changes: 0 additions & 2 deletions .github/workflows/pr-test.yml
@@ -45,8 +45,6 @@ jobs:
mypy:
uses: bridgecrewio/gha-reusable-workflows/.github/workflows/mypy.yaml@main
with:
python-version: "3.8"

unit-tests:
strategy:
1 change: 0 additions & 1 deletion Pipfile
@@ -89,7 +89,6 @@ yarl = "*"
openai = "*"
spdx-tools = ">=0.8.0,<0.9.0"
license-expression = "*"
rustworkx = "*"

[requires]
python_version = "3.7"
207 changes: 24 additions & 183 deletions Pipfile.lock

Large diffs are not rendered by default.

23 changes: 4 additions & 19 deletions checkov/common/bridgecrew/wrapper.py
@@ -11,7 +11,6 @@

import dpath
from igraph import Graph
from rustworkx import PyDiGraph, digraph_node_link_json # type: ignore

try:
from networkx import DiGraph, node_link_data
@@ -20,7 +19,6 @@
DiGraph = str
node_link_data = lambda G : {}


from checkov.common.bridgecrew.check_type import CheckType
from checkov.common.models.consts import SUPPORTED_FILE_EXTENSIONS
from checkov.common.typing import _ReducedScanReport
@@ -40,10 +38,6 @@
secrets_check_reduced_keys = check_reduced_keys + ('validation_status',)
check_metadata_keys = ('evaluations', 'code_block', 'workflow_name', 'triggers', 'job')

FILE_NAME_NETWORKX = 'graph_networkx.json'
FILE_NAME_IGRAPH = 'graph_igraph.json'
FILE_NAME_RUSTWORKX = 'graph_rustworkx.json'


def _is_scanned_file(file: str) -> bool:
file_ending = os.path.splitext(file)[1]
@@ -152,24 +146,15 @@ def enrich_and_persist_checks_metadata(
return checks_metadata_paths


def persist_graphs(
graphs: dict[str, DiGraph | Graph | PyDiGraph[Any, Any]],
s3_client: S3Client,
bucket: str,
full_repo_object_key: str,
timeout: int,
absolute_root_folder: str = '',
) -> None:
def persist_graphs(graphs: dict[str, DiGraph | Graph], s3_client: S3Client, bucket: str, full_repo_object_key: str,
timeout: int, absolute_root_folder: str = '') -> None:
def _upload_graph(check_type: str, graph: DiGraph | Graph, _absolute_root_folder: str = '') -> None:
if isinstance(graph, DiGraph):
json_obj = node_link_data(graph)
graph_file_name = FILE_NAME_NETWORKX
graph_file_name = 'graph_networkx.json'
elif isinstance(graph, Graph):
json_obj = serialize_to_json(graph, _absolute_root_folder)
graph_file_name = FILE_NAME_IGRAPH
elif isinstance(graph, PyDiGraph):
json_obj = digraph_node_link_json(graph)
graph_file_name = FILE_NAME_RUSTWORKX
graph_file_name = 'graph_igraph.json'
else:
logging.error(f"unsupported graph type '{graph.__class__.__name__}'")
return
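For context on the wrapper.py hunk above: the reverted code picks a serializer and file name based on which graph library produced the graph. Below is a minimal, self-contained sketch of that dispatch, assuming networkx, igraph and rustworkx are installed; the helper name serialize_graph and the file names are illustrative, not checkov's actual API (checkov serializes igraph graphs with its own serialize_to_json helper, omitted here).

```python
# Illustrative sketch only; node_link_data, digraph_node_link_json and
# PyDiGraph come from the diff above, everything else is hypothetical.
from __future__ import annotations

from typing import Any

from igraph import Graph
from networkx import DiGraph, node_link_data
from rustworkx import PyDiGraph, digraph_node_link_json


def serialize_graph(graph: DiGraph | Graph | PyDiGraph[Any, Any]) -> tuple[Any, str]:
    """Return a JSON-serializable payload and a target file name for the graph."""
    if isinstance(graph, DiGraph):
        # networkx: node_link_data returns a plain dict
        return node_link_data(graph), "graph_networkx.json"
    if isinstance(graph, PyDiGraph):
        # rustworkx: digraph_node_link_json returns a JSON string
        return digraph_node_link_json(graph), "graph_rustworkx.json"
    if isinstance(graph, Graph):
        # igraph: checkov uses its own serialize_to_json helper here (omitted)
        raise NotImplementedError("igraph serialization is handled elsewhere in checkov")
    raise TypeError(f"unsupported graph type '{graph.__class__.__name__}'")
```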
(next changed file; file name not rendered)
@@ -8,7 +8,6 @@

from igraph import Graph
from bc_jsonpath_ng.ext import parse
from networkx import DiGraph

from checkov.common.graph.checks_infra import debug
from checkov.common.graph.checks_infra.enums import SolverType
@@ -66,17 +65,8 @@ def run(self, graph_connector: LibraryGraph) -> Tuple[List[Dict[str, Any]], List
failed_vertices.append(data)

return passed_vertices, failed_vertices, unknown_vertices
elif isinstance(graph_connector, DiGraph):
for _, data in graph_connector.nodes(data=True):
if (not self.resource_types or data.get(CustomAttributes.RESOURCE_TYPE) in self.resource_types) \
and data.get(CustomAttributes.BLOCK_TYPE) in SUPPORTED_BLOCK_TYPES:
jobs.append(executer.submit(
self._process_node, data, passed_vertices, failed_vertices, unknown_vertices))

concurrent.futures.wait(jobs)
return passed_vertices, failed_vertices, unknown_vertices

for _, data in graph_connector.nodes():
for _, data in graph_connector.nodes(data=True):
if (not self.resource_types or data.get(CustomAttributes.RESOURCE_TYPE) in self.resource_types) \
and data.get(CustomAttributes.BLOCK_TYPE) in SUPPORTED_BLOCK_TYPES:
jobs.append(executer.submit(
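The solver hunk above fans node checks out to a thread pool (one executer.submit per node, then concurrent.futures.wait). Here is a generic, runnable sketch of that pattern using networkx; process_node and the ok attribute are made up for illustration and are not checkov's API.

```python
# Hypothetical fan-out sketch mirroring the pattern in the hunk above.
import concurrent.futures

import networkx as nx


def process_node(data: dict, passed: list, failed: list) -> None:
    # Appending to a list is thread-safe under CPython's GIL.
    (passed if data.get("ok") else failed).append(data)


graph = nx.DiGraph()
graph.add_node("a", ok=True)
graph.add_node("b", ok=False)

passed: list = []
failed: list = []
with concurrent.futures.ThreadPoolExecutor() as executor:
    jobs = [
        executor.submit(process_node, data, passed, failed)
        for _, data in graph.nodes(data=True)
    ]
    concurrent.futures.wait(jobs)

print(len(passed), len(failed))  # -> 1 1
```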
(next changed file; file name not rendered)
@@ -4,7 +4,6 @@
from typing import List, Any, Tuple, Dict, TYPE_CHECKING, Optional

from igraph import Graph
from networkx import DiGraph

from checkov.common.graph.checks_infra import debug
from checkov.common.graph.checks_infra.enums import SolverType
@@ -60,19 +59,8 @@ def run(self, graph_connector: LibraryGraph) -> Tuple[List[Dict[str, Any]], List
)

return passed_vertices, failed_vertices, unknown_vertices
elif isinstance(graph_connector, DiGraph):
for _, data in graph_connector.nodes(data=True):
if self.resource_type_pred(data, self.resource_types):
result = self.get_operation(data)
if result is None:
unknown_vertices.append(data)
elif result:
passed_vertices.append(data)
else:
failed_vertices.append(data)
return passed_vertices, failed_vertices, unknown_vertices

for _, data in graph_connector.nodes():
for _, data in graph_connector.nodes(data=True):
if self.resource_type_pred(data, self.resource_types):
result = self.get_operation(data)
if result is None:
(next changed file; file name not rendered)
@@ -4,7 +4,6 @@
from typing import Any, List, Dict, Optional, Tuple, TYPE_CHECKING

from igraph import Graph
from networkx import DiGraph

from checkov.common.graph.checks_infra.enums import SolverType
from checkov.common.graph.checks_infra.solvers.base_solver import BaseSolver
@@ -62,24 +61,14 @@ def set_vertices(self, graph_connector: LibraryGraph, exclude_vertices: List[Dic
self.vertices_under_connected_resources_types = [
data for data in graph_connector.vs.select(resource_type_in=self.connected_resources_types)["attr"]
]
elif isinstance(graph_connector, DiGraph):
else:
self.vertices_under_resource_types = [
v for _, v in graph_connector.nodes(data=True) if self.resource_type_pred(v, self.resource_types)
]
self.vertices_under_connected_resources_types = [
v for _, v in graph_connector.nodes(data=True) if self.resource_type_pred(v, self.connected_resources_types)
]

# isinstance(graph_connector, PyDiGraph):
else:
self.vertices_under_resource_types = [
v for _, v in graph_connector.nodes() if self.resource_type_pred(v, self.resource_types)
]
self.vertices_under_connected_resources_types = [
v for _, v in graph_connector.nodes() if
self.resource_type_pred(v, self.connected_resources_types)
]

self.excluded_vertices = [
v
for v in itertools.chain(self.vertices_under_resource_types, self.vertices_under_connected_resources_types)
@@ -103,7 +92,7 @@ def reduce_graph_by_target_types(self, graph_connector: LibraryGraph) -> Library
connection_nodes = {
vertex for vertex in graph_connector.vs.select(block_type__in=BaseConnectionSolver.SUPPORTED_CONNECTION_BLOCK_TYPES)
}
elif isinstance(graph_connector, DiGraph):
else:
resource_nodes = {
node
for node, resource_type in graph_connector.nodes(data=CustomAttributes.RESOURCE_TYPE)
@@ -117,24 +106,9 @@
if block_type in BaseConnectionSolver.SUPPORTED_CONNECTION_BLOCK_TYPES
}

# isinstance(graph_connector, PyDiGraph):
else:
resource_nodes = {
index
for index, node in graph_connector.nodes()
if self.resource_type_pred(node, list(self.targeted_resources_types))
}

# tuple needs to be adjusted, if more connection block types are supported
connection_nodes = {
index
for index, node in graph_connector.nodes()
if node['block_type_'] in BaseConnectionSolver.SUPPORTED_CONNECTION_BLOCK_TYPES
}

resource_nodes.update(connection_nodes)

return graph_connector.subgraph(list(resource_nodes))
return graph_connector.subgraph(resource_nodes)

def populate_checks_results(self, origin_attributes: Dict[str, Any], destination_attributes: Dict[str, Any], passed: List[Dict[str, Any]], failed: List[Dict[str, Any]], unknown: List[Dict[str, Any]]) -> None:
if origin_attributes in self.excluded_vertices or destination_attributes in self.excluded_vertices:
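One plausible reading of the subgraph change above: rustworkx's PyDiGraph.subgraph expects a list of integer node indices, which would explain why the reverted code wrapped resource_nodes in list(...), while networkx's DiGraph.subgraph accepts any iterable of node ids. A small sketch of the two call styles, assuming both libraries are installed; this is an interpretation of the library APIs, not code taken from checkov.

```python
# Illustration of the differing subgraph call styles; not checkov code.
import networkx as nx
import rustworkx as rx

rx_graph = rx.PyDiGraph()
a, b, c = rx_graph.add_nodes_from(["a", "b", "c"])  # returns node indices
rx_graph.add_edge(a, b, None)
rx_sub = rx_graph.subgraph([a, b])  # expects a list of integer indices
print(rx_sub.num_nodes())  # -> 2

nx_graph = nx.DiGraph([("a", "b"), ("b", "c")])
nx_sub = nx_graph.subgraph({"a", "b"})  # any iterable of node ids works
print(nx_sub.number_of_nodes())  # -> 2
```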
(remaining changed files not rendered)

0 comments on commit cd6a206
