Add unit tests for OpenRelik processor module #945

Closed · wants to merge 13 commits
54 changes: 54 additions & 0 deletions data/recipes/openrelik_ts.json
@@ -0,0 +1,54 @@
{
"name": "openrelik_ts",
"short_description": "Processes files from the local file system using OpenRelik. Sends the results to Timesketch.",
"description": "Processes files from the local file system using OpenRelik. Sends the results to Timesketch.",
"test_params": "paths",
"modules": [{
"wants": [],
"name": "FilesystemCollector",
"args": {
"paths": "@paths"
}
}, {
"wants": ["FilesystemCollector"],
"name": "OpenRelikProcessor",
"args": {
"incident_id": "@incident_id",
"sketch_id": "@sketch_id",
"folder_id": "@folder_id",
"workflow_id": "@workflow_id",
"openrelik_api": "@openrelik_api",
"openrelik_ui": "@openrelik_ui",
"openrelik_api_key": "@openrelik_api_key"
}
}, {
"wants": ["OpenRelikProcessor"],
"name": "TimesketchExporter",
"args": {
"incident_id": "@reason",
"token_password": null,
"endpoint": "@timesketch_endpoint",
"username": "@timesketch_username",
"password": "@timesketch_password",
"sketch_id": "@sketch_id",
"analyzers": "@analyzers",
"wait_for_timelines": "@wait_for_timelines"
}
}],
"args": [
["paths", "Comma-separated list of paths to process.", null],
["--incident_id", "Incident ID (used for Timesketch description).", null],
["--sketch_id", "Timesketch sketch to which the timeline should be added.", null, {"format": "integer"}],
["--folder_id", "OpenRelik Folder ID.", null],
["--workflow_id", "OpenRelik workflow template ID.", null],
["--openrelik_api", "OpenRelik API server URI.", "http://localhost:8710"],
["--openrelik_ui", "OpenRelik UI URI.", "http://localhost:8711"],
["--openrelik_api_key", "OpenRelik API key", ""],
["--wait_for_timelines", "Whether to wait for Timesketch to finish processing all timelines.", true],
["--analyzers", "Timesketch analyzers to run", null],
["--token_password", "Optional custom password to decrypt Timesketch credential file with.", ""],
["--timesketch_endpoint", "Timesketch endpoint", "http://localhost:5000/"],
["--timesketch_username", "Username for Timesketch server.", null],
["--timesketch_password", "Password for Timesketch server.", null]
]
}
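
For reference, a recipe like this is run through the dftimewolf CLI, with paths as the positional argument and the remaining parameters passed as flags. The invocation below is illustrative only; the parameter values are placeholders, not taken from this PR:

dftimewolf openrelik_ts /evidence/disk.plaso,/evidence/triage.zip \
    --incident_id incident-2024-001 \
    --folder_id 12 \
    --workflow_id 34 \
    --openrelik_api http://localhost:8710 \
    --openrelik_ui http://localhost:8711 \
    --openrelik_api_key <api-key> \
    --sketch_id 7 \
    --timesketch_endpoint http://localhost:5000/ \
    --timesketch_username analyst \
    --timesketch_password <password>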
1 change: 1 addition & 0 deletions dftimewolf/cli/dftimewolf_recipes.py
@@ -88,6 +88,7 @@
'TurbiniaArtifactProcessor': 'dftimewolf.lib.processors.turbinia_artifact',
'TurbiniaGCPProcessor': 'dftimewolf.lib.processors.turbinia_gcp',
'VTCollector' : 'dftimewolf.lib.collectors.virustotal',
'OpenRelikProcessor': 'dftimewolf.lib.processors.openrelik',
'WorkspaceAuditCollector': 'dftimewolf.lib.collectors.workspace_audit',
'WorkspaceAuditTimesketch': 'dftimewolf.lib.processors.workspace_audit_timesketch',
'YetiYaraCollector': 'dftimewolf.lib.collectors.yara'
181 changes: 181 additions & 0 deletions dftimewolf/lib/processors/openrelik.py
@@ -0,0 +1,181 @@
"""Processes artifacts using OpenRelik."""

import tempfile
import time
import os

from typing import Type, Iterator

from openrelik_api_client import api_client, folders, workflows

from dftimewolf.lib import module
from dftimewolf.lib import state as state_lib
from dftimewolf.lib.containers import containers, interface
from dftimewolf.lib.modules import manager as modules_manager


# pylint: disable=no-member
class OpenRelikProcessor(module.ThreadAwareModule):
"""Processes artifacts with OpenRelik."""

def __init__(
self,
state: state_lib.DFTimewolfState,
name: str | None = None,
critical: bool = False,
) -> None:
"""Initializes an OpenRelik processor.

Args:
state (DFTimewolfState): recipe state.
name (Optional[str]): The module's runtime name.
critical (Optional[bool]): True if the module is critical, which causes
the entire recipe to fail if the module encounters an error.
"""
super().__init__(state=state, name=name, critical=critical)
    self.openrelik_api_client: api_client.APIClient | None = None
    self.openrelik_folder_client: folders.FoldersAPI | None = None
    self.openrelik_workflow_client: workflows.WorkflowsAPI | None = None
self.openrelik_api: str | None = None
self.openrelik_ui: str | None = None
self.openrelik_api_key: str | None = None
self.workflow_id: int | None = None
self.folder_id: int | None = None
self.sketch_id: int | None = None
self.incident_id: str | None = None

# pylint: disable=arguments-differ
def SetUp(
self,
incident_id: str | None,
sketch_id: int | None,
folder_id: int | None,
workflow_id: int | None,
openrelik_api: str | None,
openrelik_ui: str | None,
openrelik_api_key: str | None,
  ) -> None:
    """Sets up the OpenRelik processor.

    Args:
      incident_id: Incident ID, used to name the OpenRelik folder and the
        Timesketch description.
      sketch_id: Timesketch sketch to which the timeline should be added.
      folder_id: Existing OpenRelik folder ID to upload files into.
      workflow_id: OpenRelik workflow template ID to run.
      openrelik_api: OpenRelik API server URI.
      openrelik_ui: OpenRelik UI URI.
      openrelik_api_key: OpenRelik API key.
    """
self.openrelik_api = openrelik_api
self.openrelik_ui = openrelik_ui
self.openrelik_api_key = openrelik_api_key
self.openrelik_api_client = api_client.APIClient(
self.openrelik_api, self.openrelik_api_key
)
self.openrelik_folder_client = folders.FoldersAPI(self.openrelik_api_client)
self.openrelik_workflow_client = workflows.WorkflowsAPI(
self.openrelik_api_client
)

self.folder_id = folder_id
self.workflow_id = workflow_id
self.incident_id = incident_id
self.sketch_id = sketch_id

def PollWorkflowStatus(self, workflow_id: int) -> Iterator[str]:
"""Polls the status of a workflow until it completes."""
filename = str(workflow_id)
workflow = self.openrelik_workflow_client.get_workflow(
self.folder_id, workflow_id
)
status = None
tasks = workflow.get("tasks")
if tasks and len(tasks) > 0:
status = tasks[0].get("status_short")
if not status:
self.ModuleError("Error polling workflow status", critical=True)
self.logger.info(f"Waiting for workflow {workflow_id} to finish.")
output_file_ids = {}
while status not in ("SUCCESS", "FAILED"):
self.logger.debug(f"Workflow {workflow_id} status: {status}")
time.sleep(15)
      workflow = self.openrelik_workflow_client.get_workflow(
          self.folder_id, workflow_id
      )
      # Refresh the task list so output files from the finished run are seen.
      tasks = workflow.get("tasks")
      status = tasks[0].get("status_short")
self.logger.debug(f"Workflow {workflow_id} status: {status}")
if status == "FAILED":
self.ModuleError(f"Workflow {workflow_id} failed", critical=True)

for task in tasks:
output_files = task.get("output_files", [])
for output_file in output_files:
output_file_id = output_file.get("id")
filename = output_file.get("display_name", workflow_id)
output_file_ids[output_file_id] = filename

for output_file_id, filename in output_file_ids.items():
local_path = self.DownloadWorkflowOutput(output_file_id, filename)
yield local_path

def DownloadWorkflowOutput(self, file_id: int, filename: str) -> str:
"""Downloads a file from OpenRelik.

Args:
file_id: The ID of the file to download.
filename: The name of the file to download.

Returns:
str: The path to the downloaded file.
"""
endpoint = f"{self.openrelik_api_client.base_url}/files/{file_id}/download"
self.logger.info(f"Downloading {filename}, ID:{file_id}")
response = self.openrelik_api_client.session.get(endpoint)
filename_prefix, extension = os.path.splitext(filename)
file = tempfile.NamedTemporaryFile(
mode="wb", prefix=f"{filename_prefix}", suffix=extension, delete=False
)
local_path = file.name
self.PublishMessage(f"Saving output for file ID {file_id} to {local_path}")
    # Write raw bytes so binary outputs (e.g. Plaso storage files) are not corrupted.
    file.write(response.content)
file.close()
return local_path

# pytype: disable=signature-mismatch
  def Process(self, container: containers.File) -> None:
    """Uploads a file to OpenRelik, runs a workflow, and streams the results.

    Args:
      container: File container pointing at the local file to process.
    """
file_ids = []
folder_id = self.folder_id
if not folder_id or not self.openrelik_folder_client.folder_exists(
folder_id
):
folder_id = self.openrelik_folder_client.create_root_folder(
f"{self.incident_id}"
)
self.logger.info(f"Created folder {folder_id}")

self.logger.info(f"Uploading file {container.path}")
file_id = self.openrelik_api_client.upload_file(container.path, folder_id)
if file_id:
self.logger.info(f"Uploaded file {container.path}")
file_ids.append(file_id)

workflow_id = self.openrelik_workflow_client.create_workflow(
folder_id, file_ids, self.workflow_id
)
workflow_url = f"{self.openrelik_ui}/folder/{folder_id}"
self.PublishMessage(
f"New workflow ID {workflow_id} can be viewed at: {workflow_url}"
)
self.openrelik_workflow_client.run_workflow(folder_id, workflow_id)
for local_path in self.PollWorkflowStatus(workflow_id):
fs_container = containers.File(path=local_path, name=local_path)
self.StreamContainer(fs_container)

@staticmethod
def GetThreadOnContainerType() -> Type[interface.AttributeContainer]:
return containers.File

def GetThreadPoolSize(self) -> int:
return 3

@staticmethod
def KeepThreadedContainersInState() -> bool:
return False

def PreProcess(self) -> None:
pass

def PostProcess(self) -> None:
pass


modules_manager.ModulesManager.RegisterModule(OpenRelikProcessor)
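
The PR title mentions unit tests for this module, but none are visible in this diff view. Below is a minimal test sketch for DownloadWorkflowOutput; the mock-based state object and the test file layout are assumptions rather than code from this PR:

# Hypothetical tests/lib/processors/openrelik.py-style sketch (not part of this diff).
import os
import unittest
from unittest import mock

from dftimewolf.lib.processors import openrelik


class OpenRelikProcessorTest(unittest.TestCase):
  """Tests the DownloadWorkflowOutput helper in isolation."""

  def setUp(self):
    # Assumption: a MagicMock can stand in for DFTimewolfState, since the
    # base module only stores the state object at construction time.
    self.processor = openrelik.OpenRelikProcessor(
        state=mock.MagicMock(), name="OpenRelikProcessor")
    self.processor.logger = mock.MagicMock()
    self.processor.PublishMessage = mock.MagicMock()

  def testDownloadWorkflowOutput(self):
    """Downloaded bytes should be written to a local temporary file."""
    api = mock.MagicMock()
    api.base_url = "http://localhost:8710"
    api.session.get.return_value = mock.MagicMock(
        text="plaso output", content=b"plaso output")
    self.processor.openrelik_api_client = api

    local_path = self.processor.DownloadWorkflowOutput(42, "results.plaso")
    self.addCleanup(os.remove, local_path)

    api.session.get.assert_called_once_with(
        "http://localhost:8710/files/42/download")
    with open(local_path, "rb") as downloaded:
      self.assertEqual(downloaded.read(), b"plaso output")


if __name__ == "__main__":
  unittest.main()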
4 changes: 2 additions & 2 deletions dftimewolf/metawolf/metawolf.py
@@ -404,7 +404,7 @@ def do_run(self, _: cmd2.Statement) -> None:
help='Show past and current recipe runs.')
show_parser.add_argument('-o', ARG_OUTPUT, nargs='?', type=int,
help='Show the output of a recipe run.')
@cmd2.with_argparser(show_parser) # type: ignore
@cmd2.with_argparser(show_parser)
def do_show(self, args: argparse.Namespace) -> None:
"""Show sessions, recipes, runs and outputs. `help show` for details.

@@ -568,7 +568,7 @@ def read_output(process: output.MetawolfProcess) -> None:
send_parser.add_argument('-i', ARG_INPUT, type=str,
help='The input to send to the process\' STDIN.'
'Input must be between quotes.')
@cmd2.with_argparser(send_parser) # type: ignore
@cmd2.with_argparser(send_parser)
def do_send(self, args: argparse.Namespace) -> None:
"""Communicate with running recipes.
