More readable error message on missing files for capture ops #75

Draft · wants to merge 4 commits into master
28 changes: 14 additions & 14 deletions gateway/makefile
@@ -1,7 +1,7 @@
# Build and manage a local deploy of the SDS Gateway - NOT PRODUCTION READY

.PHONY: all redeploy build build-full up logs logs-once down pre-commit restart \
serve-coverage test update
.PHONY: all redeploy build build-full down logs logs-once pre-commit restart \
serve-coverage test up update

all: build up logs
redeploy: build down up logs
@@ -11,19 +11,19 @@ APP_CONTAINER := sds-gateway-local-app
COMPOSE_FILE := compose.local.yaml
ENV_FILE := .envs/local/opensearch.env

build-full:
@echo "Pulling and building sds-gateway WITHOUT CACHE"
@COMPOSE_FILE=$(COMPOSE_FILE) docker compose --env-file $(ENV_FILE) pull --ignore-buildable
@COMPOSE_FILE=$(COMPOSE_FILE) docker compose --env-file $(ENV_FILE) build --no-cache $(ARGS)

build:
@echo "Pulling and building sds-gateway"
@COMPOSE_FILE=$(COMPOSE_FILE) docker compose --env-file $(ENV_FILE) pull --ignore-buildable
@COMPOSE_FILE=$(COMPOSE_FILE) docker compose --env-file $(ENV_FILE) build $(ARGS)

up:
@echo "Starting sds-gateway"
@COMPOSE_FILE=$(COMPOSE_FILE) docker compose --env-file $(ENV_FILE) up -d --remove-orphans $(ARGS)
build-full:
@echo "Pulling and building sds-gateway WITHOUT CACHE"
@COMPOSE_FILE=$(COMPOSE_FILE) docker compose --env-file $(ENV_FILE) pull --ignore-buildable
@COMPOSE_FILE=$(COMPOSE_FILE) docker compose --env-file $(ENV_FILE) build --no-cache $(ARGS)

down:
@echo "Stopping sds-gateway"
@COMPOSE_FILE=$(COMPOSE_FILE) docker compose --env-file $(ENV_FILE) down $(ARGS)

logs:
@echo "Showing sds-gateway logs…"
@@ -33,10 +33,6 @@ logs-once:
@echo "Showing gateway logs once…"
@COMPOSE_FILE=$(COMPOSE_FILE) docker compose --env-file $(ENV_FILE) logs $(ARGS)

down:
@echo "Stopping sds-gateway"
@COMPOSE_FILE=$(COMPOSE_FILE) docker compose --env-file $(ENV_FILE) down $(ARGS)

pre-commit:
uv run --dev pre-commit run --all-files

@@ -59,6 +55,10 @@ test:
@# Django's test runner: obsolete, subset of pytest tests, left as reference.
@# @COMPOSE_FILE=$(COMPOSE_FILE) docker compose --env-file $(ENV_FILE) run $(APP_CONTAINER) python manage.py test --no-input --force-color --verbosity 0

up:
@echo "Starting sds-gateway"
@COMPOSE_FILE=$(COMPOSE_FILE) docker compose --env-file $(ENV_FILE) up -d --remove-orphans $(ARGS)

update:
@# uv sync --upgrade # re-enable when uv integration is done
uv run pre-commit autoupdate
23 changes: 17 additions & 6 deletions gateway/sds_gateway/api_methods/tests/test_opensearch.py
@@ -83,7 +83,9 @@ def setUp(self) -> None:

# Setup test data and create initial capture
self._setup_test_data()
self.capture = self._create_test_capture(self.user, self.top_level_dir)
self.capture = self._create_test_capture(
owner=self.user, top_level_dir=self.top_level_dir
)
self._initialize_test_index()
self._index_test_capture(self.capture)

@@ -152,6 +154,7 @@ def _setup_test_data(self) -> None:

def _create_test_capture(self, owner: User, top_level_dir: str) -> Capture:
"""Create and index a test capture."""
# TODO: create files for this capture
return Capture.objects.create(
owner=owner,
scan_group=self.scan_group,
@@ -165,7 +168,7 @@ def _create_test_file(self, owner: User) -> File:
json_content = json.dumps(self.json_file).encode("utf-8")
self.uploaded_file = SimpleUploadedFile(
"test.rh.json",
json_content,
content=json_content,
content_type="application/json",
)

@@ -388,7 +391,9 @@ def test_duplicate_capture_deletion(self) -> None:
assert initial_response["hits"]["total"]["value"] == 1

# Create duplicate capture
duplicate_capture = self._create_test_capture(self.user, self.top_level_dir)
duplicate_capture = self._create_test_capture(
owner=self.user, top_level_dir=self.top_level_dir
)
self._index_test_capture(duplicate_capture)

# Verify duplicate capture was created
@@ -508,7 +513,9 @@ def setUp(self) -> None:

# setup test data and create initial capture
self._setup_test_data()
self.capture = self._create_test_capture(self.user, self.top_level_dir)
self.capture = self._create_test_capture(
owner=self.user, top_level_dir=self.top_level_dir
)
self._initialize_test_index()
self._index_test_capture(self.capture)

@@ -792,7 +799,9 @@ def test_duplicate_capture_deletion(self) -> None:
assert initial_response["hits"]["total"]["value"] == 1

# Create duplicate capture
duplicate_capture = self._create_test_capture(self.user, self.top_level_dir)
duplicate_capture = self._create_test_capture(
owner=self.user, top_level_dir=self.top_level_dir
)
self._index_test_capture(duplicate_capture)

# Verify duplicate capture was created
@@ -832,7 +841,9 @@ def test_no_capture_deletion_multiple_owners(self) -> None:
other_top_level_dir = f"/files/{other_user.email}/{self.channel}"
expected_count = 2
# Create a duplicate capture for the second user
duplicate_capture = self._create_test_capture(other_user, other_top_level_dir)
duplicate_capture = self._create_test_capture(
owner=other_user, top_level_dir=other_top_level_dir
)
self._index_test_capture(duplicate_capture)

# Get initial document
62 changes: 46 additions & 16 deletions gateway/sds_gateway/api_methods/views/capture_endpoints.py
@@ -123,6 +123,8 @@ def ingest_capture(
requester: The user making the request
rh_scan_group: Optional scan group UUID for RH captures
top_level_dir: Path to directory containing files to connect to capture
Raises:
FileNotFoundError: If there are no files to connect to this capture
"""
# check if the top level directory was passed
if not top_level_dir:
@@ -147,6 +149,14 @@
verbose=True,
)

if not files_to_connect:
msg = (
f"No files found for '{top_level_dir}' "
f"matching capture '{capture.capture_type}'"
)
log.warning(msg)
raise FileNotFoundError(msg)

# try to validate and index metadata before connecting files
self._validate_and_index_metadata(
capture=capture,
@@ -203,7 +213,7 @@ def ingest_capture(
),
summary="Create Capture",
)
def create(self, request: Request) -> Response: # noqa: PLR0911
def create(self, request: Request) -> Response: # noqa: C901, PLR0911
"""Create a capture object, connecting files and indexing the metadata."""
drf_channel = request.data.get("channel", None)
rh_scan_group = request.data.get("scan_group", None)
@@ -279,21 +289,31 @@ def create(self, request: Request) -> Response: # noqa: PLR0911
requester=requester,
top_level_dir=requested_top_level_dir,
)
except UnknownIndexError as e:
user_msg = f"Unknown index: '{e}'. Try recreating this capture."
except UnknownIndexError as err:
user_msg = f"Unknown index: '{err}'. Try recreating this capture."
server_msg = (
f"Unknown index: '{e}'. Try running the init_indices "
f"Unknown index: '{err}'. Try running the init_indices "
"subcommand if this is index should exist."
)
log.error(server_msg)
capture.soft_delete()
return Response({"detail": user_msg}, status=status.HTTP_400_BAD_REQUEST)
except ValueError as e:
user_msg = f"Error handling metadata for capture '{capture.uuid}': {e}"
except FileNotFoundError as err:
user_msg = (
"Could not find relevant files to create capture. "
f"Please, check if your files are in '{unsafe_top_level_dir}' "
"under SDS. You can list the files in this directory to verify."
f" {err}"
)
log.warning(user_msg)
capture.soft_delete()
return Response({"detail": user_msg}, status=status.HTTP_400_BAD_REQUEST)
except ValueError as err:
user_msg = f"Error handling metadata for capture '{capture.uuid}': {err}"
capture.soft_delete()
return Response({"detail": user_msg}, status=status.HTTP_400_BAD_REQUEST)
except os_exceptions.ConnectionError as e:
user_msg = f"Error connecting to OpenSearch: {e}"
except os_exceptions.ConnectionError as err:
user_msg = f"Error connecting to OpenSearch: {err}"
log.error(user_msg)
capture.soft_delete()
return Response(status=status.HTTP_503_SERVICE_UNAVAILABLE)
@@ -496,7 +516,7 @@ def list(self, request: Request) -> Response:
),
summary="Update Capture",
)
def update(self, request: Request, pk: str | None = None) -> Response:
def update(self, request: Request, pk: str | None = None) -> Response: # noqa: PLR0911
"""Update a capture by adding files or re-indexing metadata."""
if pk is None:
return Response(
@@ -530,19 +550,29 @@ def update(self, request: Request, pk: str | None = None) -> Response:
requester=owner,
top_level_dir=requested_top_level_dir,
)
except UnknownIndexError as e:
user_msg = f"Unknown index: '{e}'. Try recreating this capture."
except UnknownIndexError as err:
user_msg = f"Unknown index: '{err}'. Try recreating this capture."
server_msg = (
f"Unknown index: '{e}'. Try running the init_indices "
f"Unknown index: '{err}'. Try running the init_indices "
"subcommand if this is index should exist."
)
log.error(server_msg)
return Response({"detail": user_msg}, status=status.HTTP_400_BAD_REQUEST)
except ValueError as e:
msg = f"Error handling metadata for capture '{target_capture.uuid}': {e}"
except FileNotFoundError as err:
user_msg = (
"Could not find relevant files to update capture. "
"Please, check if your files are still in "
f"'{target_capture.top_level_dir}' "
"under SDS. You can list the files in this directory to verify."
f" {err}"
)
log.warning(user_msg)
return Response({"detail": user_msg}, status=status.HTTP_400_BAD_REQUEST)
except ValueError as err:
msg = f"Error handling metadata for capture '{target_capture.uuid}': {err}"
return Response({"detail": msg}, status=status.HTTP_400_BAD_REQUEST)
except os_exceptions.ConnectionError as e:
msg = f"Error connecting to OpenSearch: {e}"
except os_exceptions.ConnectionError as err:
msg = f"Error connecting to OpenSearch: {err}"
log.error(msg)
return Response(status=status.HTTP_503_SERVICE_UNAVAILABLE)

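A minimal sketch of how the new missing-files branch surfaces to an API client, assuming DRF's test client. The endpoint path, field names, and field values below are illustrative assumptions, not part of this diff; only the 400 status and the "Could not find relevant files" detail come from the create handler above.

from rest_framework import status
from rest_framework.test import APIClient


def check_missing_files_error(user) -> None:
    """Hit the capture create endpoint with an empty directory and inspect the new error."""
    client = APIClient()
    client.force_authenticate(user=user)  # assumes an existing test user

    response = client.post(
        "/api/v1/assets/captures/",  # hypothetical path; use the project's actual route
        data={
            "capture_type": "rh",  # assumed capture type value
            "channel": "ch0",  # assumed channel name
            "top_level_dir": "/files/user@example.com/empty",  # directory with no files
        },
        format="json",
    )

    # The FileNotFoundError branch soft-deletes the capture and returns a 400
    # with a human-readable hint instead of an unhandled server error.
    assert response.status_code == status.HTTP_400_BAD_REQUEST
    assert "Could not find relevant files" in response.json()["detail"]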
2 changes: 1 addition & 1 deletion sdk/pyproject.toml
@@ -153,13 +153,13 @@
# https://docs.astral.sh/ruff/settings/#lint_ignore
"COM812", # disabled following ruff's recommendation
"ISC001", # disabled following ruff's recommendation
# "N811", # Constant imports aliased to non-constant-style names (false positives with django)
"RUF012", # Mutable class attributes should be annotated with `typing.ClassVar`
"S101", # Use of assert detected https://docs.astral.sh/ruff/rules/assert/
"S104", # Possible binding to all interfaces
"SIM102", # sometimes it's better to nest
"UP038", # Checks for uses of isinstance/issubclass that take a tuple
# of types for comparison.
# "N811", # Constant imports aliased to non-constant-style names (false positives with django)
# UP038 deactivated because it can make the code slow:
# https://github.com/astral-sh/ruff/issues/7871
]