Skip to content

Commit

Permalink
DPE-2758 better messaging when no bucket (#350)
Browse files Browse the repository at this point in the history
* ceph test and empty bucket error handling

* lint fixes

* fix path

* bump libs

* reverted and added microceph as a job

since reusable workflows cannot be nested

* fix microceph action call

* missing parameter

* deprecated microceph action

* removed load of envvars

* picking up fix for github_secrets plugin

* bumped dpw on workflows

* removes unneeded variable
  • Loading branch information
paulomach authored Jan 18, 2024
1 parent 8dff5fa commit c290557
Show file tree
Hide file tree
Showing 8 changed files with 102 additions and 51 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ on:
jobs:
lint:
name: Lint
uses: canonical/data-platform-workflows/.github/workflows/lint.yaml@v9.0.2
uses: canonical/data-platform-workflows/.github/workflows/lint.yaml@v9.3.1

unit-test:
name: Unit test charm
Expand Down Expand Up @@ -49,14 +49,14 @@ jobs:
- name: Check libs
uses: canonical/charming-actions/[email protected]
with:
credentials: ${{ secrets.CHARMHUB_TOKEN }} # FIXME: current token will expire in 2023-07-04
credentials: ${{ secrets.CHARMHUB_TOKEN }}
github-token: ${{ secrets.GITHUB_TOKEN }}
use-labels: false
fail-build: ${{ github.event_name == 'pull_request' }}

build:
name: Build charm
uses: canonical/data-platform-workflows/.github/workflows/build_charms_with_cache.yaml@v9.0.2
uses: canonical/data-platform-workflows/.github/workflows/build_charms_with_cache.yaml@v9.3.1
with:
charmcraft-snap-channel: beta

Expand All @@ -73,7 +73,7 @@ jobs:
- lint
- unit-test
- build
uses: canonical/data-platform-workflows/.github/workflows/integration_test_charm.yaml@v9.0.2
uses: canonical/data-platform-workflows/.github/workflows/integration_test_charm.yaml@v9.3.1
with:
artifact-name: ${{ needs.build.outputs.artifact-name }}
cloud: lxd
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/release.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ jobs:

build:
name: Build charm
uses: canonical/data-platform-workflows/.github/workflows/build_charm_without_cache.yaml@v9.0.2
uses: canonical/data-platform-workflows/.github/workflows/build_charm_without_cache.yaml@v9.3.1
with:
charmcraft-snap-channel: beta

Expand All @@ -44,7 +44,7 @@ jobs:
needs:
- ci-tests
- build
uses: canonical/data-platform-workflows/.github/workflows/release_charm.yaml@v9.0.2
uses: canonical/data-platform-workflows/.github/workflows/release_charm.yaml@v9.3.1
with:
channel: 8.0/edge
artifact-name: ${{ needs.build.outputs.artifact-name }}
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/sync_issue_to_jira.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ on:
jobs:
sync:
name: Sync GitHub issue to Jira
uses: canonical/data-platform-workflows/.github/workflows/sync_issue_to_jira.yaml@v9.0.2
uses: canonical/data-platform-workflows/.github/workflows/sync_issue_to_jira.yaml@v9.3.1
with:
jira-base-url: https://warthogs.atlassian.net
jira-project-key: DPE
Expand Down
12 changes: 6 additions & 6 deletions lib/charms/mysql/v0/backups.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ def is_unit_blocked(self) -> bool:

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 7
LIBPATCH = 8


if typing.TYPE_CHECKING:
Expand Down Expand Up @@ -220,9 +220,11 @@ def _on_list_backups(self, event: ActionEvent) -> None:
logger.info("Listing backups in the specified s3 path")
backups = sorted(list_backups_in_s3_path(s3_parameters), key=lambda pair: pair[0])
event.set_results({"backups": self._format_backups_list(backups)})
except Exception:
error_message = "Failed to retrieve backup ids from S3"
logger.exception(error_message)
except Exception as e:
error_message = (
e.message if hasattr(e, "message") else "Failed to retrieve backup ids from S3"
)
logger.error(error_message)
event.fail(error_message)

# ------------------ Create Backup ------------------
Expand Down Expand Up @@ -632,8 +634,6 @@ def _post_restore(self) -> Tuple[bool, Optional[str]]:
):
return False, "Failed to configure restored instance for InnoDB cluster"

self.charm.unit_peer_data["unit-configured"] = "True"

try:
logger.info("Creating cluster on restored node")
unit_label = self.charm.unit.name.replace("/", "-")
Expand Down
23 changes: 19 additions & 4 deletions lib/charms/mysql/v0/s3_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,11 @@

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
LIBPATCH = 5
LIBPATCH = 6

# botocore/urllib3 clutter the logs when on debug
logging.getLogger("botocore").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)


def upload_content_to_s3(content: str, content_path: str, s3_parameters: Dict) -> bool:
Expand All @@ -55,6 +59,7 @@ def upload_content_to_s3(content: str, content_path: str, s3_parameters: Dict) -
)

s3 = session.resource("s3", endpoint_url=s3_parameters["endpoint"])

bucket = s3.Bucket(s3_parameters["bucket"])

with tempfile.NamedTemporaryFile() as temp_file:
Expand Down Expand Up @@ -89,7 +94,7 @@ def _compile_backups_from_file_ids(
return backups


def list_backups_in_s3_path(s3_parameters: Dict) -> List[Tuple[str, str]]:
def list_backups_in_s3_path(s3_parameters: Dict) -> List[Tuple[str, str]]: # noqa: C901
"""Retrieve subdirectories in an S3 path.
Args:
Expand Down Expand Up @@ -147,9 +152,19 @@ def list_backups_in_s3_path(s3_parameters: Dict) -> List[Tuple[str, str]]:

return _compile_backups_from_file_ids(metadata_ids, md5_ids, log_ids)
except Exception as e:
try:
# botocore raises dynamically generated exceptions
# with a response attribute. We can use this to
# set a more meaningful error message.
if e.response["Error"]["Code"] == "NoSuchBucket":
message = f"Bucket {s3_parameters['bucket']} does not exist"
setattr(e, "message", message)
raise
except (KeyError, AttributeError):
pass
# default handling exposes exception
logger.exception(
f"Failed to list subdirectories in S3 bucket={s3_parameters['bucket']}, path={s3_parameters['path']}",
exc_info=e,
f"Failed to list subdirectories in S3 bucket={s3_parameters['bucket']}, path={s3_parameters['path']}"
)
raise

Expand Down
76 changes: 48 additions & 28 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

7 changes: 4 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -62,10 +62,11 @@ parameterized = "^0.9.0"

[tool.poetry.group.integration.dependencies]
pytest = "^7.4.0"
pytest-github-secrets = {git = "https://github.com/canonical/data-platform-workflows", tag = "v9.0.2", subdirectory = "python/pytest_plugins/github_secrets"}
pytest-github-secrets = {git = "https://github.com/canonical/data-platform-workflows", tag = "v9.3.1", subdirectory = "python/pytest_plugins/github_secrets"}
pytest-microceph = {git = "https://github.com/canonical/data-platform-workflows", tag = "v9.3.1", subdirectory = "python/pytest_plugins/microceph"}
pytest-operator = "^0.28.0"
pytest-operator-cache = {git = "https://github.com/canonical/data-platform-workflows", tag = "v9.0.2", subdirectory = "python/pytest_plugins/pytest_operator_cache"}
pytest-operator-groups = {git = "https://github.com/canonical/data-platform-workflows", tag = "v9.0.2", subdirectory = "python/pytest_plugins/pytest_operator_groups"}
pytest-operator-cache = {git = "https://github.com/canonical/data-platform-workflows", tag = "v9.3.1", subdirectory = "python/pytest_plugins/pytest_operator_cache"}
pytest-operator-groups = {git = "https://github.com/canonical/data-platform-workflows", tag = "v9.3.1", subdirectory = "python/pytest_plugins/pytest_operator_groups"}
juju = "^3.2.2"
mysql-connector-python = "~8.0.33"
tenacity = "^8.2.2"
Expand Down
21 changes: 18 additions & 3 deletions tests/integration/test_backups.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,13 @@
# See LICENSE file for licensing details.

import logging
import socket
import uuid
from pathlib import Path

import boto3
import pytest
import pytest_microceph
from pytest_operator.plugin import OpsTest

from . import juju_
Expand All @@ -27,7 +29,7 @@
logger = logging.getLogger(__name__)

S3_INTEGRATOR = "s3-integrator"
S3_INTEGRATOR_CHANNEL = "latest/edge"
S3_INTEGRATOR_CHANNEL = "latest/stable"
TIMEOUT = 10 * 60
CLUSTER_ADMIN_USER = "clusteradmin"
CLUSTER_ADMIN_PASSWORD = "clusteradminpassword"
Expand All @@ -43,11 +45,12 @@


@pytest.fixture(scope="session")
def cloud_configs():
def cloud_configs(microceph: pytest_microceph.ConnectionInformation):
# Add UUID to path to avoid conflict with tests running in parallel (e.g. multiple Juju
# versions on a PR, multiple PRs)
path = f"mysql/{uuid.uuid4()}"

host_ip = socket.gethostbyname(socket.gethostname())
return {
"aws": {
"endpoint": "https://s3.amazonaws.com",
Expand All @@ -61,11 +64,19 @@ def cloud_configs():
"path": path,
"region": "",
},
"ceph": {
"endpoint": f"http://{host_ip}",
"bucket": microceph.bucket,
"path": path,
"region": "",
},
}


@pytest.fixture(scope="session")
def cloud_credentials(github_secrets) -> dict[str, dict[str, str]]:
def cloud_credentials(
github_secrets, microceph: pytest_microceph.ConnectionInformation
) -> dict[str, dict[str, str]]:
"""Read cloud credentials."""
return {
"aws": {
Expand All @@ -76,6 +87,10 @@ def cloud_credentials(github_secrets) -> dict[str, dict[str, str]]:
"access-key": github_secrets["GCP_ACCESS_KEY"],
"secret-key": github_secrets["GCP_SECRET_KEY"],
},
"ceph": {
"access-key": microceph.access_key_id,
"secret-key": microceph.secret_access_key,
},
}


Expand Down

0 comments on commit c290557

Please sign in to comment.