Add integration tests (#52)
mmkay authored Nov 30, 2023
1 parent 2be9a60 commit e005885
Showing 9 changed files with 137 additions and 22 deletions.
3 changes: 2 additions & 1 deletion CONTRIBUTING.md
@@ -34,8 +34,9 @@ source .tox/unit/bin/activate
tox -e fmt # update your code according to linting rules
tox -e lint # code style
tox -e unit # unit tests
+tox -e scenario # scenario tests
tox -e integration # integration tests
-tox # runs 'lint' and 'unit' environments
+tox # runs 'lint', 'scenario' and 'unit' environments
```

## Build charm
4 changes: 2 additions & 2 deletions HACKING.md
@@ -2,10 +2,10 @@ To manually test and develop tempo:

charmcraft pack
charmcraft pack -p ./tests/integration/tester/
-j deploy ./tempo-k8s_ubuntu-22.04-amd64.charm tempo
+j deploy ./tempo-k8s_ubuntu-22.04-amd64.charm tempo --resource tempo-image=grafana/tempo:1.5.0

you always need to deploy at least 2 units of tester:

-j deploy ./tests/integration/tester/tester-k8s_ubuntu-22.04-amd64.charm tester -n 3
+j deploy ./tester_ubuntu-22.04-amd64.charm tester -n 3 --resource workload=python:slim-buster
j relate tempo:tracing tester:tracing
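Note: `j` is presumably a shell alias for the `juju` CLI; if you don't have the alias set up, substitute `juju deploy` and `juju relate` in the commands above.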

4 changes: 2 additions & 2 deletions lib/charms/tempo_k8s/v0/tracing.py
@@ -93,7 +93,7 @@ def __init__(self, *args):

# Increment this PATCH version before using `charmcraft publish-lib` or reset
# to 0 if you are raising the major API version
-LIBPATCH = 7
+LIBPATCH = 8

PYDEPS = ["pydantic<2.0"]

@@ -494,7 +494,7 @@ def is_ready(self, relation: Optional[Relation] = None):
            return False
        try:
            TracingProviderAppData.load(relation.data[relation.app])
-        except (json.JSONDecodeError, pydantic.ValidationError):
+        except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError):
            logger.info(f"failed validating relation data for {relation}")
            return False
        return True
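For context on this change: a minimal sketch, assuming (as the names suggest) that `TracingProviderAppData.load` validates the raw relation databag and re-raises failures as the library's own `DataValidationError` — which is why `is_ready` must catch that wrapper type in addition to the raw `json`/`pydantic` errors. This is an illustrative reconstruction, not the library's actual code:

```python
import json

import pydantic


class DataValidationError(Exception):
    """Assumed library error: raised when relation databag contents fail validation."""


class TracingProviderAppData(pydantic.BaseModel):
    # hypothetical field; the real schema lives in lib/charms/tempo_k8s/v0/tracing.py
    host: str

    @classmethod
    def load(cls, databag):
        try:
            # relation databag values are JSON-encoded strings
            return cls(**{k: json.loads(v) for k, v in databag.items()})
        except (json.JSONDecodeError, pydantic.ValidationError) as e:
            # normalize any parsing/validation failure into the library's own type
            raise DataValidationError("invalid relation databag contents") from e
```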
15 changes: 15 additions & 0 deletions tests/integration/conftest.py
@@ -1,6 +1,8 @@
# Copyright 2021 Canonical Ltd.
# See LICENSE file for licensing details.
import logging
+import os
+import shutil
from pathlib import Path

import yaml
@@ -25,3 +27,16 @@ def tempo_metadata(ops_test: OpsTest):
@fixture(scope="module")
def tempo_oci_image(ops_test: OpsTest, tempo_metadata):
    return tempo_metadata["resources"]["tempo-image"]["upstream-source"]
+
+
+@fixture(scope="module", autouse=True)
+def copy_tempo_library_into_tester_charm(ops_test):
+    """Ensure the tester charm has the libraries it uses."""
+    libraries = [
+        "tempo_k8s/v0/charm_tracing.py",
+        "tempo_k8s/v0/tracing.py",
+    ]
+    for lib in libraries:
+        install_path = f"tests/integration/tester/lib/charms/{lib}"
+        os.makedirs(os.path.dirname(install_path), exist_ok=True)
+        shutil.copyfile(f"lib/charms/{lib}", install_path)
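Since this fixture is module-scoped and `autouse=True`, it runs once before the first test in the module, so the copied libraries are in place before `ops_test.build_charm` packs the tester charm.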
93 changes: 90 additions & 3 deletions tests/integration/test_integration.py
@@ -1,3 +1,90 @@
-def test_integration():
-    # todo: deploy tester and tempo; relate, check traces.
-    pass
import asyncio
import logging
from pathlib import Path

import pytest
import yaml
from pytest_operator.plugin import OpsTest

METADATA = yaml.safe_load(Path("./metadata.yaml").read_text())
APP_NAME = METADATA["name"]
TESTER_METADATA = yaml.safe_load(Path("./tests/integration/tester/metadata.yaml").read_text())
TESTER_APP_NAME = TESTER_METADATA["name"]

logger = logging.getLogger(__name__)


@pytest.mark.abort_on_fail
async def test_build_and_deploy(ops_test: OpsTest):
    # Given a fresh build of the charm
    # When deploying it together with the tester
    # Then applications should eventually be created
    tempo_charm = await ops_test.build_charm(".")
    tester_charm = await ops_test.build_charm("./tests/integration/tester/")
    resources = {"tempo-image": METADATA["resources"]["tempo-image"]["upstream-source"]}
    resources_tester = {"workload": TESTER_METADATA["resources"]["workload"]["upstream-source"]}

    await asyncio.gather(
        ops_test.model.deploy(tempo_charm, resources=resources, application_name=APP_NAME),
        ops_test.model.deploy(
            tester_charm,
            resources=resources_tester,
            application_name=TESTER_APP_NAME,
            num_units=3,
        ),
    )

    await asyncio.gather(
        ops_test.model.wait_for_idle(
            apps=[APP_NAME], status="active", raise_on_blocked=True, timeout=1000
        ),
        # the tester ends up either waiting or active, depending on how its race with tempo resolves
        ops_test.model.wait_for_idle(apps=[TESTER_APP_NAME], raise_on_blocked=True, timeout=1000),
    )

    assert ops_test.model.applications[APP_NAME].units[0].workload_status == "active"


@pytest.mark.abort_on_fail
async def test_relate(ops_test: OpsTest):
    # given a deployed charm
    # when relating it to the tester
    # then the relation should appear
    await ops_test.model.add_relation(APP_NAME + ":tracing", TESTER_APP_NAME + ":tracing")
    await ops_test.model.wait_for_idle(
        apps=[APP_NAME, TESTER_APP_NAME],
        status="active",
        timeout=1000,
    )


@pytest.mark.abort_on_fail
async def test_verify_traces(ops_test: OpsTest):
    # given a relation between charms
    # when the traces endpoint is queried
    # then it should contain traces from the tester charm
    status = await ops_test.model.get_status()
    app = status["applications"][APP_NAME]
    logger.info(app.public_address)
    endpoint = app.public_address + ":3200/api/search"
    cmd = [
        "curl",
        endpoint,
    ]
    rc, stdout, stderr = await ops_test.run(*cmd)
    logger.info("%s: %s", endpoint, (rc, stdout, stderr))
    assert rc == 0, (
        f"curl exited with rc={rc} for {endpoint}; "
        f"a non-zero exit code means curl failed to connect or complete the request; "
        f"cmd={cmd}"
    )
    assert "TempoTesterCharm" in stdout


@pytest.mark.abort_on_fail
async def test_remove_relation(ops_test: OpsTest):
    # given related charms
    # when relation is removed
    # then both charms should become active again
    await ops_test.juju("remove-relation", APP_NAME + ":tracing", TESTER_APP_NAME + ":tracing")
    await ops_test.model.wait_for_idle([APP_NAME, TESTER_APP_NAME], status="active")
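Each test is marked `abort_on_fail`, so pytest-operator aborts the remaining tests as soon as one fails; the deploy → relate → verify → remove-relation sequence therefore always runs in order against the same model.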
4 changes: 2 additions & 2 deletions tests/integration/tester/charmcraft.yaml
@@ -4,7 +4,7 @@ type: "charm"
bases:
  - build-on:
      - name: "ubuntu"
-        channel: "20.04"
+        channel: "22.04"
    run-on:
      - name: "ubuntu"
-        channel: "20.04"
+        channel: "22.04"
8 changes: 5 additions & 3 deletions tests/integration/tester/metadata.yaml
@@ -18,13 +18,15 @@ resources:
    upstream-source: python:slim-buster

provides:
-  tracing:
-    interface: tracing
-    limit: 1
  metrics-endpoint:
    interface: prometheus_scrape
    limit: 1

+requires:
+  tracing:
+    interface: tracing
+    limit: 1
+
peers:
  replicas:
    interface: replicas
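Moving `tracing` from `provides` to `requires` mirrors the tester charm's switch from `TracingEndpointProvider` to `TracingEndpointRequirer` below: the tester consumes tempo's tracing endpoint rather than offering one.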
26 changes: 18 additions & 8 deletions tests/integration/tester/src/charm.py
@@ -6,7 +6,9 @@
from pathlib import Path
from typing import List, Optional

-from charms.tempo_k8s.v0.tempo_scrape import TracingEndpointProvider
+from charms.prometheus_k8s.v0.prometheus_scrape import MetricsEndpointProvider
+from charms.tempo_k8s.v0.charm_tracing import trace_charm
+from charms.tempo_k8s.v0.tracing import TracingEndpointRequirer
from ops.charm import CharmBase, PebbleReadyEvent
from ops.main import main
from ops.model import (
@@ -18,14 +20,11 @@
)
from ops.pebble import Layer

-from charms.prometheus_k8s.v0.prometheus_scrape import MetricsEndpointProvider
-from charms.tempo_k8s.v0.charm_tracing import trace_charm

logger = logging.getLogger(__name__)
TRACING_APP_NAME = "TempoTesterCharm"


-@trace_charm(tempo_endpoint="tempo", service_name=TRACING_APP_NAME)
+@trace_charm(tracing_endpoint="tempo_otlp_grpc_endpoint", service_name=TRACING_APP_NAME)
class TempoTesterCharm(CharmBase):
"""Charm the service."""

Expand All @@ -40,7 +39,7 @@ def __init__(self, *args):
        self.container: Container = self.unit.get_container(self._container_name)

        self.metrics = MetricsEndpointProvider(self)
-        self.tracing = TracingEndpointProvider(self)
+        self.tracing = TracingEndpointRequirer(self)
        # Core lifecycle events
        self.framework.observe(self.on.config_changed, self._update)

@@ -99,9 +98,9 @@ def _tester_layer(self):
"PORT": self.config["port"],
"HOST": self.config["host"],
"APP_NAME": self.app.name,
"TEMPO_ENDPOINT": self.tracing.otlp_grpc_endpoint or "",
"TEMPO_ENDPOINT": str(self.tracing.otlp_grpc_endpoint) or "",
}
logging.info(f"Initing pebdble layer with env: {str(env)}")
logging.info(f"Initing pebble layer with env: {str(env)}")

if self.unit.name.split("/")[1] == "0":
env["PEERS"] = peers = ";".join(self.peers)
@@ -220,6 +219,10 @@ def _update(self, _):
            self.unit.status = MaintenanceStatus("waiting for IP address...")
            return

+        if not self.tracing.is_ready():
+            self.unit.status = WaitingStatus("waiting for tracing to be ready...")
+            return
+
        # In the case of a single unit deployment, no 'RelationJoined' event is emitted, so
        # setting IP here.
        # Store private address in unit's peer relation data bucket. This is still needed because
@@ -250,6 +253,13 @@ def _get_peer_addresses(self) -> List[str]:

        return addresses

+    def tempo_otlp_grpc_endpoint(self) -> Optional[str]:
+        """Endpoint at which the charm tracing information will be forwarded."""
+        if self.tracing.is_ready():
+            return self.tracing.otlp_grpc_endpoint()
+        else:
+            return None


if __name__ == "__main__":
    main(TempoTesterCharm)
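Note that `tracing_endpoint` in the `@trace_charm(...)` decorator is the *name* of a charm attribute, which is why the `tempo_otlp_grpc_endpoint` method is added above. A rough sketch of the assumed lookup, not the actual `charm_tracing` implementation:

```python
from typing import Optional


def _resolve_tracing_endpoint(charm, attr_name: str) -> Optional[str]:
    """Hypothetical helper: fetch the endpoint the decorator should export spans to."""
    attr = getattr(charm, attr_name)  # e.g. charm.tempo_otlp_grpc_endpoint
    # call it if it is a method; a None result means tracing is simply skipped
    return attr() if callable(attr) else attr
```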
2 changes: 1 addition & 1 deletion tox.ini
@@ -79,7 +79,7 @@ deps =
    requests
    -r{toxinidir}/requirements.txt
commands =
-    pytest -v --tb native --log-cli-level=INFO -s {posargs} {[vars]tst_path}integration
+    pytest -v --tb native --log-cli-level=INFO {[vars]tst_path}integration -s {posargs}
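With `{posargs}` at the end of the command, arguments after `--` on the tox invocation are forwarded straight to pytest, e.g. `tox -e integration -- -k test_relate` runs a single test.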

[testenv:interface]
description = Run interface tests
