diff --git a/charmcraft.yaml b/charmcraft.yaml index ad101233..2c3aab82 100644 --- a/charmcraft.yaml +++ b/charmcraft.yaml @@ -113,7 +113,7 @@ provides: self-profiling-endpoint: interface: parca_scrape description: | - The profiling endpoint at which profiles for this parca instance are served. + Endpoint to allow this parca instance's profiles to be scraped by a remote parca instance. parca-store-endpoint: interface: parca_store description: | diff --git a/lib/charms/observability_libs/v0/juju_topology.py b/lib/charms/observability_libs/v0/juju_topology.py deleted file mode 100644 index a79e5d43..00000000 --- a/lib/charms/observability_libs/v0/juju_topology.py +++ /dev/null @@ -1,301 +0,0 @@ -# Copyright 2022 Canonical Ltd. -# See LICENSE file for licensing details. -"""## Overview. - -This document explains how to use the `JujuTopology` class to -create and consume topology information from Juju in a consistent manner. - -The goal of the Juju topology is to uniquely identify a piece -of software running across any of your Juju-managed deployments. -This is achieved by combining the following four elements: - -- Model name -- Model UUID -- Application name -- Unit identifier - - -For a more in-depth description of the concept, as well as a -walk-through of it's use-case in observability, see -[this blog post](https://juju.is/blog/model-driven-observability-part-2-juju-topology-metrics) -on the Juju blog. - -## Library Usage - -This library may be used to create and consume `JujuTopology` objects. -The `JujuTopology` class provides three ways to create instances: - -### Using the `from_charm` method - -Enables instantiation by supplying the charm as an argument. When -creating topology objects for the current charm, this is the recommended -approach. 
- -```python -topology = JujuTopology.from_charm(self) -``` - -### Using the `from_dict` method - -Allows for instantion using a dictionary of relation data, like the -`scrape_metadata` from Prometheus or the labels of an alert rule. When -creating topology objects for remote charms, this is the recommended -approach. - -```python -scrape_metadata = json.loads(relation.data[relation.app].get("scrape_metadata", "{}")) -topology = JujuTopology.from_dict(scrape_metadata) -``` - -### Using the class constructor - -Enables instantiation using whatever values you want. While this -is useful in some very specific cases, this is almost certainly not -what you are looking for as setting these values manually may -result in observability metrics which do not uniquely identify a -charm in order to provide accurate usage reporting, alerting, -horizontal scaling, or other use cases. - -```python -topology = JujuTopology( - model="some-juju-model", - model_uuid="00000000-0000-0000-0000-000000000001", - application="fancy-juju-application", - unit="fancy-juju-application/0", - charm_name="fancy-juju-application-k8s", -) -``` - -""" -from collections import OrderedDict -from typing import Dict, List, Optional -from uuid import UUID - -# The unique Charmhub library identifier, never change it -LIBID = "bced1658f20f49d28b88f61f83c2d232" - -LIBAPI = 0 -LIBPATCH = 6 - - -class InvalidUUIDError(Exception): - """Invalid UUID was provided.""" - - def __init__(self, uuid: str): - self.message = "'{}' is not a valid UUID.".format(uuid) - super().__init__(self.message) - - -class JujuTopology: - """JujuTopology is used for storing, generating and formatting juju topology information. - - DEPRECATED: This class is deprecated. Use `pip install cosl` and - `from cosl.juju_topology import JujuTopology` instead. 
- """ - - def __init__( - self, - model: str, - model_uuid: str, - application: str, - unit: Optional[str] = None, - charm_name: Optional[str] = None, - ): - """Build a JujuTopology object. - - A `JujuTopology` object is used for storing and transforming - Juju topology information. This information is used to - annotate Prometheus scrape jobs and alert rules. Such - annotation when applied to scrape jobs helps in identifying - the source of the scrapped metrics. On the other hand when - applied to alert rules topology information ensures that - evaluation of alert expressions is restricted to the source - (charm) from which the alert rules were obtained. - - Args: - model: a string name of the Juju model - model_uuid: a globally unique string identifier for the Juju model - application: an application name as a string - unit: a unit name as a string - charm_name: name of charm as a string - """ - if not self.is_valid_uuid(model_uuid): - raise InvalidUUIDError(model_uuid) - - self._model = model - self._model_uuid = model_uuid - self._application = application - self._charm_name = charm_name - self._unit = unit - - def is_valid_uuid(self, uuid): - """Validate the supplied UUID against the Juju Model UUID pattern. - - Args: - uuid: string that needs to be checked if it is valid v4 UUID. - - Returns: - True if parameter is a valid v4 UUID, False otherwise. - """ - try: - return str(UUID(uuid, version=4)) == uuid - except (ValueError, TypeError): - return False - - @classmethod - def from_charm(cls, charm): - """Creates a JujuTopology instance by using the model data available on a charm object. - - Args: - charm: a `CharmBase` object for which the `JujuTopology` will be constructed - Returns: - a `JujuTopology` object. 
- """ - return cls( - model=charm.model.name, - model_uuid=charm.model.uuid, - application=charm.model.app.name, - unit=charm.model.unit.name, - charm_name=charm.meta.name, - ) - - @classmethod - def from_dict(cls, data: dict): - """Factory method for creating `JujuTopology` children from a dictionary. - - Args: - data: a dictionary with five keys providing topology information. The keys are - - "model" - - "model_uuid" - - "application" - - "unit" - - "charm_name" - `unit` and `charm_name` may be empty, but will result in more limited - labels. However, this allows us to support charms without workloads. - - Returns: - a `JujuTopology` object. - """ - return cls( - model=data["model"], - model_uuid=data["model_uuid"], - application=data["application"], - unit=data.get("unit", ""), - charm_name=data.get("charm_name", ""), - ) - - def as_dict( - self, - *, - remapped_keys: Optional[Dict[str, str]] = None, - excluded_keys: Optional[List[str]] = None, - ) -> OrderedDict: - """Format the topology information into an ordered dict. - - Keeping the dictionary ordered is important to be able to - compare dicts without having to resort to deep comparisons. - - Args: - remapped_keys: A dictionary mapping old key names to new key names, - which will be substituted when invoked. - excluded_keys: A list of key names to exclude from the returned dict. - uuid_length: The length to crop the UUID to. - """ - ret = OrderedDict( - [ - ("model", self.model), - ("model_uuid", self.model_uuid), - ("application", self.application), - ("unit", self.unit), - ("charm_name", self.charm_name), - ] - ) - if excluded_keys: - ret = OrderedDict({k: v for k, v in ret.items() if k not in excluded_keys}) - - if remapped_keys: - ret = OrderedDict( - (remapped_keys.get(k), v) if remapped_keys.get(k) else (k, v) for k, v in ret.items() # type: ignore - ) - - return ret - - @property - def identifier(self) -> str: - """Format the topology information into a terse string. 
- - This crops the model UUID, making it unsuitable for comparisons against - anything but other identifiers. Mainly to be used as a display name or file - name where long strings might become an issue. - - >>> JujuTopology( \ - model = "a-model", \ - model_uuid = "00000000-0000-4000-8000-000000000000", \ - application = "some-app", \ - unit = "some-app/1" \ - ).identifier - 'a-model_00000000_some-app' - """ - parts = self.as_dict( - excluded_keys=["unit", "charm_name"], - ) - - parts["model_uuid"] = self.model_uuid_short - values = parts.values() - - return "_".join([str(val) for val in values]).replace("/", "_") - - @property - def label_matcher_dict(self) -> Dict[str, str]: - """Format the topology information into a dict with keys having 'juju_' as prefix. - - Relabelled topology never includes the unit as it would then only match - the leader unit (ie. the unit that produced the dict). - """ - items = self.as_dict( - remapped_keys={"charm_name": "charm"}, - excluded_keys=["unit"], - ).items() - - return {"juju_{}".format(key): value for key, value in items if value} - - @property - def label_matchers(self) -> str: - """Format the topology information into a promql/logql label matcher string. - - Topology label matchers should never include the unit as it - would then only match the leader unit (ie. the unit that - produced the matchers). 
- """ - items = self.label_matcher_dict.items() - return ", ".join(['{}="{}"'.format(key, value) for key, value in items if value]) - - @property - def model(self) -> str: - """Getter for the juju model value.""" - return self._model - - @property - def model_uuid(self) -> str: - """Getter for the juju model uuid value.""" - return self._model_uuid - - @property - def model_uuid_short(self) -> str: - """Getter for the juju model value, truncated to the first eight letters.""" - return self._model_uuid[:8] - - @property - def application(self) -> str: - """Getter for the juju application value.""" - return self._application - - @property - def charm_name(self) -> Optional[str]: - """Getter for the juju charm name value.""" - return self._charm_name - - @property - def unit(self) -> Optional[str]: - """Getter for the juju unit value.""" - return self._unit diff --git a/lib/charms/parca_k8s/v0/parca_scrape.py b/lib/charms/parca_k8s/v0/parca_scrape.py index f5d587a8..9dec7c15 100644 --- a/lib/charms/parca_k8s/v0/parca_scrape.py +++ b/lib/charms/parca_k8s/v0/parca_scrape.py @@ -171,10 +171,11 @@ def _on_scrape_targets_changed(self, event): import json import logging import socket +from cosl import JujuTopology from typing import List, Optional, Union +from ops.model import Relation import ops -from charms.observability_libs.v0.juju_topology import JujuTopology # The unique Charmhub library identifier, never change it LIBID = "dbc3d2e89cb24917b99c40e14354dd25" @@ -184,7 +185,7 @@ def _on_scrape_targets_changed(self, event): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 3 +LIBPATCH = 4 logger = logging.getLogger(__name__) @@ -811,6 +812,22 @@ def _is_valid_unit_address(self, address: str) -> bool: except ValueError: return False + @property + def _relations(self) -> List[Relation]: + """The relations currently active on this endpoint.""" + return 
self._charm.model.relations[self._relation_name] + + def is_ready(self, relation: Optional[Relation] = None) -> bool: + """Check if the relation(s) on this endpoint are ready.""" + relations = [relation] if relation else self._relations + + if not relations: + logger.debug(f"no relation on {self._relation_name!r}.") + return False + + # TODO: once we have a pydantic model, we can also check for the integrity of the databags. + return all((relation.app and relation.data) for relation in relations) + @property def _scrape_jobs(self) -> list: """Fetch list of scrape jobs. diff --git a/src/charm.py b/src/charm.py index f08007cd..95869b0d 100755 --- a/src/charm.py +++ b/src/charm.py @@ -8,7 +8,7 @@ import socket import typing from pathlib import Path -from typing import FrozenSet, List, Optional +from typing import Dict, FrozenSet, List, Optional from urllib.parse import urlparse import ops @@ -31,6 +31,7 @@ TLSCertificatesRequiresV4, ) from charms.traefik_k8s.v2.ingress import IngressPerAppRequirer +from cosl import JujuTopology from models import S3Config, TLSConfig from nginx import ( @@ -38,7 +39,7 @@ Nginx, ) from nginx_prometheus_exporter import NginxPrometheusExporter -from parca import Parca, ScrapeJob, ScrapeJobsConfig +from parca import Parca, RelabelConfig, ScrapeJob, ScrapeJobsConfig logger = logging.getLogger(__name__) @@ -51,6 +52,19 @@ # we can ask s3 for a bucket name, but we may get back a different one PREFERRED_BUCKET_NAME = "parca" +RELABEL_CONFIG = [ + { + "source_labels": [ + "juju_model", + "juju_model_uuid", + "juju_application", + "juju_unit", + ], + "separator": "_", + "target_label": "instance", + "regex": "(.*)", + } +] @trace_charm( @@ -95,6 +109,7 @@ def __init__(self, *args): external_url=self._external_url, refresh_event=[self.certificates.on.certificate_available], ) + self.self_profiling_endpoint_provider = ProfilingEndpointProvider( self, jobs=self._self_profiling_scrape_jobs, @@ -138,9 +153,17 @@ def __init__(self, *args): # WORKLOADS 
# these need to be instantiated after `ingress` is, as it accesses self._external_url_path + self.nginx = Nginx( + container=self.unit.get_container(Nginx.container_name), + server_name=self._fqdn, + address=Address(name="parca", port=Parca.port), + path_prefix=self._external_url_path, + tls_config=self._tls_config, + ) + # parca needs to be instantiated after `nginx`, as it accesses self.nginx.port self.parca = Parca( container=self.unit.get_container(Parca.container_name), - scrape_configs=self.profiling_consumer.jobs(), + scrape_configs=self._profiling_scrape_configs, enable_persistence=typing.cast(bool, self.config.get("enable-persistence", None)), memory_storage_limit=typing.cast(int, self.config.get("memory-storage-limit", None)), store_config=self.store_requirer.config, @@ -152,13 +175,6 @@ def __init__(self, *args): container=self.unit.get_container(NginxPrometheusExporter.container_name), nginx_port=Nginx.port, ) - self.nginx = Nginx( - container=self.unit.get_container(Nginx.container_name), - server_name=self._fqdn, - address=Address(name="parca", port=Parca.port), - path_prefix=self._external_url_path, - tls_config=self._tls_config, - ) # event handlers self.framework.observe(self.on.collect_unit_status, self._on_collect_unit_status) @@ -234,7 +250,6 @@ def _external_url_path(self) -> Optional[str]: def _tls_config(self) -> Optional["TLSConfig"]: if not self.model.relations.get(CERTIFICATES_RELATION_NAME): return None - cr = self._get_certificate_request_attributes() certificate, key = self.certificates.get_assigned_certificate(certificate_request=cr) @@ -257,6 +272,43 @@ def _get_certificate_request_attributes(self) -> CertificateRequestAttributes: sans_dns=sans_dns, ) + # SCRAPE JOBS CONFIGURATION + @property + def _profiling_scrape_configs(self) -> List[ScrapeJobsConfig]: + """The scrape configuration that Parca will use for scraping profiles. 
+ + The configuration includes the targets scraped by Parca as well as Parca's + own workload profiles if they are not already being scraped by a remote Parca. + """ + scrape_configs = self.profiling_consumer.jobs() + # Append parca's self scrape config if no remote parca instance is integrated over "self-profiling-endpoint" + if not self.self_profiling_endpoint_provider.is_ready(): + scrape_configs.append(self._self_profiling_scrape_config) + return scrape_configs + + @property + def _self_profiling_scrape_config(self) -> ScrapeJobsConfig: + """Profiling scrape config to scrape parca's own workload profiles. + + This config also adds juju topology to the scraped profiles. + """ + job_name = "parca" + # add the juju_ prefix to labels + labels = { + "juju_{}".format(key): value + for key, value in JujuTopology.from_charm(self).as_dict().items() + if value + } + + return self._format_scrape_target( + self.nginx.port, + self._scheme, + profiles_path=self._external_url_path, + labels=labels, + job_name=job_name, + relabel_configs=RELABEL_CONFIG, + )[0] + # STORAGE CONFIG @property def _s3_config(self) -> Optional[S3Config]: @@ -293,9 +345,18 @@ def _self_profiling_scrape_jobs(self) -> List[ScrapeJobsConfig]: ) def _format_scrape_target( - self, port: int, scheme="http", metrics_path=None, profiles_path: Optional[str] = None + self, + port: int, + scheme="http", + metrics_path=None, + profiles_path: Optional[str] = None, + labels: Optional[Dict[str, str]] = None, + job_name: Optional[str] = None, + relabel_configs: Optional[List[RelabelConfig]] = None, ) -> List[ScrapeJobsConfig]: job: ScrapeJob = {"targets": [f"{self._fqdn}:{port}"]} + if labels: + job["labels"] = labels jobs_config: ScrapeJobsConfig = {"static_configs": [job]} if metrics_path: jobs_config["metrics_path"] = metrics_path @@ -309,6 +370,10 @@ def _format_scrape_target( # https://github.com/canonical/prometheus-k8s-operator/issues/670 "ca_file" if metrics_path else "ca": Path(CA_CERT_PATH).read_text() } + 
if job_name: + jobs_config["job_name"] = job_name + if relabel_configs: + jobs_config["relabel_configs"] = relabel_configs return [jobs_config] diff --git a/src/parca.py b/src/parca.py index d4bec20f..cb23a413 100644 --- a/src/parca.py +++ b/src/parca.py @@ -8,7 +8,7 @@ import time import typing import urllib.request -from typing import Dict, List, Literal, Optional, Sequence, TypedDict +from typing import Dict, List, Literal, Optional, Sequence, TypedDict, Union import yaml from ops import Container @@ -32,7 +32,8 @@ DEFAULT_PROFILE_PATH = "/var/lib/parca" S3_TLS_CA_CERT_PATH = "/etc/parca/s3_ca.crt" -ScrapeJob = Dict[str, List[str]] +ScrapeJob = Dict[str, Union[List[str], Dict[str, str]]] +RelabelConfig = Dict[str, Union[list[str], str]] class ScrapeJobsConfig(TypedDict, total=False): @@ -43,6 +44,8 @@ class ScrapeJobsConfig(TypedDict, total=False): metrics_path: str scheme: Optional[Literal["https"]] tls_config: Dict[str, str] + job_name: Optional[str] + relabel_configs: Optional[List[RelabelConfig]] class Parca: diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py index 56b76139..95ad9519 100644 --- a/tests/integration/helpers.py +++ b/tests/integration/helpers.py @@ -75,3 +75,9 @@ async def deploy_and_configure_minio(ops_test: OpsTest): action = await s3_integrator_leader.run_action("sync-s3-credentials", **config) action_result = await action.wait() assert action_result.status == "completed" + + +async def get_pubic_address(ops_test: OpsTest, app_name): + """Return a juju application's public address.""" + status = await ops_test.model.get_status() # noqa: F821 + return status["applications"][app_name]["public-address"] diff --git a/tests/integration/test_charm.py b/tests/integration/test_charm.py index b3de6061..73556343 100644 --- a/tests/integration/test_charm.py +++ b/tests/integration/test_charm.py @@ -5,6 +5,7 @@ import asyncio import requests +from helpers import get_pubic_address from pytest import mark from pytest_operator.plugin 
import OpsTest from tenacity import retry @@ -30,8 +31,7 @@ async def test_deploy(ops_test: OpsTest, parca_charm, parca_resources): @retry(wait=wexp(multiplier=2, min=1, max=30), stop=stop_after_delay(60 * 15), reraise=True) async def test_application_is_up(ops_test: OpsTest): - status = await ops_test.model.get_status() # noqa: F821 - address = status["applications"][PARCA]["public-address"] + address = await get_pubic_address(ops_test, PARCA) response = requests.get(f"http://{address}:8080/") assert response.status_code == 200 response = requests.get(f"http://{address}:8080/metrics") @@ -61,12 +61,18 @@ async def test_profiling_endpoint_relation(ops_test: OpsTest): @retry(wait=wexp(multiplier=2, min=1, max=30), stop=stop_after_attempt(10), reraise=True) async def test_profiling_relation_is_configured(ops_test: OpsTest): - status = await ops_test.model.get_status() # noqa: F821 - address = status["applications"][PARCA]["public-address"] + address = await get_pubic_address(ops_test, PARCA) response = requests.get(f"http://{address}:8080/metrics") assert "zinc" in response.text +@retry(wait=wexp(multiplier=2, min=1, max=30), stop=stop_after_attempt(10), reraise=True) +async def test_self_profiling(ops_test: OpsTest): + address = await get_pubic_address(ops_test, PARCA) + response = requests.get(f"http://{address}:8080/metrics") + assert f'"{PARCA}"' in response.text + + @mark.abort_on_fail async def test_metrics_endpoint_relation(ops_test: OpsTest): await asyncio.gather( diff --git a/tests/integration/test_tls.py b/tests/integration/test_tls.py index d378a873..3e29845e 100644 --- a/tests/integration/test_tls.py +++ b/tests/integration/test_tls.py @@ -7,6 +7,9 @@ import pytest from helpers import get_unit_fqdn +from tenacity import retry +from tenacity.stop import stop_after_attempt +from tenacity.wait import wait_exponential as wexp from nginx import CA_CERT_PATH, NGINX_PORT @@ -62,6 +65,13 @@ async def test_direct_url_200(ops_test): assert exit_code == 0, f"Failed 
to query the parca server. {output}" +@retry(wait=wexp(multiplier=2, min=1, max=30), stop=stop_after_attempt(10), reraise=True) +async def test_self_profiling_scraping(ops_test): + exit_code, output = query_parca_server(ops_test.model_name, PARCA, url_path="/metrics") + assert exit_code == 0, f"Failed to query the parca server. {output}" + assert f'"{PARCA}"' in output + + @pytest.mark.abort_on_fail async def test_deploy_parca_tester(ops_test, parca_charm, parca_resources): # Deploy and integrate tester charm @@ -78,7 +88,7 @@ async def test_deploy_parca_tester(ops_test, parca_charm, parca_resources): await ops_test.model.wait_for_idle(apps=[PARCA, PARCA_TESTER], status="active", timeout=500) -async def test_tls_scraping(ops_test): +async def test_profiling_scraping(ops_test): exit_code, output = query_parca_server(ops_test.model_name, PARCA_TESTER, url_path="/metrics") assert exit_code == 0, f"Failed to query the parca server. {output}" assert PARCA_TESTER in output @@ -90,9 +100,10 @@ async def test_remove_tls(ops_test): # FIXME: should we be disintegrating the tester-ssc relation too? 
await ops_test.juju("remove-relation", PARCA + ":certificates", SSC + ":certificates") # we need to wait for a while until parca's nginx loses the TLS connection - await ops_test.model.wait_for_idle(apps=[PARCA], status="active", timeout=500, idle_period=60) + await ops_test.model.wait_for_idle(apps=[PARCA], status="active", timeout=500) +@retry(wait=wexp(multiplier=2, min=1, max=30), stop=stop_after_attempt(10), reraise=True) async def test_direct_url_400(ops_test): exit_code, _ = query_parca_server(ops_test.model_name, SSC, SSC_CA_CERT_PATH) assert exit_code != 0 diff --git a/tests/unit/test_charm/conftest.py b/tests/unit/test_charm/conftest.py index 8f6329f0..a8399b6a 100644 --- a/tests/unit/test_charm/conftest.py +++ b/tests/unit/test_charm/conftest.py @@ -1,5 +1,5 @@ from contextlib import ExitStack -from unittest.mock import patch +from unittest.mock import MagicMock, patch import pytest from ops.testing import Container, Context, PeerRelation @@ -26,6 +26,12 @@ def patch_all(tmp_path): stack.enter_context(patch("charm.CA_CERT_PATH", str(ca_tmp_path))) stack.enter_context(patch("parca.CA_CERT_PATH", str(ca_tmp_path))) stack.enter_context(patch("parca.Parca.version", "v0.12.0")) + stack.enter_context( + patch( + "charm.JujuTopology.from_charm", + MagicMock(return_value=MagicMock(as_dict=MagicMock(return_value={}))), + ) + ) yield diff --git a/tests/unit/test_charm/test_charm.py b/tests/unit/test_charm/test_charm.py index 04abe863..f1910496 100644 --- a/tests/unit/test_charm/test_charm.py +++ b/tests/unit/test_charm/test_charm.py @@ -10,6 +10,7 @@ from ops.model import ActiveStatus, WaitingStatus from ops.testing import CharmEvents, Relation, State +from charm import RELABEL_CONFIG from nginx import NGINX_PORT from parca import DEFAULT_CONFIG_PATH, PARCA_PORT from tests.unit.test_charm.container_utils import ( @@ -138,7 +139,13 @@ def test_config_changed_active_memory(context, base_state): def test_config_file_written(context, parca_container, base_state): - 
state_out = context.run(context.on.pebble_ready(parca_container), base_state) + self_profiling = Relation( + "self-profiling-endpoint", + ) + + state_out = context.run( + context.on.pebble_ready(parca_container), replace(base_state, relations={self_profiling}) + ) assert_parca_config_equals( context, state_out, @@ -192,6 +199,10 @@ def test_parca_pebble_layer_storage_persist(context, base_state): def test_profiling_endpoint_relation(context, base_state): + self_profiling_relation = Relation( + "self-profiling-endpoint", + ) + relation = Relation( "profiling-endpoint", remote_app_name="profiled-app", @@ -225,23 +236,12 @@ def test_profiling_endpoint_relation(context, base_state): } ], "job_name": f"test-model_{str(_uuid).split('-')[0]}_profiled-app_my-first-job", - "relabel_configs": [ - { - "source_labels": [ - "juju_model", - "juju_model_uuid", - "juju_application", - "juju_unit", - ], - "separator": "_", - "target_label": "instance", - "regex": "(.*)", - } - ], + "relabel_configs": RELABEL_CONFIG, } ] with context( - context.on.relation_changed(relation), replace(base_state, relations={relation}) + context.on.relation_changed(relation), + replace(base_state, relations={relation, self_profiling_relation}), ) as mgr: assert mgr.charm.profiling_consumer.jobs() == expected_jobs state_out = mgr.run() @@ -327,3 +327,44 @@ def test_parca_external_store_relation(context, base_state): f"--insecure=false " f"--mode=scraper-only", ) + + +def test_self_profiling_no_endpoint_relation(context, base_state): + # verify that the scrape config contains the self-scraping job + expected_scrape_config = [ + { + "job_name": "parca", + "relabel_configs": RELABEL_CONFIG, + "static_configs": [ + { + "targets": [f"{socket.getfqdn()}:{NGINX_PORT}"], + } + ], + } + ] + + with context(context.on.config_changed(), base_state) as mgr: + scrape_config = mgr.charm._profiling_scrape_configs + assert scrape_config == expected_scrape_config + + +def test_self_profiling_endpoint_relation(context, 
base_state): + expected_scrape_jobs = [ + {"static_configs": [{"targets": [f"{socket.getfqdn()}:{NGINX_PORT}"]}]} + ] + # GIVEN a self-profiling-endpoint relation + relation = Relation("self-profiling-endpoint") + + # WHEN we get a relation changed event + with context( + context.on.relation_changed(relation), + replace(base_state, leader=True, relations={relation}), + ) as mgr: + state_out = mgr.run() + scrape_config = mgr.charm._profiling_scrape_configs + # THEN no self-profiling scrape job in the generated config + assert not scrape_config + + # AND self-profiling scrape job is sent to remote app + rel_out = state_out.get_relation(relation.id) + assert rel_out.local_app_data["scrape_jobs"] == json.dumps(expected_scrape_jobs) diff --git a/tests/unit/test_charm/test_tls.py b/tests/unit/test_charm/test_tls.py index 196f3e44..a588ada7 100644 --- a/tests/unit/test_charm/test_tls.py +++ b/tests/unit/test_charm/test_tls.py @@ -92,11 +92,7 @@ def base_state( ) -def test_endpoint_with_tls_enabled( - context, - base_state, - certificates, -): +def test_endpoint_with_tls_enabled(context, base_state, certificates, ca): # GIVEN a charm with certificates relation # WHEN we process any event with context(context.on.relation_changed(certificates), base_state) as mgr: @@ -104,6 +100,9 @@ def test_endpoint_with_tls_enabled( # THEN we have TLS enabled assert charm._tls_ready assert charm._external_url.startswith("https://") + scrape_config = charm._self_profiling_scrape_config + assert "scheme" in scrape_config and scrape_config["scheme"] == "https" + assert "tls_config" in scrape_config and scrape_config["tls_config"]["ca"] == ca.raw def test_endpoint_with_tls_disabled( @@ -118,3 +117,5 @@ def test_endpoint_with_tls_disabled( # THEN we have TLS disabled assert not charm._tls_ready assert charm._external_url.startswith("http://") + scrape_config = charm._self_profiling_scrape_config + assert "scheme" not in scrape_config diff --git a/uv.lock b/uv.lock index eca7ce06..944a7e32 
100644 --- a/uv.lock +++ b/uv.lock @@ -1346,7 +1346,7 @@ wheels = [ [[package]] name = "pytest-operator" -version = "0.38.0" +version = "0.35.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ipdb" }, @@ -1356,9 +1356,9 @@ dependencies = [ { name = "pytest-asyncio" }, { name = "pyyaml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b5/ac/357bd534ad9f98eaf1a472746614a99073c631d92607f6aba37e6ee0fb69/pytest_operator-0.38.0.tar.gz", hash = "sha256:bf2dc92fc32eff82708b57783941f24c8500dfb3770098811c28ee31b7f292f3", size = 43746 } +sdist = { url = "https://files.pythonhosted.org/packages/f6/33/cd3d5e614f586b3c63a52c83c3784b5d343cd5d9ec9d533b39482fba9a0e/pytest-operator-0.35.0.tar.gz", hash = "sha256:ed963dc013fc576e218081e95197926b7c98116c1fb5ab234269cf72e0746d5b", size = 43000 } wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/da/7afbef57cdfe2050b266ff3c835df74d91ea3db4b110e7a1c64e902f5bd1/pytest_operator-0.38.0-py3-none-any.whl", hash = "sha256:eef53d40d1301d1efd97c98021b3715e1ad9e932e720792e1897e8de3d47a8d2", size = 25034 }, + { url = "https://files.pythonhosted.org/packages/be/bd/0864ac5ea0b9fd52cf46de16361bef8df0b122dee8a15cace8205b4563d5/pytest_operator-0.35.0-py3-none-any.whl", hash = "sha256:026715faba7a0d725ca386fe05a45cfc73746293d8d755be6d2a67ca252267f5", size = 24584 }, ] [[package]]