diff --git a/lib/charms/grafana_k8s/v0/grafana_dashboard.py b/lib/charms/grafana_k8s/v0/grafana_dashboard.py index 1f1bc4f0..dfc32ddc 100644 --- a/lib/charms/grafana_k8s/v0/grafana_dashboard.py +++ b/lib/charms/grafana_k8s/v0/grafana_dashboard.py @@ -219,7 +219,7 @@ def __init__(self, *args): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 35 +LIBPATCH = 36 logger = logging.getLogger(__name__) @@ -1050,6 +1050,7 @@ def __init__( self.framework.observe(self._charm.on.leader_elected, self._update_all_dashboards_from_dir) self.framework.observe(self._charm.on.upgrade_charm, self._update_all_dashboards_from_dir) + self.framework.observe(self._charm.on.config_changed, self._update_all_dashboards_from_dir) self.framework.observe( self._charm.on[self._relation_name].relation_created, diff --git a/lib/charms/loki_k8s/v1/loki_push_api.py b/lib/charms/loki_k8s/v1/loki_push_api.py index 4b3bcfee..c3c1d086 100644 --- a/lib/charms/loki_k8s/v1/loki_push_api.py +++ b/lib/charms/loki_k8s/v1/loki_push_api.py @@ -16,20 +16,24 @@ send log to Loki by implementing the consumer side of the `loki_push_api` relation interface. For instance, a Promtail or Grafana agent charm which needs to send logs to Loki. -- `LogProxyConsumer`: This object can be used by any Charmed Operator which needs to -send telemetry, such as logs, to Loki through a Log Proxy by implementing the consumer side of the -`loki_push_api` relation interface. +- `LogProxyConsumer`: DEPRECATED. +This object can be used by any Charmed Operator which needs to send telemetry, such as logs, to +Loki through a Log Proxy by implementing the consumer side of the `loki_push_api` relation +interface. +In order to be able to control the labels on the logs pushed this object adds a Pebble layer +that runs Promtail in the workload container, injecting Juju topology labels into the +logs on the fly. +This object is deprecated. Consider migrating to LogForwarder with the release of Juju 3.6 LTS. - `LogForwarder`: This object can be used by any Charmed Operator which needs to send the workload standard output (stdout) through Pebble's log forwarding mechanism, to Loki endpoints through the `loki_push_api` relation interface. +In order to be able to control the labels on the logs pushed this object updates the pebble layer's +"log-targets" section with Juju topology. Filtering logs in Loki is largely performed on the basis of labels. In the Juju ecosystem, Juju topology labels are used to uniquely identify the workload which generates telemetry like logs. -In order to be able to control the labels on the logs pushed this object adds a Pebble layer -that runs Promtail in the workload container, injecting Juju topology labels into the -logs on the fly. ## LokiPushApiProvider Library Usage @@ -42,13 +46,14 @@ - `charm`: A reference to the parent (Loki) charm. - `relation_name`: The name of the relation that the charm uses to interact - with its clients, which implement `LokiPushApiConsumer` or `LogProxyConsumer`. + with its clients, which implement `LokiPushApiConsumer` `LogForwarder`, or `LogProxyConsumer` + (note that LogProxyConsumer is deprecated). If provided, this relation name must match a provided relation in metadata.yaml with the `loki_push_api` interface. - The default relation name is "logging" for `LokiPushApiConsumer` and "log-proxy" for - `LogProxyConsumer`. 
+ The default relation name is "logging" for `LokiPushApiConsumer` and `LogForwarder`, and + "log-proxy" for `LogProxyConsumer` (note that LogProxyConsumer is deprecated). For example, a provider's `metadata.yaml` file may look as follows: @@ -223,6 +228,9 @@ def __init__(self, *args): ## LogProxyConsumer Library Usage +> Note: This object is deprecated. Consider migrating to LogForwarder with the release of Juju 3.6 +> LTS. + Let's say that we have a workload charm that produces logs, and we need to send those logs to a workload implementing the `loki_push_api` interface, such as `Loki` or `Grafana Agent`. @@ -519,7 +527,7 @@ def _alert_rules_error(self, event): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 10 +LIBPATCH = 11 PYDEPS = ["cosl"] @@ -1593,7 +1601,8 @@ def __init__( the Loki API endpoint to push logs. It is intended for workloads that can speak loki_push_api (https://grafana.com/docs/loki/latest/api/#push-log-entries-to-loki), such as grafana-agent. - (If you only need to forward a few workload log files, then use LogProxyConsumer.) + (If you need to forward workload stdout logs, then use LogForwarder; if you need to forward + log files, then use LogProxyConsumer.) `LokiPushApiConsumer` can be instantiated as follows: @@ -1768,6 +1777,9 @@ class LogProxyEvents(ObjectEvents): class LogProxyConsumer(ConsumerBase): """LogProxyConsumer class. + > Note: This object is deprecated. Consider migrating to LogForwarder with the release of Juju + > 3.6 LTS. + The `LogProxyConsumer` object provides a method for attaching `promtail` to a workload in order to generate structured logging data from applications which traditionally log to syslog or do not have native Loki integration. diff --git a/lib/charms/observability_libs/v1/cert_handler.py b/lib/charms/observability_libs/v1/cert_handler.py index c482662f..f6a3eda4 100644 --- a/lib/charms/observability_libs/v1/cert_handler.py +++ b/lib/charms/observability_libs/v1/cert_handler.py @@ -67,7 +67,7 @@ LIBID = "b5cd5cd580f3428fa5f59a8876dcbe6a" LIBAPI = 1 -LIBPATCH = 8 +LIBPATCH = 9 VAULT_SECRET_LABEL = "cert-handler-private-vault" @@ -391,30 +391,37 @@ def _migrate_vault(self): @property def enabled(self) -> bool: - """Boolean indicating whether the charm has a tls_certificates relation.""" + """Boolean indicating whether the charm has a tls_certificates relation. + + See also the `available` property. + """ # We need to check for units as a temporary workaround because of https://bugs.launchpad.net/juju/+bug/2024583 # This could in theory not work correctly on scale down to 0 but it is necessary for the moment. 
- if not self.charm.model.get_relation(self.certificates_relation_name): + if not self.relation: return False - if not self.charm.model.get_relation( - self.certificates_relation_name - ).units: # pyright: ignore + if not self.relation.units: # pyright: ignore return False - if not self.charm.model.get_relation( - self.certificates_relation_name - ).app: # pyright: ignore + if not self.relation.app: # pyright: ignore return False - if not self.charm.model.get_relation( - self.certificates_relation_name - ).data: # pyright: ignore + if not self.relation.data: # pyright: ignore return False return True + @property + def available(self) -> bool: + """Return True if all certs are available in relation data; False otherwise.""" + return ( + self.enabled + and self.server_cert is not None + and self.private_key is not None + and self.ca_cert is not None + ) + def _on_certificates_relation_joined(self, _) -> None: # this will only generate a csr if we don't have one already self._generate_csr() diff --git a/lib/charms/tempo_k8s/v1/charm_tracing.py b/lib/charms/tempo_k8s/v1/charm_tracing.py index 7c118856..000e0cb5 100644 --- a/lib/charms/tempo_k8s/v1/charm_tracing.py +++ b/lib/charms/tempo_k8s/v1/charm_tracing.py @@ -147,7 +147,7 @@ def my_tracing_endpoint(self) -> Optional[str]: # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 6 +LIBPATCH = 8 PYDEPS = ["opentelemetry-exporter-otlp-proto-http==1.21.0"] @@ -509,18 +509,20 @@ def trace_type(cls: _T) -> _T: logger.info(f"skipping {method} (dunder)") continue - isstatic = isinstance(inspect.getattr_static(cls, method.__name__), staticmethod) - setattr(cls, name, trace_method(method, static=isstatic)) + new_method = trace_method(method) + if isinstance(inspect.getattr_static(cls, method.__name__), staticmethod): + new_method = staticmethod(new_method) + setattr(cls, name, new_method) return cls -def trace_method(method: _F, static: bool = False) -> _F: +def trace_method(method: _F) -> _F: """Trace this method. A span will be opened when this method is called and closed when it returns. """ - return _trace_callable(method, "method", static=static) + return _trace_callable(method, "method") def trace_function(function: _F) -> _F: @@ -531,20 +533,14 @@ def trace_function(function: _F) -> _F: return _trace_callable(function, "function") -def _trace_callable(callable: _F, qualifier: str, static: bool = False) -> _F: +def _trace_callable(callable: _F, qualifier: str) -> _F: logger.info(f"instrumenting {callable}") # sig = inspect.signature(callable) @functools.wraps(callable) def wrapped_function(*args, **kwargs): # type: ignore name = getattr(callable, "__qualname__", getattr(callable, "__name__", str(callable))) - with _span(f"{'(static) ' if static else ''}{qualifier} call: {name}"): # type: ignore - if static: - # fixme: do we or don't we need [1:]? - # The _trace_callable decorator doesn't always play nice with @staticmethods. - # Sometimes it will receive 'self', sometimes it won't. 
- # return callable(*args, **kwargs) # type: ignore - return callable(*args[1:], **kwargs) # type: ignore + with _span(f"{qualifier} call: {name}"): # type: ignore return callable(*args, **kwargs) # type: ignore # wrapped_function.__signature__ = sig diff --git a/lib/charms/tempo_k8s/v2/tracing.py b/lib/charms/tempo_k8s/v2/tracing.py index 9e5defc8..b4e341c3 100644 --- a/lib/charms/tempo_k8s/v2/tracing.py +++ b/lib/charms/tempo_k8s/v2/tracing.py @@ -69,6 +69,7 @@ def __init__(self, *args): """ # noqa: W505 +import enum import json import logging from typing import ( @@ -94,7 +95,7 @@ def __init__(self, *args): ) from ops.framework import EventSource, Object from ops.model import ModelError, Relation -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict, Field # The unique Charmhub library identifier, never change it LIBID = "12977e9aa0b34367903d8afeb8c3d85d" @@ -104,7 +105,7 @@ def __init__(self, *args): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 5 +LIBPATCH = 6 PYDEPS = ["pydantic"] @@ -121,16 +122,36 @@ def __init__(self, *args): "tempo_grpc", "otlp_grpc", "otlp_http", - # "jaeger_grpc", - "jaeger_thrift_compact", - "jaeger_thrift_http", - "jaeger_thrift_binary", ] -RawReceiver = Tuple[ReceiverProtocol, int] +RawReceiver = Tuple[ReceiverProtocol, str] +"""Helper type. A raw receiver is defined as a tuple consisting of the protocol name, and the (external, if available), +(secured, if available) resolvable server url. +""" + BUILTIN_JUJU_KEYS = {"ingress-address", "private-address", "egress-subnets"} +class TransportProtocolType(str, enum.Enum): + """Receiver Type.""" + + http = "http" + grpc = "grpc" + + +receiver_protocol_to_transport_protocol = { + "zipkin": TransportProtocolType.http, + "kafka": TransportProtocolType.http, + "opencensus": TransportProtocolType.http, + "tempo_http": TransportProtocolType.http, + "tempo_grpc": TransportProtocolType.grpc, + "otlp_grpc": TransportProtocolType.grpc, + "otlp_http": TransportProtocolType.http, +} +"""A mapping between telemetry protocols and their corresponding transport protocol. +""" + + class TracingError(Exception): """Base class for custom errors raised by this library.""" @@ -289,27 +310,81 @@ def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): # todo use models from charm-relation-interfaces -class Receiver(BaseModel): # noqa: D101 - """Receiver data structure.""" +if int(pydantic.version.VERSION.split(".")[0]) < 2: + + class ProtocolType(BaseModel): # type: ignore + """Protocol Type.""" - protocol: ReceiverProtocol - port: int + class Config: + """Pydantic config.""" + use_enum_values = True + """Allow serializing enum values.""" -class TracingProviderAppData(DatabagModel): # noqa: D101 - """Application databag model for the tracing provider.""" + name: str = Field( + ..., + description="Receiver protocol name. What protocols are supported (and what they are called) " + "may differ per provider.", + examples=["otlp_grpc", "otlp_http", "tempo_http"], + ) + + type: TransportProtocolType = Field( + ..., + description="The transport protocol used by this receiver.", + examples=["http", "grpc"], + ) + +else: + + class ProtocolType(BaseModel): + """Protocol Type.""" + + model_config = ConfigDict( + # Allow serializing enum values. + use_enum_values=True + ) + """Pydantic config.""" + + name: str = Field( + ..., + description="Receiver protocol name. 
What protocols are supported (and what they are called) " + "may differ per provider.", + examples=["otlp_grpc", "otlp_http", "tempo_http"], + ) + + type: TransportProtocolType = Field( + ..., + description="The transport protocol used by this receiver.", + examples=["http", "grpc"], + ) - host: str - """Server hostname (local fqdn).""" - receivers: List[Receiver] - """Enabled receivers and ports at which they are listening.""" +class Receiver(BaseModel): + """Specification of an active receiver.""" - external_url: Optional[str] = None - """Server url. If an ingress is present, it will be the ingress address.""" + protocol: ProtocolType = Field(..., description="Receiver protocol name and type.") + url: str = Field( + ..., + description="""URL at which the receiver is reachable. If there's an ingress, it would be the external URL. + Otherwise, it would be the service's fqdn or internal IP. + If the protocol type is grpc, the url will not contain a scheme.""", + examples=[ + "http://traefik_address:2331", + "https://traefik_address:2331", + "http://tempo_public_ip:2331", + "https://tempo_public_ip:2331", + "tempo_public_ip:2331", + ], + ) + + +class TracingProviderAppData(DatabagModel): # noqa: D101 + """Application databag model for the tracing provider.""" - internal_scheme: Optional[str] = None - """Scheme for internal communication. If it is present, it will be protocol accepted by the provider.""" + receivers: List[Receiver] = Field( + ..., + description="List of all receivers enabled on the tracing provider.", + ) class TracingRequirerAppData(DatabagModel): # noqa: D101 @@ -481,10 +556,15 @@ def requested_receivers(self) -> List[ReceiverProtocol]: return TracingRequirerAppData.load(relation.data[app]).receivers +class BrokenEvent(RelationBrokenEvent): + """Event emitted when a relation on tracing is broken.""" + + class TracingEndpointProviderEvents(CharmEvents): """TracingEndpointProvider events.""" request = EventSource(RequestEvent) + broken = EventSource(BrokenEvent) class TracingEndpointProvider(Object): @@ -495,21 +575,17 @@ class TracingEndpointProvider(Object): def __init__( self, charm: CharmBase, - host: str, external_url: Optional[str] = None, relation_name: str = DEFAULT_RELATION_NAME, - internal_scheme: Optional[Literal["http", "https"]] = "http", ): """Initialize. Args: charm: a `CharmBase` instance that manages this instance of the Tempo service. - host: address of the node hosting the tempo server. external_url: external address of the node hosting the tempo server, if an ingress is present. relation_name: an optional string name of the relation between `charm` and the Tempo charmed service. The default is "tracing". - internal_scheme: scheme to use with internal urls. 
Raises: RelationNotFoundError: If there is no relation in the charm's metadata.yaml @@ -525,12 +601,10 @@ def __init__( charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.provides ) - super().__init__(charm, relation_name + "tracing-provider-v2") + super().__init__(charm, relation_name + "tracing-provider") self._charm = charm - self._host = host self._external_url = external_url self._relation_name = relation_name - self._internal_scheme = internal_scheme self.framework.observe( self._charm.on[relation_name].relation_joined, self._on_relation_event ) @@ -540,18 +614,21 @@ def __init__( self.framework.observe( self._charm.on[relation_name].relation_changed, self._on_relation_event ) + self.framework.observe( + self._charm.on[relation_name].relation_broken, self._on_relation_broken_event + ) + + def _on_relation_broken_event(self, e: RelationBrokenEvent): + """Handle relation broken events.""" + self.on.broken.emit(e.relation) def _on_relation_event(self, e: RelationEvent): """Handle relation created/joined/changed events.""" - if self.is_v2(e.relation): + if self.is_requirer_ready(e.relation): self.on.request.emit(e.relation) - def is_v2(self, relation: Relation): - """Attempt to determine if this relation is a tracing v2 relation. - - Assumes that the V2 requirer will, as soon as possible (relation-created), - publish the list of requested ingestion receivers (can be empty too). - """ + def is_requirer_ready(self, relation: Relation): + """Attempt to determine if requirer has already populated app data.""" try: self._get_requested_protocols(relation) except NotReadyError: @@ -567,7 +644,7 @@ def _get_requested_protocols(relation: Relation): try: databag = TracingRequirerAppData.load(relation.data[app]) except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): - logger.info(f"relation {relation} is not ready to talk tracing v2") + logger.info(f"relation {relation} is not ready to talk tracing") raise NotReadyError() return databag.receivers @@ -584,8 +661,8 @@ def requested_protocols(self): @property def relations(self) -> List[Relation]: - """All v2 relations active on this endpoint.""" - return [r for r in self._charm.model.relations[self._relation_name] if self.is_v2(r)] + """All relations active on this endpoint.""" + return self._charm.model.relations[self._relation_name] def publish_receivers(self, receivers: Sequence[RawReceiver]): """Let all requirers know that these receivers are active and listening.""" @@ -595,12 +672,16 @@ def publish_receivers(self, receivers: Sequence[RawReceiver]): for relation in self.relations: try: TracingProviderAppData( - host=self._host, - external_url=self._external_url or None, receivers=[ - Receiver(port=port, protocol=protocol) for protocol, port in receivers + Receiver( + url=url, + protocol=ProtocolType( + name=protocol, + type=receiver_protocol_to_transport_protocol[protocol], + ), + ) + for protocol, url in receivers ], - internal_scheme=self._internal_scheme, ).dump(relation.data[self._charm.app]) except ModelError as e: @@ -625,11 +706,9 @@ class EndpointRemovedEvent(RelationBrokenEvent): class EndpointChangedEvent(_AutoSnapshotEvent): """Event representing a change in one of the receiver endpoints.""" - __args__ = ("host", "external_url", "_receivers") + __args__ = ("_receivers",) if TYPE_CHECKING: - host = "" # type: str - external_url = "" # type: str _receivers = [] # type: List[dict] @property @@ -769,12 +848,6 @@ def is_ready(self, relation: Optional[Relation] = None): return False try: databag = 
dict(relation.data[relation.app]) - # "ingesters" Might be populated if the provider sees a v1 relation before a v2 requirer has had time to - # publish the 'receivers' list. This will make Tempo incorrectly assume that this is a v1 - # relation, and act accordingly. Later, when the requirer publishes the requested receivers, - # tempo will be able to course-correct. - if "ingesters" in databag: - del databag["ingesters"] TracingProviderAppData.load(databag) except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): @@ -790,9 +863,7 @@ def _on_tracing_relation_changed(self, event): return data = TracingProviderAppData.load(relation.data[relation.app]) - self.on.endpoint_changed.emit( # type: ignore - relation, data.host, data.external_url, [i.dict() for i in data.receivers] - ) + self.on.endpoint_changed.emit(relation, [i.dict() for i in data.receivers]) # type: ignore def _on_tracing_relation_broken(self, event: RelationBrokenEvent): """Notify the providers that the endpoint is broken.""" @@ -815,7 +886,7 @@ def _get_endpoint( if not app_data: return None receivers: List[Receiver] = list( - filter(lambda i: i.protocol == protocol, app_data.receivers) + filter(lambda i: i.protocol.name == protocol, app_data.receivers) ) if not receivers: logger.error(f"no receiver found with protocol={protocol!r}") @@ -827,18 +898,7 @@ def _get_endpoint( return receiver = receivers[0] - # if there's an external_url argument (v2.5+), use that. Otherwise, we use the tempo local fqdn - if app_data.external_url: - url = f"{app_data.external_url}:{receiver.port}" - else: - # if we didn't receive a scheme (old provider), we assume HTTP is used - url = f"{app_data.internal_scheme or 'http'}://{app_data.host}:{receiver.port}" - - if receiver.protocol.endswith("grpc"): - # TCP protocols don't want an http/https scheme prefix - url = url.split("://")[1] - - return url + return receiver.url def get_endpoint( self, protocol: ReceiverProtocol, relation: Optional[Relation] = None @@ -861,20 +921,3 @@ def get_endpoint( return None return endpoint - - # for backwards compatibility with earlier revisions: - def otlp_grpc_endpoint(self): - """Use TracingEndpointRequirer.get_endpoint('otlp_grpc') instead.""" - logger.warning( - "`TracingEndpointRequirer.otlp_grpc_endpoint` is deprecated. " - "Use `TracingEndpointRequirer.get_endpoint('otlp_grpc') instead.`" - ) - return self.get_endpoint("otlp_grpc") - - def otlp_http_endpoint(self): - """Use TracingEndpointRequirer.get_endpoint('otlp_http') instead.""" - logger.warning( - "`TracingEndpointRequirer.otlp_http_endpoint` is deprecated. 
" - "Use `TracingEndpointRequirer.get_endpoint('otlp_http') instead.`" - ) - return self.get_endpoint("otlp_http") diff --git a/lib/charms/tls_certificates_interface/v3/tls_certificates.py b/lib/charms/tls_certificates_interface/v3/tls_certificates.py index 2e45475a..33f34b62 100644 --- a/lib/charms/tls_certificates_interface/v3/tls_certificates.py +++ b/lib/charms/tls_certificates_interface/v3/tls_certificates.py @@ -317,7 +317,7 @@ def _on_all_certificates_invalidated(self, event: AllCertificatesInvalidatedEven # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 14 +LIBPATCH = 15 PYDEPS = ["cryptography", "jsonschema"] @@ -1093,6 +1093,13 @@ def generate_csr( # noqa: C901 return signed_certificate.public_bytes(serialization.Encoding.PEM) +def get_sha256_hex(data: str) -> str: + """Calculate the hash of the provided data and return the hexadecimal representation.""" + digest = hashes.Hash(hashes.SHA256()) + digest.update(data.encode()) + return digest.finalize().hex() + + def csr_matches_certificate(csr: str, cert: str) -> bool: """Check if a CSR matches a certificate. @@ -1872,12 +1879,15 @@ def _on_relation_changed(self, event: RelationChangedEvent) -> None: ] for certificate in provider_certificates: if certificate.csr in requirer_csrs: + csr_in_sha256_hex = get_sha256_hex(certificate.csr) if certificate.revoked: with suppress(SecretNotFoundError): logger.debug( - "Removing secret with label %s", f"{LIBID}-{certificate.csr}" + "Removing secret with label %s", + f"{LIBID}-{csr_in_sha256_hex}", ) - secret = self.model.get_secret(label=f"{LIBID}-{certificate.csr}") + secret = self.model.get_secret( + label=f"{LIBID}-{csr_in_sha256_hex}") secret.remove_all_revisions() self.on.certificate_invalidated.emit( reason="revoked", @@ -1889,20 +1899,22 @@ def _on_relation_changed(self, event: RelationChangedEvent) -> None: else: try: logger.debug( - "Setting secret with label %s", f"{LIBID}-{certificate.csr}" + "Setting secret with label %s", f"{LIBID}-{csr_in_sha256_hex}" + ) + secret = self.model.get_secret(label=f"{LIBID}-{csr_in_sha256_hex}") + secret.set_content( + {"certificate": certificate.certificate, "csr": certificate.csr} ) - secret = self.model.get_secret(label=f"{LIBID}-{certificate.csr}") - secret.set_content({"certificate": certificate.certificate}) secret.set_info( expire=self._get_next_secret_expiry_time(certificate), ) except SecretNotFoundError: logger.debug( - "Creating new secret with label %s", f"{LIBID}-{certificate.csr}" + "Creating new secret with label %s", f"{LIBID}-{csr_in_sha256_hex}" ) secret = self.charm.unit.add_secret( - {"certificate": certificate.certificate}, - label=f"{LIBID}-{certificate.csr}", + {"certificate": certificate.certificate, "csr": certificate.csr}, + label=f"{LIBID}-{csr_in_sha256_hex}", expire=self._get_next_secret_expiry_time(certificate), ) self.on.certificate_available.emit( @@ -1965,7 +1977,7 @@ def _on_secret_expired(self, event: SecretExpiredEvent) -> None: """ if not event.secret.label or not event.secret.label.startswith(f"{LIBID}-"): return - csr = event.secret.label[len(f"{LIBID}-") :] + csr = event.secret.get_content()["csr"] provider_certificate = self._find_certificate_in_relation_data(csr) if not provider_certificate: # A secret expired but we did not find matching certificate. 
Cleaning up diff --git a/tests/scenario/test_tracing_integration.py b/tests/scenario/test_tracing_integration.py index 49be6281..836bb233 100644 --- a/tests/scenario/test_tracing_integration.py +++ b/tests/scenario/test_tracing_integration.py @@ -4,7 +4,7 @@ import pytest import yaml from charms.tempo_k8s.v1.charm_tracing import charm_tracing_disabled -from charms.tempo_k8s.v2.tracing import Receiver, TracingProviderAppData +from charms.tempo_k8s.v2.tracing import ProtocolType, Receiver, TracingProviderAppData from scenario import Relation, State from traefik import CA_CERT_PATH, DYNAMIC_TRACING_PATH @@ -13,7 +13,12 @@ def tracing_relation(): db = {} TracingProviderAppData( - host="foo.com", receivers=[Receiver(protocol="otlp_http", port=81)] + receivers=[ + Receiver( + url="http://foo.com:81", + protocol=ProtocolType(name="otlp_http", type="http"), + ) + ] ).dump(db) tracing = Relation("tracing", remote_app_data=db) return tracing diff --git a/tox.ini b/tox.ini index 341f781e..af9e5859 100644 --- a/tox.ini +++ b/tox.ini @@ -61,14 +61,12 @@ commands = [testenv:integration] description = Run integration tests deps = - # pinned for https://github.com/charmed-kubernetes/pytest-operator/issues/131 - pytest==8.1.1 - - # fix for https://github.com/pytest-dev/pytest-asyncio/releases/tag/v0.23.0 making pytest-operator b0rk - pytest-asyncio==0.21.0 + pytest==8.2.2 + pytest-asyncio==0.21.2 pytest-operator juju - tenacity + # fix for https://github.com/jd/tenacity/issues/471 + tenacity==8.3.0 sh -r{toxinidir}/requirements.txt commands = @@ -90,8 +88,7 @@ allowlist_externals = /usr/bin/env description = Scenario tests deps = pytest - ; scenario is pinned until (got an unexpected keyword argument 'relation_id') error is fixed - ops-scenario<=6.0.3 + ops-scenario -r{toxinidir}/requirements.txt commands = pytest -v --tb native {[vars]tst_path}/scenario --log-cli-level=INFO -s {posargs}
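
Reviewer note: the `tempo_k8s/v2/tracing.py` changes above replace the `(protocol, port)` receiver tuples (and the separate `host`/`external_url`/`internal_scheme` fields) with `(protocol, url)` pairs, so providers now publish fully resolved endpoint URLs. Below is a minimal sketch of a provider charm adapted to the new API; the charm name, endpoint URLs, and ports are illustrative assumptions, not part of this diff.

```python
# Sketch only: a hypothetical Tempo-like provider charm using the new
# (protocol, url) RawReceiver shape from lib/charms/tempo_k8s/v2/tracing.py.
from ops.charm import CharmBase
from ops.main import main

from charms.tempo_k8s.v2.tracing import TracingEndpointProvider


class MyTracingProviderCharm(CharmBase):  # hypothetical charm, for illustration
    def __init__(self, *args):
        super().__init__(*args)
        # The `host` and `internal_scheme` constructor args were dropped in this
        # diff; only an optional external_url remains.
        self.tracing = TracingEndpointProvider(
            self, external_url="http://tempo.example:3200"
        )
        self.framework.observe(self.tracing.on.request, self._on_tracing_request)
        self.framework.observe(self.tracing.on.broken, self._on_tracing_broken)

    def _on_tracing_request(self, _event):
        # Receivers are published as (protocol, url) pairs; per the Receiver model,
        # grpc endpoints carry no scheme in their url.
        self.tracing.publish_receivers(
            [
                ("otlp_http", "http://tempo.example:4318"),
                ("otlp_grpc", "tempo.example:4317"),
            ]
        )

    def _on_tracing_broken(self, _event):
        # New `broken` event: clean up any per-relation receiver state here.
        pass


if __name__ == "__main__":
    main(MyTracingProviderCharm)
```

On the requirer side, `TracingEndpointRequirer.get_endpoint("otlp_http")` now returns the published URL as-is instead of assembling it from `host`, `port`, and scheme, which is also why the `otlp_grpc_endpoint`/`otlp_http_endpoint` compatibility shims could be removed.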