Improve WordPress's observability and increase health check timeout #194

Merged
21 commits merged on Mar 6, 2024
Changes from all commits
32 changes: 25 additions & 7 deletions src/charm.py
@@ -23,7 +23,6 @@
 import yaml
 from charms.data_platform_libs.v0.data_interfaces import DatabaseRequires
 from charms.grafana_k8s.v0.grafana_dashboard import GrafanaDashboardProvider
-from charms.loki_k8s.v0.loki_push_api import LogProxyConsumer
 from charms.nginx_ingress_integrator.v0.nginx_route import require_nginx_route
 from charms.prometheus_k8s.v0.prometheus_scrape import MetricsEndpointProvider
 from ops.charm import ActionEvent, CharmBase, HookEvent, PebbleReadyEvent, UpgradeCharmEvent
@@ -39,7 +38,7 @@
     _APACHE_EXPORTER_PEBBLE_SERVICE,
     APACHE_LOG_PATHS,
     PROM_EXPORTER_PEBBLE_CONFIG,
-    WORDPRESS_SCRAPE_JOBS,
+    ApacheLogProxyConsumer,
 )
 from state import CharmConfigInvalidError, State
 
@@ -150,12 +149,30 @@ def __init__(self, *args, **kwargs):
         )
 
         self._require_nginx_route()
+        self._logging = ApacheLogProxyConsumer(
+            self, relation_name="logging", log_files=APACHE_LOG_PATHS, container_name="wordpress"
+        )
+        prometheus_jobs = [
+            {
+                "job_name": "apache_exporter",
+                "static_configs": [{"targets": ["*:9117"]}],
+            }
+        ]
+        if self._logging.loki_endpoints:
+            prometheus_jobs.append(
+                {
+                    "job_name": "promtail",
+                    "static_configs": [{"targets": ["*:9080"]}],
+                }
+            )
         self.metrics_endpoint = MetricsEndpointProvider(
             self,
-            jobs=WORDPRESS_SCRAPE_JOBS,
-        )
-        self._logging = LogProxyConsumer(
-            self, relation_name="logging", log_files=APACHE_LOG_PATHS, container_name="wordpress"
+            jobs=prometheus_jobs,
+            refresh_event=[
+                self.on.wordpress_pebble_ready,
+                self._logging.on.log_proxy_endpoint_departed,
+                self._logging.on.log_proxy_endpoint_joined,
+            ],
         )
         self._grafana_dashboards = GrafanaDashboardProvider(self)
 
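Note on the hunk above: the promtail job (targets "*:9080") is only advertised to Prometheus once the Loki relation has supplied push endpoints, and refresh_event re-renders the scrape jobs whenever the workload container restarts or the log-proxy relation joins or departs. The same job-building logic as a standalone sketch (the build_scrape_jobs helper is hypothetical, not part of this PR):

from typing import Any, Dict, List

APACHE_EXPORTER_PORT = 9117  # apache_exporter metrics port scraped by Prometheus
PROMTAIL_PORT = 9080  # promtail's HTTP server port, which exposes its own /metrics


def build_scrape_jobs(loki_endpoints: Dict[str, Any]) -> List[dict]:
    """Build Prometheus scrape jobs, adding promtail only when Loki is related."""
    jobs: List[dict] = [
        {
            "job_name": "apache_exporter",
            "static_configs": [{"targets": [f"*:{APACHE_EXPORTER_PORT}"]}],
        }
    ]
    # Promtail only starts once the Loki relation provides push endpoints,
    # so scraping *:9080 before that would only produce a down target.
    if loki_endpoints:
        jobs.append(
            {
                "job_name": "promtail",
                "static_configs": [{"targets": [f"*:{PROMTAIL_PORT}"]}],
            }
        )
    return jobs
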
@@ -706,7 +723,8 @@ def _init_pebble_layer(self):
             "wordpress-ready": {
                 "override": "replace",
                 "level": "alive",
-                "http": {"url": "http://localhost/index.php"},
+                "http": {"url": "http://localhost"},
+                "timeout": "5s",
             },
         },
     }
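The readiness check above now probes the site root instead of /index.php and sets an explicit five-second timeout (Pebble's default is 3s), which is the "increase health check timeout" half of this PR. A minimal sketch of the resulting check as an ops.pebble.Layer (illustrative only, not the charm's full layer):

from ops import pebble

# Values mirror the diff above; "timeout": "5s" loosens Pebble's 3s default so
# slow PHP responses no longer flap the liveness check.
layer = pebble.Layer(
    {
        "checks": {
            "wordpress-ready": {
                "override": "replace",
                "level": "alive",
                "http": {"url": "http://localhost"},
                "timeout": "5s",
            }
        }
    }
)
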
83 changes: 70 additions & 13 deletions src/cos.py
@@ -6,6 +6,7 @@
 """COS integration for WordPress charm."""
 from typing import Dict, List, TypedDict
 
+from charms.loki_k8s.v0.loki_push_api import LogProxyConsumer
 from ops.pebble import Check, Layer, Service
 
 
@@ -40,17 +41,6 @@ class PrometheusMetricsJob(TypedDict, total=False):
 
 
 APACHE_PROMETHEUS_SCRAPE_PORT = "9117"
-WORDPRESS_SCRAPE_JOBS = [
-    PrometheusMetricsJob(
-        static_configs=[
-            PrometheusStaticConfig(
-                targets=[
-                    f"*:{APACHE_PROMETHEUS_SCRAPE_PORT}",
-                ]
-            )
-        ]
-    )
-]
 _APACHE_EXPORTER_PEBBLE_SERVICE = Service(
     name="apache-exporter",
     raw={
@@ -80,6 +70,73 @@ class PrometheusMetricsJob(TypedDict, total=False):
 )
 
 APACHE_LOG_PATHS = [
-    "/var/log/apache2/access.log",
-    "/var/log/apache2/error.log",
+    "/var/log/apache2/access.*.log",
+    "/var/log/apache2/error.*.log",
 ]
+
+REQUEST_DURATION_MICROSECONDS_BUCKETS = [
+    10000,
+    25000,
+    50000,
+    100000,
+    200000,
+    300000,
+    400000,
+    500000,
+    750000,
+    1000000,
+    1500000,
+    2000000,
+    2500000,
+    5000000,
+    10000000,
+]
+
+
+class ApacheLogProxyConsumer(LogProxyConsumer):
+    """Extends LogProxyConsumer to add a metrics pipeline to promtail."""
+
+    def _scrape_configs(self) -> dict:
+        """Generate the scrape_configs section of the Promtail config file.
+
+        Returns:
+            A dict representing the `scrape_configs` section.
+        """
+        scrape_configs = super()._scrape_configs()
+        scrape_configs["scrape_configs"].append(
+            {
+                "job_name": "access_log_exporter",
+                "static_configs": [{"labels": {"__path__": "/var/log/apache2/access.*.log"}}],
+                "pipeline_stages": [
+                    {
+                        "logfmt": {
+                            "mapping": {
+                                "request_duration_microseconds": "request_duration_microseconds",
+                                "content_type": "content_type",
+                                "path": "path",
+                            }
+                        }
+                    },
+                    {"labels": {"content_type": "content_type", "path": "path"}},
+                    {
+                        "match": {
+                            "selector": '{path=~"^/server-status.*$"}',
+                            "action": "drop",
+                        }
+                    },
+                    {"labeldrop": ["filename", "path"]},
+                    {
+                        "metrics": {
+                            "request_duration_microseconds": {
+                                "type": "Histogram",
+                                "source": "request_duration_microseconds",
+                                "prefix": "apache_access_log_",
+                                "config": {"buckets": REQUEST_DURATION_MICROSECONDS_BUCKETS},
+                            }
+                        }
+                    },
+                    {"drop": {"expression": ".*"}},
+                ],
+            }
+        )
+        return scrape_configs
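Taken together, the pipeline parses logfmt access-log lines, drops probe traffic to /server-status, turns request durations into a histogram, and finally drops every line so this job emits metrics without also double-shipping logs to Loki. A rough illustration of the input and output (example values, not taken from this PR):

# Example logfmt line promtail would tail from /var/log/apache2/access.*.log:
sample_line = 'request_duration_microseconds=125000 content_type="text/html" path="/blog"'

# After the logfmt, labels, and metrics stages, promtail's own /metrics endpoint
# (the *:9080 target scraped in src/charm.py) exposes series such as:
#   apache_access_log_request_duration_microseconds_bucket{content_type="text/html",le="200000"} 1
# The trailing {"drop": {"expression": ".*"}} stage then discards the line itself,
# so the access_log_exporter job never pushes these entries to Loki.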