
Commit

add logger config
cmelone committed Mar 3, 2024
1 parent f9eab5e commit b43f92e
Showing 5 changed files with 20 additions and 10 deletions.
8 changes: 8 additions & 0 deletions gantry/__main__.py
@@ -1,3 +1,4 @@
+import logging
 import os
 
 import aiosqlite
@@ -7,6 +8,13 @@
 from gantry.clients.prometheus import PrometheusClient
 from gantry.views import routes
 
+logger = logging.getLogger(__name__)
+logging.basicConfig(
+    level=os.environ.get("LOG_LEVEL", "WARNING"),
+    format="[%(asctime)s] (%(name)s:%(lineno)d) %(levelname)s: %(message)s",
+    datefmt="%Y-%m-%d %H:%M:%S",
+)
+
 
 async def init_db(app: web.Application):
     db = await aiosqlite.connect(os.environ["DB_FILE"])
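For context, a minimal standalone sketch (not part of this commit) of how the configuration above behaves: the LOG_LEVEL environment variable selects the threshold, defaulting to WARNING, and the format string prefixes each record with a timestamp, logger name, line number, and level. The logger name "gantry.example" below is hypothetical and only illustrates the output.

import logging
import os

# Same pattern as the basicConfig call added in gantry/__main__.py.
logging.basicConfig(
    level=os.environ.get("LOG_LEVEL", "WARNING"),
    format="[%(asctime)s] (%(name)s:%(lineno)d) %(levelname)s: %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
)

logger = logging.getLogger("gantry.example")
logger.info("suppressed unless LOG_LEVEL is INFO or DEBUG")
logger.warning("emitted with the default WARNING threshold")
# Example output (timestamp and line number will vary):
# [2024-03-03 12:00:00] (gantry.example:14) WARNING: emitted with the default WARNING threshold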
6 changes: 4 additions & 2 deletions gantry/clients/prometheus/prometheus.py
@@ -7,6 +7,8 @@
 from gantry.clients.prometheus.job import PrometheusJobClient
 from gantry.clients.prometheus.node import PrometheusNodeClient
 
+logger = logging.getLogger(__name__)
+
 
 class PrometheusClient:
     def __init__(self, base_url: str, auth_cookie: str = ""):
@@ -72,7 +74,7 @@ async def _query(self, url: str) -> list:
         try:
             return self.prettify_res(await resp.json())
         except aiohttp.ContentTypeError:
-            logging.error(
+            logger.error(
                 """Prometheus query failed with unexpected response.
                 The cookie may have expired."""
             )
@@ -87,7 +89,7 @@ def prettify_res(self, response: dict) -> list:
         }
 
         if result_type not in values_dict:
-            logging.error(f"Prometheus response type {result_type} not supported")
+            logger.error(f"Prometheus response type {result_type} not supported")
             return []
 
         return [
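A short aside on the pattern repeated in each file of this commit (a sketch, not part of the diff): logging.getLogger(__name__) returns a logger named after the module's dotted import path, so records inherit the root configuration set in gantry/__main__.py and identify the module that emitted them, whereas the replaced logging.error(...) calls went through the anonymous root logger. The function name below is hypothetical.

import logging

logger = logging.getLogger(__name__)  # e.g. "gantry.clients.prometheus.prometheus"

def report_failure() -> None:
    # Handlers and level come from the root logger configured in __main__.py;
    # %(name)s in the format string resolves to this module's dotted path.
    logger.error("Prometheus query failed with unexpected response.")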
9 changes: 3 additions & 6 deletions gantry/db/get.py
@@ -2,6 +2,8 @@
 
 import aiosqlite
 
+logger = logging.getLogger(__name__)
+
 
 async def get_node(db: aiosqlite.Connection, uuid: str) -> int | None:
     """return the primary key if found, otherwise return None"""
@@ -20,12 +22,7 @@ async def job_exists(db: aiosqlite.Connection, gl_id: int) -> bool:
         "select id from jobs where gitlab_id = ?", (gl_id,)
     ) as cursor:
         if await cursor.fetchone():
-            logging.warning(
-                f"""
-                job {gl_id} already in database.
-                check why multiple requests are being sent.
-                """
-            )
+            logger.warning(f"job {gl_id} exists. look into duplicate webhook calls.")
             return True
 
     return False
4 changes: 3 additions & 1 deletion gantry/routes/collection.py
@@ -10,6 +10,8 @@
 
 MB_IN_BYTES = 1_000_000
 
+logger = logging.getLogger(__name__)
+
 
 async def fetch_job(
     payload: dict,
@@ -65,7 +67,7 @@ async def fetch_job(
         node_id = await fetch_node(db_conn, prometheus, node_hostname, job.midpoint)
     except IncompleteData as e:
         # missing data, skip this job
-        logging.error(f"{e} job={job.gl_id}")
+        logger.error(f"{e} job={job.gl_id}")
         return
 
     await db.insert_job(
3 changes: 2 additions & 1 deletion gantry/views.py
@@ -7,6 +7,7 @@
 
 from gantry.routes.collection import fetch_job
 
+logger = logging.getLogger(__name__)
 routes = web.RouteTableDef()
 
 
@@ -21,7 +22,7 @@ async def collect_job(request: web.Request) -> web.Response:
         return web.Response(status=401, text="invalid token")
 
     if request.headers.get("X-Gitlab-Event") != "Job Hook":
-        logging.error(f"invalid event type {request.headers.get('X-Gitlab-Event')}")
+        logger.error(f"invalid event type {request.headers.get('X-Gitlab-Event')}")
         # return 200 so gitlab doesn't disable the webhook -- this is not fatal
         return web.Response(status=200)
 
