Skip to content

Commit

Permalink
Add json logs
Browse files Browse the repository at this point in the history
  • Loading branch information
liviuba committed Jul 3, 2024
1 parent 073b143 commit eb20251
Show file tree
Hide file tree
Showing 6 changed files with 140 additions and 23 deletions.
1 change: 1 addition & 0 deletions api/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ ome-types = "^0.4.2"
# not directly needed except the latest version crashes the api on start
xsdata = "23.8"
lxml = "^4.9.3"
json-log-formatter = "^1.0"

[tool.poetry.group.dev.dependencies]
locust = "^2.16.1"
Expand Down
36 changes: 16 additions & 20 deletions api/src/api/private.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from .auth import get_current_user
from . import constants
from uuid import UUID
import logging
from ..logging import log_info

from typing import List
from fastapi import APIRouter, status, Depends, UploadFile, Response
Expand All @@ -25,7 +25,7 @@ async def create_study(
db: Repository = Depends(),
overwrite_mode: OverwriteMode = OverwriteMode.FAIL,
) -> None:
logging.info(f"Creating study {study.accession_id}")
log_info(f"Creating study {study.accession_id}")
db.overwrite_mode = overwrite_mode
if study.version != 0:
raise exceptions.InvalidRequestException(
Expand All @@ -38,7 +38,7 @@ async def create_study(

@router.patch("/studies", status_code=status.HTTP_200_OK)
async def update_study(study: db_models.BIAStudy, db: Repository = Depends()) -> None:
logging.info(f"Updating study {study.accession_id}. New version: {study.version}")
log_info(f"Updating study {study.accession_id}. New version: {study.version}")
await db.update_doc(study)

return None
Expand All @@ -50,7 +50,7 @@ async def update_study(study: db_models.BIAStudy, db: Repository = Depends()) ->
async def study_refresh_counts(study_uuid: str, db: Repository = Depends()) -> None:
"""Recalculate and persist counts for other objects pointing to this study."""

logging.info(f"Recalculating reference counts for study {study_uuid}")
log_info(f"Recalculating reference counts for study {study_uuid}")
await db.study_refresh_counts(study_uuid)

return None
Expand All @@ -63,7 +63,7 @@ async def create_images(
db: Repository = Depends(),
overwrite_mode: OverwriteMode = OverwriteMode.FAIL,
) -> api_models.BulkOperationResponse:
logging.info(
log_info(
f"Creating {len(study_images)} images. First image attached to study: {study_images[0].study_uuid if len(study_images) else 'EMPTY LIST'}"
)
db.overwrite_mode = overwrite_mode
Expand Down Expand Up @@ -102,9 +102,7 @@ async def update_image(
"""Bulk update not available - update_many only has one filter for the entire update
@TODO: Find common bulk update usecases and map them to mongo operations"""

logging.info(
f"Updating image {study_image.uuid}. New version: {study_image.version}"
)
log_info(f"Updating image {study_image.uuid}. New version: {study_image.version}")

await db.validate_object_dependency(
study_image,
Expand Down Expand Up @@ -132,7 +130,7 @@ async def create_image_representation(
representation: db_models.BIAImageRepresentation,
db: Repository = Depends(),
) -> None:
logging.info(f"Adding an image representation to image {image_uuid}")
log_info(f"Adding an image representation to image {image_uuid}")
await db.list_item_push(image_uuid, "representations", representation)

return None
Expand All @@ -145,7 +143,7 @@ async def create_file_references(
db: Repository = Depends(),
overwrite_mode: OverwriteMode = OverwriteMode.FAIL,
) -> api_models.BulkOperationResponse:
logging.info(
log_info(
f"Creating {len(file_references)} file references. First file reference attached to study: {file_references[0].study_uuid if len(file_references) else 'EMPTY LIST'}"
)
db.overwrite_mode = overwrite_mode
Expand Down Expand Up @@ -180,7 +178,7 @@ async def create_file_references(
async def update_file_reference(
file_reference: db_models.FileReference, db: Repository = Depends()
) -> None:
logging.info(
log_info(
f"Updating file reference {file_reference.uuid}. New version: {file_reference.version}"
)
# await db.validate_uuid_type(file_reference.study_uuid, db_models.BIAStudy)
Expand All @@ -201,7 +199,7 @@ async def create_collection(
db: Repository = Depends(),
overwrite_mode: OverwriteMode = OverwriteMode.FAIL,
) -> None:
logging.info(f"Creating collection {collection.uuid}")
log_info(f"Creating collection {collection.uuid}")
db.overwrite_mode = overwrite_mode
for study_uuid in collection.study_uuids:
await db.find_study_by_uuid(study_uuid)
Expand All @@ -220,7 +218,7 @@ async def create_image_acquisition(
db: Repository = Depends(),
overwrite_mode: OverwriteMode = OverwriteMode.FAIL,
) -> None:
logging.info(f"Creating Image Acquisition {image_acquisition.uuid}")
log_info(f"Creating Image Acquisition {image_acquisition.uuid}")
db.overwrite_mode = overwrite_mode
# await db.validate_uuid_type(image_acquisition.specimen_uuid, db_models.Specimen)
await db.validate_object_dependency(
Expand All @@ -238,7 +236,7 @@ async def create_image_acquisition(
async def update_image_acquisition(
image_acquisition: db_models.ImageAcquisition, db: Repository = Depends()
) -> None:
logging.info(
log_info(
f"Updating Image acquisition {image_acquisition.uuid}. New version: {image_acquisition.version}"
)
# await db.validate_uuid_type(image_acquisition.specimen_uuid, db_models.Specimen)
Expand All @@ -259,7 +257,7 @@ async def create_specimen(
db: Repository = Depends(),
overwrite_mode: OverwriteMode = OverwriteMode.FAIL,
) -> None:
logging.info(f"Creating specimen {specimen.uuid}")
log_info(f"Creating specimen {specimen.uuid}")
db.overwrite_mode = overwrite_mode
# await db.validate_uuid_type(specimen.biosample_uuid, db_models.Biosample)
await db.validate_object_dependency(
Expand All @@ -277,7 +275,7 @@ async def create_specimen(
async def update_specimen(
specimen: db_models.Specimen, db: Repository = Depends()
) -> None:
logging.info(f"Updating Specimen {specimen.uuid}. New version: {specimen.version}")
log_info(f"Updating Specimen {specimen.uuid}. New version: {specimen.version}")
# await db.validate_uuid_type(specimen.biosample_uuid, db_models.Biosample)
await db.validate_object_dependency(
specimen,
Expand All @@ -296,7 +294,7 @@ async def create_biosample(
db: Repository = Depends(),
overwrite_mode: OverwriteMode = OverwriteMode.FAIL,
) -> None:
logging.info(f"Creating Biosample {biosample.uuid}")
log_info(f"Creating Biosample {biosample.uuid}")
db.overwrite_mode = overwrite_mode
await db.persist_doc(biosample)

Expand All @@ -307,9 +305,7 @@ async def create_biosample(
async def update_biosample(
biosample: db_models.Biosample, db: Repository = Depends()
) -> None:
logging.info(
f"Updating Biosample {biosample.uuid}. New version: {biosample.version}"
)
log_info(f"Updating Biosample {biosample.uuid}. New version: {biosample.version}")
await db.update_doc(biosample)

return None
Expand Down
1 change: 1 addition & 0 deletions api/src/api/public.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from .exceptions import DocumentNotFound, InvalidRequestException
from . import constants
from .annotator import annotator, Annotator
from ..logging import log_info, log_error

from typing import List, Optional, Annotated
from fastapi import APIRouter, Query, Depends, Body
Expand Down
5 changes: 2 additions & 3 deletions api/src/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from .api import admin
from .api import auth
from .models.repository import repository_create, Repository
from .logging import log_error

import uvicorn
from fastapi import FastAPI, Depends
Expand All @@ -17,7 +18,6 @@
from starlette.responses import JSONResponse
from starlette.status import HTTP_500_INTERNAL_SERVER_ERROR
from fastapi.middleware.gzip import GZipMiddleware
import logging

app = FastAPI(
generate_unique_id_function=lambda route: route.name,
Expand All @@ -37,7 +37,7 @@

@app.exception_handler(Exception)
async def log_exception_handler(request: Request, exc: Exception):
logging.error("Unhandled exception:", exc_info=True)
log_error("Unhandled exception:", exc_info=True)

return JSONResponse(
{"detail": "Internal server error"}, status_code=HTTP_500_INTERNAL_SERVER_ERROR
Expand Down Expand Up @@ -84,5 +84,4 @@ async def custom_redoc():
)

if __name__ == "__main__":
logging.basicConfig(level=logging.ERROR)
uvicorn.run(app, host="0.0.0.0", port=8080)
48 changes: 48 additions & 0 deletions api/src/log_config.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# ! time formats need to always be consistent
# string datetime in ISO 8601 format (datetime default) preferred

# Standard Python logging dictConfig schema (applied via logging.config).
version: 1
# NOTE(review): true silences any loggers created before this config is
# applied — confirm nothing obtains/configures a logger at import time.
disable_existing_loggers: true
formatters:
  # JSON formatter classes implemented in api/src/logging.py.
  json_request:
    class: src.logging.AccesslogFormatter
  json_error:
    class: src.logging.VerboseJSONFormatter
  json_catchall:
    class: src.logging.VerboseJSONFormatter
  json_api:
    class: src.logging.APILogFormatter
handlers:
  # One stream handler per formatter: info-level streams to stdout,
  # errors to stderr.
  json_api:
    class: logging.StreamHandler
    stream: ext://sys.stdout
    formatter: json_api
  json_request:
    class: logging.StreamHandler
    stream: ext://sys.stdout
    formatter: json_request
  json_error:
    class: logging.StreamHandler
    stream: ext://sys.stderr
    formatter: json_error
  json_catchall:
    # this is just so we never drop logs, nothing should be handled by this handler
    class: logging.StreamHandler
    stream: ext://sys.stdout
    formatter: json_catchall
loggers:
  # uvicorn access lines -> structured request JSON on stdout.
  uvicorn.access:
    level: INFO
    handlers: [json_request]
    propagate: false
  # uvicorn server errors -> verbose JSON on stderr.
  uvicorn.error:
    level: ERROR
    handlers: [json_error]
    propagate: false
  # Application logger used by the log_info/log_error wrappers.
  bia.api:
    level: INFO
    handlers: [json_api]
    propagate: false
root:
  level: INFO
  handlers: [json_catchall]
72 changes: 72 additions & 0 deletions api/src/logging.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
import logging
import datetime

import json_log_formatter


class APILogFormatter(json_log_formatter.JSONFormatter):
    """Formatter for application ("bia.api") log records.

    Emits a flat JSON object carrying the level name, the record's
    creation timestamp, the rendered message, and any structured
    extras supplied by the caller.
    """

    def json_record(
        self,
        message: str,
        extra: dict[str, str | int | float],
        record: logging.LogRecord,
    ) -> dict[str, str | int | float]:
        # Timestamp is derived from the record's own creation time so
        # every entry shares one consistent datetime source.
        created_at = datetime.datetime.fromtimestamp(record.created)
        return {
            "level": record.levelname,
            "time": created_at,
            "message": message,
            "extra": extra,
        }


class AccesslogFormatter(json_log_formatter.JSONFormatter):
    """Formatter for uvicorn access-log records.

    Flattens the positional args uvicorn attaches to each access record
    (indices 0/1/2/4 are read as source, verb, path and status;
    presumably index 3 is the HTTP version — verify against uvicorn).
    """

    def json_record(
        self,
        message: str,
        extra: dict[str, str | int | float],
        record: logging.LogRecord,
    ) -> dict[str, str | int | float]:
        request_args = record.args
        # NOTE(review): record.msecs is the millisecond fraction of the
        # record's timestamp, not a measured response duration — confirm
        # the "response_time_ms" field is intended.
        return {
            "level": record.levelname,
            "response_time_ms": int(record.msecs),
            "time": datetime.datetime.fromtimestamp(record.created),
            "source": request_args[0],
            "verb": request_args[1],
            "path": request_args[2],
            "status": request_args[4],
        }


class VerboseJSONFormatter(json_log_formatter.VerboseJSONFormatter):
    """
    Wrapper to make logs uniform / easy to search: the upstream
    "levelname" key is renamed to "level" so all our formatters agree.
    """

    def json_record(
        self,
        message: str,
        extra: dict[str, str | int | float],
        record: logging.LogRecord,
    ):
        payload = super().json_record(message, extra, record)
        # Rename for consistency with APILogFormatter / AccesslogFormatter.
        payload["level"] = payload.pop("levelname")
        return payload


"""
Wrappers to make sure we always do logs right
- never use a global logger to avoid locks
- don't DI the logger to avoid messyness
Name tweaked to avoid mixup with logging functions
"""


def log_info(msg, *args, **kwargs):
    """Log *msg* at INFO level on the shared "bia.api" logger.

    Mirrors ``logging.Logger.info``: ``*args`` are lazy %-style
    substitutions and ``**kwargs`` (``extra``, ``exc_info``, ...) pass
    straight through. The logger is looked up per call by design (see
    module note above about avoiding a global logger).
    """
    # stacklevel=2 makes %(funcName)s / %(pathname)s in formatters point
    # at the caller rather than this wrapper; callers may still override.
    kwargs.setdefault("stacklevel", 2)
    logger = logging.getLogger("bia.api")
    return logger.info(msg, *args, **kwargs)


def log_error(msg, *args, **kwargs):
    """Log *msg* at ERROR level on the shared "bia.api" logger.

    Mirrors ``logging.Logger.error``: ``*args`` are lazy %-style
    substitutions and ``**kwargs`` (notably ``exc_info=True``, as used
    by the app-level exception handler) pass straight through.
    """
    # stacklevel=2 makes %(funcName)s / %(pathname)s in formatters point
    # at the caller rather than this wrapper; callers may still override.
    kwargs.setdefault("stacklevel", 2)
    logger = logging.getLogger("bia.api")
    return logger.error(msg, *args, **kwargs)

0 comments on commit eb20251

Please sign in to comment.