Skip to content

Commit

Permalink
feat(api): add log downloader documentation
Browse files Browse the repository at this point in the history
Part of #86
Closes #817
  • Loading branch information
murilx committed Feb 5, 2025
1 parent fcffb6e commit 0db3cbb
Show file tree
Hide file tree
Showing 3 changed files with 80 additions and 8 deletions.
17 changes: 17 additions & 0 deletions backend/kernelCI_app/typeModels/logDownloader.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
from typing import List
from pydantic import BaseModel


class LogData(BaseModel):
    """Metadata for one downloadable log file scraped from a log directory page."""

    # Direct URL of this individual log file (as opposed to the directory URL
    # the client queried with).
    specific_log_url: str
    # File name exactly as it appears in the directory listing.
    file_name: str
    # Size kept as the scraped display string (str, not an integer byte
    # count) — presumably e.g. "4.0K"; format depends on the listing page.
    file_size: str
    # Last-modified date kept as the scraped display string; the format is
    # whatever the listing page emits and is not normalized here.
    date: str


class LogDownloaderQueryParameters(BaseModel):
    """Query parameters accepted by the log-downloader endpoint."""

    # URL of the log directory listing page to scrape for log files.
    log_download_url: str


class LogDownloaderResponse(BaseModel):
    """Successful response body: the list of log files found at the given URL."""

    # All log files scraped from the listing page; empty results are reported
    # as an error response by the view instead of an empty list here.
    log_files: List[LogData]
26 changes: 19 additions & 7 deletions backend/kernelCI_app/views/logDownloaderView.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
import json
from http import HTTPStatus

import requests
from bs4 import BeautifulSoup, Tag
from django.http import JsonResponse, HttpResponseBadRequest
from drf_spectacular.utils import extend_schema
from pydantic import ValidationError
from rest_framework.response import Response
from rest_framework.views import APIView

from kernelCI_app.helpers.errorHandling import create_api_error_response, create_error_response
from kernelCI_app.typeModels.logDownloader import LogDownloaderResponse, LogDownloaderQueryParameters
from kernelCI_app.utils import getErrorResponseBody


def scrape_log_data(url):
Expand Down Expand Up @@ -55,17 +57,27 @@ def scrape_log_data(url):


class LogDownloaderView(APIView):
    """Endpoint that scrapes a log directory page and lists its log files."""

    @extend_schema(
        responses=LogDownloaderResponse,
        parameters=[LogDownloaderQueryParameters],
        methods=["GET"],
    )
    def get(self, request):
        """Return the log files found at ``log_download_url``.

        Responds with a ``LogDownloaderResponse`` payload on success, an API
        error response when scraping fails or no files are found, and a 500
        with pydantic validation details if the scraped data does not match
        the response model.
        """
        # URL of the directory listing page to scrape.
        log_download_url = request.GET.get("log_download_url")
        parsed_data = scrape_log_data(log_download_url)

        # scrape_log_data reports failures via an "error" key instead of raising.
        error_message = parsed_data.get("error")
        if error_message:
            return create_api_error_response(error_message=error_message)

        if not parsed_data["log_files"]:
            # Deliberately HTTPStatus.OK: the page was reachable, it just
            # lists no log files — not a client or server failure.
            return create_api_error_response(
                error_message="No log files found", status_code=HTTPStatus.OK
            )

        try:
            # Fixed typo: was `valid_reponse`.
            valid_response = LogDownloaderResponse(**parsed_data)
        except ValidationError as e:
            # json.loads so the validation details are returned as structured
            # JSON, not as a double-encoded string (Response(data=e.json())
            # would serialize the JSON string itself).
            return Response(
                data=json.loads(e.json()),
                status=HTTPStatus.INTERNAL_SERVER_ERROR,
            )

        return Response(valid_response.model_dump())
45 changes: 44 additions & 1 deletion backend/schema.yml
Original file line number Diff line number Diff line change
Expand Up @@ -287,6 +287,13 @@ paths:
/api/log-downloader/:
get:
operationId: log_downloader_retrieve
parameters:
- in: query
name: log_download_url
schema:
title: Log Download Url
type: string
required: true
tags:
- log-downloader
security:
Expand All @@ -295,7 +302,11 @@ paths:
- {}
responses:
'200':
description: No response body
content:
application/json:
schema:
$ref: '#/components/schemas/LogDownloaderResponse'
description: ''
/api/schema/:
get:
operationId: schema_retrieve
Expand Down Expand Up @@ -1656,6 +1667,38 @@ components:
- issues
title: LocalFilters
type: object
LogData:
properties:
specific_log_url:
title: Specific Log Url
type: string
file_name:
title: File Name
type: string
file_size:
title: File Size
type: string
date:
title: Date
type: string
required:
- specific_log_url
- file_name
- file_size
- date
title: LogData
type: object
LogDownloaderResponse:
properties:
log_files:
items:
$ref: '#/components/schemas/LogData'
title: Log Files
type: array
required:
- log_files
title: LogDownloaderResponse
type: object
Origin:
type: string
Summary:
Expand Down

0 comments on commit 0db3cbb

Please sign in to comment.