Run data recovery at night
sdunesme committed Oct 26, 2022
1 parent 3667de7 commit 2281777
Showing 4 changed files with 99 additions and 5 deletions.
@@ -0,0 +1,34 @@
"""create planned recoveries table
Revision ID: 251ffeecab94
Revises: a17e2ea61402
Create Date: 2022-10-26 12:42:13.485167
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '251ffeecab94'
down_revision = 'a17e2ea61402'
branch_labels = None
depends_on = None


def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('planned_recoveries',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('common_name', sa.String(length=120), nullable=False),
sa.Column('from_date', sa.DateTime(), nullable=False),
sa.Column('to_date', sa.DateTime(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###


def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('planned_recoveries')
# ### end Alembic commands ###
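
A note on applying this migration: assuming a standard Alembic setup (the project configuration is not part of this commit), the table is created with alembic upgrade 251ffeecab94 (or alembic upgrade head) and dropped again with alembic downgrade a17e2ea61402; under Flask-Migrate the equivalents are flask db upgrade and flask db downgrade.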
29 changes: 26 additions & 3 deletions woodcamrm/api_endpoints.py
@@ -12,7 +12,8 @@
 
 from flask_restx import Resource, fields, inputs
 from woodcamrm import api
-from woodcamrm.db import Stations, Users
+from woodcamrm.extensions import dbsql
+from woodcamrm.db import Stations, Users, PlannedRecoveries
 
 
 auth = HTTPBasicAuth()
@@ -163,7 +164,21 @@ class PlanRecovery(Resource):
     @datarec_ns.doc(description='Plan data recovery for the next night')
     @api.expect(clip_parser)
     def post(self) -> None:
-        return True
+
+        args = clip_parser.parse_args()
+        station = args['station']
+        from_date = args['from_date'].astimezone(pytz.UTC).replace(tzinfo=None)
+        to_date = args['to_date'].astimezone(pytz.UTC).replace(tzinfo=None)
+
+        # TODO: check if already planned before adding it
+        planned_rec = PlannedRecoveries(common_name=station,
+                                        from_date=from_date,
+                                        to_date=to_date)
+
+        dbsql.session.add(planned_rec)
+        dbsql.session.commit()
+
+        return {'success': True}
 
 
 @datarec_ns.route('/list_recovery')
@@ -172,4 +187,12 @@ class PlanRecovery(Resource):
 
     @datarec_ns.doc(description='List data recovery planned for the next night')
     def get(self) -> None:
-        return True
+
+        planned_rec = PlannedRecoveries.query.all()
+        results = {item.id: {'station': item.common_name,
+                             'from_date': item.from_date.strftime('%Y-%m-%dT%H:%M:%S%z'),
+                             'to_date': item.to_date.strftime('%Y-%m-%dT%H:%M:%S%z')} for item in planned_rec}
+
+        results = json.dumps(results, use_decimal=True)
+
+        return {'data': results}
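
A usage sketch for the two endpoints above. Several details are assumptions: the plan route's decorator sits outside the hunk shown, so '/plan_recovery' is a guessed path; the credentials are placeholders; host and port mirror the hard-coded values in jobs.py below; and clip_parser is assumed to parse ISO 8601 datetimes, consistent with the strftime format used elsewhere in this commit.

    import requests

    BASE = 'http://127.0.0.1:5000/api/v1/datarecovery'  # host/port as hard-coded in jobs.py
    AUTH = ('admin', 'secret')                          # placeholder credentials

    # Plan a recovery window for one station ('/plan_recovery' is an assumed path).
    r = requests.post(BASE + '/plan_recovery', auth=AUTH,
                      data={'station': 'my_station',
                            'from_date': '2022-10-25T18:00:00+02:00',
                            'to_date': '2022-10-26T06:00:00+02:00'})
    print(r.json())  # {'success': True}

    # List everything planned for the next night.
    r = requests.get(BASE + '/list_recovery', auth=AUTH)
    print(r.json())  # {'data': '{"1": {"station": "my_station", ...}}'}

Two quirks worth noting: list_recovery double-encodes its payload (the handler calls json.dumps, then flask_restx serializes the wrapper again), so clients must parse the 'data' string a second time; and because the plan endpoint strips tzinfo before storing, the saved datetimes are naive and the %z directive in the listing renders as an empty string.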
7 changes: 7 additions & 0 deletions woodcamrm/db.py
@@ -117,6 +117,13 @@ def __repr__(self):
         return '<Job %r>' % self.job_name
 
 
+class PlannedRecoveries(dbsql.Model):
+    id = dbsql.Column(dbsql.Integer, primary_key=True)
+    common_name = dbsql.Column(dbsql.String(120), nullable=False)
+    from_date = dbsql.Column(dbsql.DateTime, nullable=False)
+    to_date = dbsql.Column(dbsql.DateTime, nullable=False)
+
+
 class Settings(dbsql.Model):
     id = dbsql.Column(dbsql.Integer, primary_key=True)
     parameter = dbsql.Column(dbsql.String, unique=True, nullable=False)
34 changes: 32 additions & 2 deletions woodcamrm/jobs.py
@@ -5,7 +5,6 @@
 import time
 import json
 
-from ftplib import FTP, error_perm
 from datetime import datetime
 from suntime import Sun
 
@@ -16,7 +15,7 @@
 from woodcamrm import save_video_file
 from woodcamrm.auth import login_required
 from woodcamrm.extensions import scheduler, dbsql
-from woodcamrm.db import RecordMode, Stations, Jobs, Users
+from woodcamrm.db import RecordMode, Stations, Jobs, PlannedRecoveries
 
 bp = Blueprint("jobs", __name__, url_prefix="/jobs")
 
@@ -322,8 +321,39 @@ def records_check():
         jb.last_execution = datetime.now()
         jb.state = 4
         dbsql.session.commit()
 
 
+@scheduler.task(
+    "cron",
+    id="recover_data",
+    max_instances=1,
+    minute='30',
+    hour='23'
+)
+def recover_data():
+    with scheduler.app.app_context():
+
+        stations = Stations.query.filter(Stations.storage_path != None).all()
+
+        for st in stations:
+            # TODO: Check prometheus data to plan recovery automatically
+            # TODO: Parallelize by station
+
+            planned_rec = PlannedRecoveries.query.filter(PlannedRecoveries.common_name == st.common_name).all()
+
+            if planned_rec:
+                for record in planned_rec:
+                    r = requests.post('http://127.0.0.1:5000/api/v1/datarecovery/download_record',
+                                      auth=(scheduler.app.config["DEFAULT_USER"], scheduler.app.config["DEFAULT_PASSWORD"]),
+                                      data={'station': record.common_name,
+                                            'from_date': record.from_date.strftime('%Y-%m-%dT%H:%M:%S%z'),
+                                            'to_date': record.to_date.strftime('%Y-%m-%dT%H:%M:%S%z')})
+
+                    if r.status_code == 200:
+                        dbsql.session.delete(record)
+                        dbsql.session.commit()
+
+
 ########################
 # Manual jobs operations
 ########################
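A testing note: the job only fires at 23:30, but the @scheduler.task decorator suggests Flask-APScheduler, which can also run a registered job on demand. A minimal sketch, assuming scheduler is the instance exported by woodcamrm.extensions:

    from woodcamrm.extensions import scheduler

    scheduler.get_job('recover_data')  # inspect the registered cron trigger
    scheduler.run_job('recover_data')  # execute one recovery pass immediately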
