Skip to content

Commit

Permalink
Add flask-migrate
Browse files Browse the repository at this point in the history
  • Loading branch information
sdunesme committed Apr 26, 2022
1 parent 259e459 commit 29d8c05
Show file tree
Hide file tree
Showing 14 changed files with 255 additions and 34 deletions.
6 changes: 4 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,8 @@ cp .env.example .env
vi .env

# Initialize database
flask init-db
flask init-db # Only the first time to seed the first data
flask db upgrade # At each woodcamrm version upgrade

# Run application
flask run
Expand Down Expand Up @@ -90,7 +91,8 @@ sudo systemctl start woodcamrm.service
docker-compose up -d

# Create database
docker exec woodcam-rm_app_1 flask init-db -y
docker exec woodcam-rm_app_1 flask init-db # Only the first time to seed the first data
docker exec woodcam-rm_app_1 flask db upgrade # At each woodcamrm version upgrade

# Restart services
docker-compose restart
Expand Down
1 change: 1 addition & 0 deletions migrations/README
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Single-database configuration for Flask.
50 changes: 50 additions & 0 deletions migrations/alembic.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
# A generic, single database configuration.

[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
98 changes: 98 additions & 0 deletions migrations/env.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
from __future__ import with_statement

import logging
from logging.config import fileConfig

from flask import current_app

from alembic import context

# This is the Alembic Config object, which provides
# access to the values within the .ini file in use
# (migrations/alembic.ini in this project).
config = context.config

# Interpret the config file for Python logging.
# This sets up the root/sqlalchemy/alembic/flask_migrate loggers
# declared in alembic.ini.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')

# Point Alembic at the Flask app's database instead of a hard-coded URL:
# the engine URL is pulled from the Flask-Migrate extension at runtime.
# Literal '%' characters are doubled to '%%' because the value passes
# through configparser interpolation, where a bare '%' is special.
config.set_main_option(
    'sqlalchemy.url',
    str(current_app.extensions['migrate'].db.get_engine().url).replace(
        '%', '%%'))
# MetaData of the app's SQLAlchemy models, used for 'autogenerate' support.
target_metadata = current_app.extensions['migrate'].db.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.

def include_object(object, name, type_, reflected, compare_to):
    """Autogenerate filter: exclude the PostGIS-owned ``spatial_ref_sys``
    table from migrations, include every other schema object."""
    return not (type_ == "table" and name == 'spatial_ref_sys')


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    The context is configured with just a database URL rather than a
    live Engine, so no DBAPI needs to be importable; every
    context.execute() call is emitted as SQL script output instead of
    being run against a connection.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        include_object=include_object,
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: https://alembic.sqlalchemy.org/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        # Only intervene for `revision --autogenerate`; plain `revision`
        # should still produce an (empty) script.
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                # Clearing the directives list suppresses file generation.
                directives[:] = []
                logger.info('No changes in schema detected.')

    # Reuse the Flask app's engine via the Flask-Migrate extension.
    connectable = current_app.extensions['migrate'].db.get_engine()

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            process_revision_directives=process_revision_directives,
            include_object=include_object,
            # Forward any extra options Flask-Migrate was configured with
            # (e.g. compare_type, render_as_batch).
            **current_app.extensions['migrate'].configure_args
        )

        with context.begin_transaction():
            context.run_migrations()


# Entry point: Alembic imports this module with `context` already set up
# and we dispatch on the requested mode (`--sql` selects offline mode).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
24 changes: 24 additions & 0 deletions migrations/script.py.mako
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
${upgrades if upgrades else "pass"}


def downgrade():
${downgrades if downgrades else "pass"}
28 changes: 28 additions & 0 deletions migrations/versions/6d037e99c04d_initial_migration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
"""initial migration
Revision ID: 6d037e99c04d
Revises:
Create Date: 2022-04-26 13:25:12.988954
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '6d037e99c04d'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    """Initial migration: align the existing schema with the model metadata."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Drops the GiST index on stations.geom. Presumably this index is not
    # represented in the SQLAlchemy metadata (geometry column managed by
    # PostGIS tooling) so autogenerate flagged it -- TODO confirm it is
    # safe to drop in production before upgrading.
    op.drop_index('idx_stations_geom', table_name='stations')
    # ### end Alembic commands ###


def downgrade():
    """Revert the initial migration: restore the stations.geom index."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Recreates the index dropped by upgrade(); note this is a plain
    # (non-GiST) create_index call -- the original index type may differ.
    op.create_index('idx_stations_geom', 'stations', ['geom'], unique=False)
    # ### end Alembic commands ###
34 changes: 34 additions & 0 deletions migrations/versions/77b752ddd223_add_recording_parameters.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
"""add recording parameters
Revision ID: 77b752ddd223
Revises: 6d037e99c04d
Create Date: 2022-04-26 13:31:59.356854
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '77b752ddd223'
down_revision = '6d037e99c04d'
branch_labels = None
depends_on = None


def upgrade():
    """Add per-station video recording parameter columns to `stations`."""
    # ### commands auto generated by Alembic - please adjust! ###
    # All columns are nullable with no server default; the model-level
    # defaults (fps=3, codec="h264", compression=30) apply only to rows
    # created through the ORM, not to existing rows.
    op.add_column('stations', sa.Column('rec_resolution', sa.String(length=12), nullable=True))
    op.add_column('stations', sa.Column('rec_fps', sa.Integer(), nullable=True))
    op.add_column('stations', sa.Column('rec_codec', sa.String(length=12), nullable=True))
    op.add_column('stations', sa.Column('rec_compression', sa.Integer(), nullable=True))
    # ### end Alembic commands ###


def downgrade():
    """Remove the recording parameter columns added by this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Dropped in reverse order of creation; column data is lost on downgrade.
    op.drop_column('stations', 'rec_compression')
    op.drop_column('stations', 'rec_codec')
    op.drop_column('stations', 'rec_fps')
    op.drop_column('stations', 'rec_resolution')
    # ### end Alembic commands ###
1 change: 1 addition & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
'flask-mqtt',
'flask-mail',
'flask-wtf',
'flask-migrate',
'psycopg2-binary',
'requests',
'python-dotenv',
Expand Down
12 changes: 2 additions & 10 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,7 @@

import pytest
from woodcamrm import create_app
from woodcamrm.db import get_db, init_db

with open(os.path.join(os.path.dirname(__file__), 'data.sql'), 'rb') as f:
_data_sql = f.read().decode('utf8')
from woodcamrm.db import init_db


@pytest.fixture
Expand All @@ -18,11 +15,6 @@ def app():

with app.app_context():
init_db()
db = get_db()
cur = db.cursor()
cur.execute(_data_sql)
cur.close()
db.commit()

yield app

Expand All @@ -41,7 +33,7 @@ class AuthActions(object):
def __init__(self, client):
self._client = client

def login(self, username='test', password='test'):
def login(self, username=app.config['DEFAULT_USER'], password=app.config['DEFAULT_PASSWORD']):
return self._client.post(
'/auth/login',
data={'username': username, 'password': password}
Expand Down
22 changes: 2 additions & 20 deletions tests/test_db.py
Original file line number Diff line number Diff line change
@@ -1,22 +1,4 @@
import psycopg2

import pytest
from woodcamrm.db import get_db


# def test_get_close_db(app):
# with app.app_context():
# db = get_db()
# assert db is get_db()

# with pytest.raises(psycopg2.ProgrammingError) as e:
# cur = db.cursor()
# db.execute('SELECT 1;')
# cur.close()
# db.commit()

# assert 'closed' in str(e.value)


def test_init_db_command(runner, monkeypatch):
class Recorder(object):
Expand All @@ -26,6 +8,6 @@ def fake_init_db():
Recorder.called = True

monkeypatch.setattr('woodcamrm.db.init_db', fake_init_db)
result = runner.invoke(args=['init-db'])
assert 'Initialized' in result.output
result = runner.invoke(args=['init-db -y'])
assert 'Initialized the database.' in result.output
assert Recorder.called
3 changes: 2 additions & 1 deletion woodcamrm/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@

from celery import Celery

from woodcamrm.extensions import mqtt, dbsql, scheduler, mail
from woodcamrm.extensions import mqtt, dbsql, scheduler, mail, migrate
from woodcamrm.db import Stations


Expand Down Expand Up @@ -51,6 +51,7 @@ def create_app(test_config=None):

dbsql.init_app(app)
db.init_app(app)
migrate.init_app(app, dbsql)

with app.app_context():
try:
Expand Down
5 changes: 5 additions & 0 deletions woodcamrm/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,11 @@ class Stations(dbsql.Model):
sd_disruption = dbsql.Column(dbsql.Integer, nullable=False, default=0)
tampering = dbsql.Column(dbsql.Integer, nullable=False, default=0)

rec_resolution = dbsql.Column(dbsql.String(12))
rec_fps = dbsql.Column(dbsql.Integer, default=3)
rec_codec = dbsql.Column(dbsql.String(12), default="h264")
rec_compression = dbsql.Column(dbsql.Integer, default=30)

jan_threshold = dbsql.Column(dbsql.Numeric)
feb_threshold = dbsql.Column(dbsql.Numeric)
mar_threshold = dbsql.Column(dbsql.Numeric)
Expand Down
4 changes: 3 additions & 1 deletion woodcamrm/extensions.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_apscheduler import APScheduler
from flask_mqtt import Mqtt
from flask_mail import Mail

dbsql = SQLAlchemy()
scheduler = APScheduler()
mqtt = Mqtt()
mail = Mail()
mail = Mail()
migrate = Migrate()
1 change: 1 addition & 0 deletions woodcamrm/station.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ class StationForm(FlaskForm):
installation_port = IntegerField('Installation ping port', validators=[Optional()])
snmp_received = StringField('SNMP MIB for received data', validators=[Optional()])
snmp_transmitted = StringField('SNMP MIB for transmitted data', validators=[Optional()])

jan_threshold = DecimalField('Water level threshold january (mm)', validators=[Optional()])
feb_threshold = DecimalField('Water level threshold february (mm)', validators=[Optional()])
mar_threshold = DecimalField('Water level threshold march (mm)', validators=[Optional()])
Expand Down

0 comments on commit 29d8c05

Please sign in to comment.