Skip to content

Commit

Permalink
Merge pull request #294 from uhh-lt/fix-database-in-tests
Browse files Browse the repository at this point in the history
Give tests their own database
  • Loading branch information
bigabig authored Jan 8, 2024
2 parents 12df3bf + c895afd commit d18aff8
Show file tree
Hide file tree
Showing 12 changed files with 66 additions and 26 deletions.
3 changes: 3 additions & 0 deletions .editorconfig
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
root = true

[*]
insert_final_newline = true

[*.{yml,yaml}]
indent_size = 2
indent_style = space
6 changes: 4 additions & 2 deletions .github/workflows/backend_check_schema.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,10 @@ on:
jobs:
check-schema:
runs-on: ubuntu-latest
env:
API_PRODUCTION_WORKERS: 1
RAY_ENABLED: False
COMPOSE_PROFILES: ""
steps:
- uses: actions/checkout@v3
- name: Start Docker Containers
Expand All @@ -19,8 +23,6 @@ jobs:
chmod -R a+rwx backend_repo/ models_cache/ spacy_models/
python monkey_patch_docker_compose_for_backend_tests.py --disable_ray
export GID=$(id -g)
export API_PRODUCTION_WORKERS=0
export RAY_CONFIG="./config_test_no_gpu.yaml"
docker compose -f compose-test.yml up -d --quiet-pull postgres
echo Waiting for containers to start...
sleep 10
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/backend_e2e_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ jobs:
chmod -R a+rwx backend_repo/ models_cache/ spacy_models/
python monkey_patch_docker_compose_for_backend_tests.py
export GID=$(id -g)
export API_PRODUCTION_WORKERS=0
export API_PRODUCTION_WORKERS=1
export RAY_CONFIG="./config_test_no_gpu.yaml"
docker compose -f compose-test.yml up -d
echo Waiting for containers to start...
Expand Down
10 changes: 6 additions & 4 deletions .github/workflows/backend_unit_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,12 @@ jobs:
echo "Environment: $ENVIRONMENT"
run-unit-tests:
runs-on: ubuntu-latest
env:
  API_PRODUCTION_WORKERS: 1
  # disable backend and frontend
  COMPOSE_PROFILES: ""
  RAY_ENABLED: False
  POSTGRES_DB: dwts-test
steps:
- name: Set Swap Space to 10GB
uses: pierotofy/set-swap-space@master
Expand All @@ -61,10 +67,6 @@ jobs:
chmod -R a+rwx backend_repo/ models_cache/ spacy_models/
python monkey_patch_docker_compose_for_backend_tests.py --disable_ray
export GID=$(id -g)
export API_PRODUCTION_WORKERS=0
# disable backend and frontend
export COMPOSE_PROFILES=
export RAY_ENABLED=False
docker compose -f compose-test.yml up -d --quiet-pull
echo Waiting for containers to start...
sleep 30
Expand Down
7 changes: 4 additions & 3 deletions .github/workflows/update-openapi-spec.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,10 @@ on:
jobs:
update-openapi-spec:
runs-on: ubuntu-latest
env:
API_PRODUCTION_WORKERS: 1
RAY_ENABLED: False
API_EXPOSED: 5500
steps:
- uses: actions/checkout@v3
- name: Checkout PR
Expand All @@ -24,9 +28,6 @@ jobs:
chmod -R a+rwx backend_repo/ models_cache/ spacy_models/
python monkey_patch_docker_compose_for_backend_tests.py --disable_ray
export GID=$(id -g)
export API_PRODUCTION_WORKERS=0
export RAY_ENABLED=False
export API_EXPOSED=5500
docker compose -f compose-test.yml up -d --quiet-pull
echo Waiting for containers to start...
- name: Setup node
Expand Down
8 changes: 8 additions & 0 deletions backend/.env.testing.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Copy this file to .env.testing and edit it to match your preferences
# Use it in conjunction with your normal env by running
#   source .env; source .env.testing
# before running your tests.

RAY_ENABLED=False
POSTGRES_DB=dwts-testing
1 change: 1 addition & 0 deletions backend/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ backend/logs

# Local configuration
.env
.env.testing

# npm stuff for python readability
src/node_modules
Expand Down
33 changes: 21 additions & 12 deletions backend/src/app/core/db/sql_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,11 @@

from loguru import logger
from pydantic import PostgresDsn
from sqlalchemy import create_engine
from sqlalchemy import create_engine, inspect
from sqlalchemy.engine import Engine
from sqlalchemy.orm import Session, sessionmaker
from sqlalchemy_utils import create_database, database_exists, drop_database

from app.core.data.orm.orm_base import ORMBase
from app.core.db.import_all_orms import * # noqa: F401, F403
from app.util.singleton_meta import SingletonMeta
from config import conf
Expand Down Expand Up @@ -37,6 +36,10 @@ def __new__(cls, *args, **kwargs):
cls.__engine: Engine = engine
cls.session_maker = sessionmaker(autoflush=False, bind=engine)

if kwargs.get("reset_database") is True:
logger.warning("Dropping existing DB!")
drop_database(cls.__engine.url)

return super(SQLService, cls).__new__(cls)

except Exception as e:
Expand All @@ -47,23 +50,29 @@ def __new__(cls, *args, **kwargs):
def __del__(self):
self.__engine.dispose()

# This method is unused and only left here for historic reference
def _create_database_and_tables(self, drop_if_exists: bool = False) -> None:
logger.info("Setting up PostgresSQL DB and tables...")
if drop_if_exists and database_exists(self.__engine.url):
logger.warning("Dropping existing DB!")
drop_database(self.__engine.url)
def drop_database(self):
    """Delete this service's database entirely. Irreversible."""
    logger.warning("Dropping existing DB!")
    target_url = self.__engine.url
    drop_database(target_url)

def create_database_if_not_exists(self):
    """Ensure the configured database exists, creating it when missing."""
    url = self.__engine.url
    if database_exists(url):
        return
    # create the DB
    create_database(url)
    logger.debug("Created DB!")

# create all tables from SQLAlchemy ORM Models
ORMBase.metadata.create_all(self.__engine)
logger.debug("Created Tables!")
def database_contains_data(self) -> bool:
    """Return True iff the database exists and any schema holds at least one table.

    Used by the test harness (conftest.py) to refuse running against a
    non-empty database, so real data is never accidentally dropped.
    """
    if not database_exists(self.__engine.url):
        # No database at all -> certainly no data.
        return False

    inspector = inspect(self.__engine)
    for schema in inspector.get_schema_names():
        # Lazy %-style logging instead of the leftover debug print().
        logger.debug("Inspecting schema: %s", schema)
        if len(inspector.get_table_names(schema=schema)) > 0:
            return True

    return False

@contextmanager
def db_session(self) -> Generator[Session, None, None]:
Expand Down
4 changes: 2 additions & 2 deletions backend/src/app/core/startup.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,10 +99,10 @@ def __init_services__(
repos = RepoService()
if create_root_repo_directory_structure:
repos._create_root_repo_directory_structure(remove_if_exists=reset_repo)
# create SQL DBs and Tables # TODO Flo: Alembic
# create SQL DBs and Tables
from app.core.db.sql_service import SQLService

SQLService(echo=sql_echo)
SQLService(echo=sql_echo, reset_database=reset_database)
# import and init ElasticSearch
from app.core.search.elasticsearch_service import ElasticSearchService

Expand Down
1 change: 1 addition & 0 deletions backend/src/migration/migrate.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,7 @@ def run_required_migrations():


def __migrate_database_schema() -> None:
    """Create the database if needed, then apply all pending Alembic migrations."""
    SQLService().create_database_if_not_exists()
    alembic_config = Config("alembic.ini")
    upgrade(alembic_config, "head")
    print("MIGRATED DB SCHEMA!")
Expand Down
16 changes: 15 additions & 1 deletion backend/src/test/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,17 +11,27 @@
import pytest
from fastapi import Request
from fastapi.datastructures import Headers
from loguru import logger

from app.core.authorization.authz_user import AuthzUser
from app.core.data.orm.project import ProjectORM
from app.core.db.sql_service import SQLService
from app.core.startup import startup
from config import conf

os.environ["RAY_ENABLED"] = "False"

# Flo: just do it once. We have to check because if we start the main function, unvicorn will import this
# file once more manually, so it would be executed twice.
STARTUP_DONE = bool(int(os.environ.get("STARTUP_DONE", "0")))
if not STARTUP_DONE:
if SQLService().database_contains_data():
# Make sure we don't accidentally delete important data
logger.error(
f"Database '{conf.postgres.db}' is not empty. The tests will only run given a database without any tables in it."
)
exit(1)

startup(reset_data=True)
os.environ["STARTUP_DONE"] = "1"

Expand All @@ -31,7 +41,11 @@
from app.core.data.dto.code import CodeCreate, CodeRead
from app.core.data.dto.project import ProjectCreate
from app.core.data.dto.user import UserCreate, UserRead
from app.core.db.sql_service import SQLService


def pytest_sessionfinish():
    """Pytest hook: runs once after the entire test session has finished."""
    # Make sure the next test session starts with a clean database
    SQLService().drop_database()


# Always use the asyncio backend for async tests
Expand Down
1 change: 0 additions & 1 deletion docker/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ services:
environment:
POSTGRES_USER: ${POSTGRES_USER:-dwtsuser}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-dwts123}
POSTGRES_DB: dwts
ports:
- "${POSTGRES_EXPOSED:-5432}:5432"
volumes:
Expand Down

0 comments on commit d18aff8

Please sign in to comment.