diff --git a/human_experiments/datasette_interface/bin/clear_database.py b/human_experiments/datasette_interface/bin/clear_database.py
index c1b707889..2b3c77e68 100755
--- a/human_experiments/datasette_interface/bin/clear_database.py
+++ b/human_experiments/datasette_interface/bin/clear_database.py
@@ -1,13 +1,47 @@
 #!/usr/bin/env python
-from datasette_interface.database.config import engine
-from datasette_interface.database.entity.base.base import Base
+from datasette_interface.database.config import engine, Base
+from datasette_interface.database.entity.base.data_validity import DataValidity  # noqa: F401
+from datasette_interface.database.entity.base.eeg_device import EEGDevice  # noqa: F401
+from datasette_interface.database.entity.base.group_session import GroupSession  # noqa: F401
+from datasette_interface.database.entity.base.modality import Modality  # noqa: F401
+from datasette_interface.database.entity.base.participant import Participant  # noqa: F401
+from datasette_interface.database.entity.base.post_game_survey import PostGameSurvey  # noqa: F401
+from datasette_interface.database.entity.base.station import Station  # noqa: F401
+from datasette_interface.database.entity.base.task import Task  # noqa: F401
+from datasette_interface.database.entity.derived.eeg_sync import EEGSync  # noqa: F401
+from datasette_interface.database.entity.derived.ekg_sync import EKGSync  # noqa: F401
+from datasette_interface.database.entity.derived.fnirs_sync import FNIRSSync  # noqa: F401
+from datasette_interface.database.entity.derived.gsr_sync import GSRSync  # noqa: F401
+from datasette_interface.database.entity.signal.audio_vocalics import AudioVocalics  # noqa: F401
+from datasette_interface.database.entity.signal.eeg import EEGRaw  # noqa: F401
+from datasette_interface.database.entity.signal.fnirs import FNIRSRaw  # noqa: F401
+from datasette_interface.database.entity.signal.gaze import GAZERaw  # noqa: F401
+from datasette_interface.database.entity.signal.screen_capture import ScreenCapture  # noqa: F401
+from datasette_interface.database.entity.task.affective_task_event import (  # noqa: F401
+    AffectiveTaskEvent,
+)
+from datasette_interface.database.entity.task.finger_tapping_task_observation import (  # noqa: F401
+    FingerTappingTaskObservation,
+)
+from datasette_interface.database.entity.task.minecraft_task import (  # noqa: F401
+    MinecraftMission,
+    MinecraftTestbedMessage,
+)
+from datasette_interface.database.entity.task.ping_pong_competitive_task_observation import (  # noqa: F401
+    PingPongCompetitiveTaskObservation,
+)
+from datasette_interface.database.entity.task.ping_pong_cooperative_task_observation import (  # noqa: F401
+    PingPongCooperativeTaskObservation,
+)
+from datasette_interface.database.entity.task.rest_state_task import RestStateTask  # noqa: F401
 
 answer = input(
     "[WARN] This operation will erase all tables and data saved on them. Do you want "
     "to proceed? (y/n): "
 )
 
 if answer.lower() in ["y", "yes"]:
+    print(Base.metadata.tables)
     Base.metadata.drop_all(engine, checkfirst=True)
     print("Tables dropped successfully.")
 else:
diff --git a/human_experiments/datasette_interface/datasette_interface/common/config.py b/human_experiments/datasette_interface/datasette_interface/common/config.py
index 32ffa97fc..a6dd460f7 100644
--- a/human_experiments/datasette_interface/datasette_interface/common/config.py
+++ b/human_experiments/datasette_interface/datasette_interface/common/config.py
@@ -13,8 +13,8 @@ class Settings(BaseSettings):
     db_pass: str = ""
     db_host: str = "localhost"
     db_user: str = os.getenv("USER")
-    db_port: int = 5433
-    db_name: str = "tomcat"
+    db_port: int = int(os.getenv("POSTGRES_PORT", 5433))
+    db_name: str = os.getenv("TOMCAT_DB_NAME", "tomcat")
     working_env: str = DEVELOPMENT
     artifact_dir: str = f"/space/{USER}/tomcat"
     image_url_root_dir: str = "https://ivilab.cs.arizona.edu/data/tomcat/group"
diff --git a/human_experiments/datasette_interface/datasette_interface/database/config.py b/human_experiments/datasette_interface/datasette_interface/database/config.py
index 2efdf5410..08d702bb4 100644
--- a/human_experiments/datasette_interface/datasette_interface/database/config.py
+++ b/human_experiments/datasette_interface/datasette_interface/database/config.py
@@ -1,6 +1,7 @@
 from sqlalchemy import create_engine
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import Session, sessionmaker
+from sqlalchemy_utils import database_exists, create_database
 
 from datasette_interface.common.config import DEVELOPMENT, RUN_DIR, settings
 
@@ -19,6 +20,11 @@
 Base = declarative_base()
 
 engine = create_engine(SQLALCHEMY_DATABASE_URI)
+
+# Create the database in Postgres if it doesn't already exist.
+if not database_exists(engine.url):
+    create_database(engine.url)
+
 SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
 
 
diff --git a/human_experiments/datasette_interface/datasette_interface/database/entity/signal/fnirs.py b/human_experiments/datasette_interface/datasette_interface/database/entity/signal/fnirs.py
index ddf6b98db..efafaa904 100644
--- a/human_experiments/datasette_interface/datasette_interface/database/entity/signal/fnirs.py
+++ b/human_experiments/datasette_interface/datasette_interface/database/entity/signal/fnirs.py
@@ -11,7 +11,7 @@
 
 
 class FNIRSRaw(Base):
-    __tablename__ = "fnirs_raw_tmp"
+    __tablename__ = "fnirs_raw"
 
     group_session_id: Mapped[str] = mapped_column(
         "group_session", Text, ForeignKey(GroupSession.id), primary_key=True
diff --git a/human_experiments/datasette_interface/datasette_interface/raw/process_base_tables.py b/human_experiments/datasette_interface/datasette_interface/raw/process_base_tables.py
index 70c832082..bdc9957e4 100755
--- a/human_experiments/datasette_interface/datasette_interface/raw/process_base_tables.py
+++ b/human_experiments/datasette_interface/datasette_interface/raw/process_base_tables.py
@@ -348,41 +348,48 @@ def process_demographic_data():
     )
 
     for i, row in df.iterrows():
+        # Check if subject ID can be converted to an integer:
+        try:
+            participant_id = int(row["subject_id"])
+        except ValueError:
+            continue
+        # Check if subject ID is in table
         try:
             participant = db.scalars(
-                select(Participant).where(Participant.id == f"{row['subject_id']}")
+                select(Participant).where(Participant.id == participant_id)
             ).one()
-            for label in row.index:
-                field = data_dictionary_df.loc[label]
-                field_type = field["Field Type"]
-                entry = row.loc[label]
-                if pd.isna(entry):
-                    entry = None
-
-                if entry is not None:
-                    if field_type == "radio":
-                        choices = field["Choices, Calculations, OR Slider Labels"]
-                        choices = {
-                            int(k): v
-                            for k, v in [
-                                x.strip().split(", ") for x in choices.split("|")
-                            ]
-                        }
-                        row.loc[label] = choices[row.loc[label]]
+        except NoResultFound:
+            continue
 
-            # Participant 14 entered their age as 18` instead of 18.
-            if row.loc["age"] == "18`":
-                row.loc["age"] = 18
+        for label in row.index:
+            field = data_dictionary_df.loc[label]
+            field_type = field["Field Type"]
+            entry = row.loc[label]
+            if pd.isna(entry):
+                entry = None
+
+            if entry is not None:
+                if field_type == "radio":
+                    choices = field["Choices, Calculations, OR Slider Labels"]
+                    choices = {
+                        int(k): v
+                        for k, v in [
+                            x.strip().split(", ") for x in choices.split("|")
+                        ]
+                    }
+                    row.loc[label] = choices[row.loc[label]]
+
+        # Participant 14 entered their age as 18` instead of 18.
+        if row.loc["age"] == "18`":
+            row.loc["age"] = 18
 
-            for attr in row.index:
-                value = row.loc[attr]
-                if pd.isna(value):
-                    value = None
-                setattr(participant, attr, value)
-            db.commit()
-        except NoResultFound:
-            pass
+        for attr in row.index:
+            value = row.loc[attr]
+            if pd.isna(value):
+                value = None
+            setattr(participant, attr, value)
+        db.commit()
 
     db.close()
 
 
@@ -527,8 +534,6 @@ def process_post_game_survey():
     db = next(get_db())
     for i, row in tqdm(df.iterrows(), total=len(df)):
         participant_id = None
-        # Subject ID 63 did not finish the post-game survey in the first
-        # attempt, so we ignore the first attempt.
         if row.postgame_survey_timestamp == "[not completed]" or pd.isna(
             row.postgame_survey_timestamp
         ):
diff --git a/human_experiments/datasette_interface/requirements.txt b/human_experiments/datasette_interface/requirements.txt
index aebcd110b..62232d733 100644
--- a/human_experiments/datasette_interface/requirements.txt
+++ b/human_experiments/datasette_interface/requirements.txt
@@ -26,3 +26,4 @@ praat-textgrids==1.4.0
 torch~=2.1.2
 mne~=1.6.0
 neurokit2~=0.2.7
+sqlalchemy-utils~=0.41.1