diff --git a/scripts/lib/db/db_tables.py b/scripts/lib/db/db_tables.py
index df1963d..f3ee885 100755
--- a/scripts/lib/db/db_tables.py
+++ b/scripts/lib/db/db_tables.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python3
 
+import os
 import sys
 import logging
 
@@ -17,7 +18,7 @@
 # Add handler to logger
 LOCAL_HANDLER = logging.StreamHandler(sys.stdout)
 LOGGER.addHandler(LOCAL_HANDLER)
-LOGGER.setLevel(logging.DEBUG) # logging.INFO
+LOGGER.setLevel(logging.INFO) # logging.DEBUG
 
 QUERY_DB_FILE = 'query_data.db'
 
@@ -143,15 +144,15 @@ class QueryDB():
     '''
     A simple database class to manage the creation, writing and reading of the query database
     '''
-    def __init__(self, create=False, db_name='query_data.db'):
-        LOGGER.debug(f"__init__ db {create=} {db_name=}")
+    def __init__(self, overwrite=False, db_name='query_data.db'):
+        LOGGER.debug(f"__init__ db {overwrite=} {db_name=}")
         self.error = ''
         try:
             db_name = 'sqlite:///' + db_name
             eng = create_engine(db_name, echo=False)
-            if create:
+            if overwrite:
                 Base.metadata.drop_all(bind=eng)
-                Base.metadata.create_all(bind=eng)
+            Base.metadata.create_all(bind=eng)
             # 'scoped_session()' makes a thread-safe cache of session objects
             # NOTE: Would like to eventually make scope_session() more global
             self.session_obj = scoped_session(sessionmaker(eng))
@@ -200,29 +201,18 @@ def add_part(self, json_str):
         :returns: a tuple (True, partinfo_obj) if successful
                           (False, exception string) if operation failed
         """
 
-        LOGGER.debug(f"add_part({json_str})")
-        LOGGER.debug(f"{self.metadata_obj.tables.keys()=}")
         try:
             if 'part_info' not in self.metadata_obj.tables.keys():
-                LOGGER.debug("'part_info' not in metadata")
                 part_obj = PartInfo(json=json_str)
-                LOGGER.debug(f"{part_obj=}")
                 self.ses.add(part_obj)
-                LOGGER.debug("'part_obj' added")
                 self.ses.commit()
-                LOGGER.debug("'part_obj' committed")
                 return True, part_obj
             part_obj = self.ses.query(PartInfo).filter_by(json=json_str).first()
-            LOGGER.debug(f"{part_obj=}")
             if part_obj is None:
                 part_obj = PartInfo(json=json_str)
-                LOGGER.debug(f"2:{part_obj=}")
                 self.ses.add(part_obj)
-                LOGGER.debug("2:'part_obj' added")
                 self.ses.commit()
-                LOGGER.debug("2:'part_obj' committed")
         except DatabaseError as db_exc:
-            LOGGER.debug(f"Exception in add_part {db_exc}")
             return False, str(db_exc)
         return True, part_obj
@@ -330,7 +320,7 @@ def __del__(self):
 if __name__ == "__main__":
     print("Testing query db")
     # Basic unit testing
-    QUERY_DB = QueryDB(create=True, db_name='db') #':memory:')
+    QUERY_DB = QueryDB(overwrite=True, db_name=':memory:')
     MSG = QUERY_DB.get_error()
     if MSG != '':
         print(MSG)
diff --git a/scripts/webapi/webapi.py b/scripts/webapi/webapi.py
index ae0193e..f8807fd 100644
--- a/scripts/webapi/webapi.py
+++ b/scripts/webapi/webapi.py
@@ -461,7 +461,7 @@ def make_getfeatinfobyid_response(model, version, query_format, layer_names, obj
     # Query database
     # Open up query database
     db_path = os.path.join(DATA_DIR, QUERY_DB_FILE)
-    qdb = QueryDB(create=False, db_name=db_path)
+    qdb = QueryDB(overwrite=False, db_name=db_path)
     err_msg = qdb.get_error()
     if err_msg != '':
         LOGGER.error('Could not open query db %s: %s', db_path, err_msg)
diff --git a/web_build/make_boreholes.py b/web_build/make_boreholes.py
index 9807c18..4dfd116 100755
--- a/web_build/make_boreholes.py
+++ b/web_build/make_boreholes.py
@@ -189,13 +189,13 @@ def get_boreholes(reader, qdb, param_obj, output_mode='GLTF', dest_dir=''):
     return loadconfig_list, blob_obj
 
 
-def process_single(dest_dir, input_file, db_name, create_db=True):
+def process_single(dest_dir, input_file, db_name, overwrite_db=True):
     '''
     Process a single model's boreholes
 
     :param dest_dir: directory to output database and files
     :param input_file: conversion parameter file
     :param db_name: name of database
-    :param create_db: optional (default True) create new database or append to existing one
+    :param overwrite_db: optional (default True) if True, remove all existing database tables
     '''
     LOGGER.info(f"Processing {input_file}")
@@ -214,7 +214,7 @@ def process_single(dest_dir, input_file, db_name, create_db=True):
     if reader.wfs is None:
         LOGGER.error("Cannot contact web service or no boreholes in range")
         return
-    qdb = QueryDB(create=create_db, db_name=db_name)
+    qdb = QueryDB(overwrite=overwrite_db, db_name=db_name)
     err_str = qdb.get_error()
     if err_str != '':
         LOGGER.error(f"Cannot open/create database: {err_str}")
@@ -255,7 +255,7 @@ def process_single(dest_dir, input_file, db_name, create_db=True):
             LOGGER.info(f"Creating {ARGS.dest_dir}")
             os.mkdir(ARGS.dest_dir)
         except OSError as os_exc:
-            LOGGER.error(f"Cannot create dir {ARGS.dest_dir}:{os_exc}")
+            LOGGER.error(f"Cannot create dir {ARGS.dest_dir}: {os_exc}")
             sys.exit(1)
 
     # Check input file
@@ -270,14 +270,20 @@ def process_single(dest_dir, input_file, db_name, create_db=True):
         if not os.path.isfile(ARGS.batch):
             LOGGER.error(f"Batch file does not exist: {ARGS.batch}")
             sys.exit(1)
-        CREATE_DB = True
+        db_file = os.path.join(ARGS.dest_dir, ARGS.database)
+        # Remove any existing db file so the batch run starts with a fresh database
+        if os.path.exists(db_file):
+            LOGGER.info(f"Removing {db_file}")
+            try:
+                os.remove(db_file)
+            except OSError as os_exc:
+                LOGGER.error(f"Cannot remove db file {db_file}: {os_exc}")
+                sys.exit(1)
         with open(ARGS.batch, 'r') as fp:
             for line in fp:
                 # Skip lines starting with '#'
                 if line[0] != '#':
-                    process_single(ARGS.dest_dir, line.rstrip('\n'),
-                                   os.path.join(ARGS.dest_dir, ARGS.database), CREATE_DB)
-                    CREATE_DB = False
+                    process_single(ARGS.dest_dir, line.rstrip('\n'), db_file, overwrite_db=False)
     else:
         print("No input file or batch file specified\n")
         PARSER.print_help()
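
For reference, a minimal usage sketch of the renamed flag follows (not part of the patch; the import path is an assumption based on the scripts/lib/db/db_tables.py layout above and may differ in practice). Because create_all() now sits outside the overwrite branch, the schema is always created: pass overwrite=True only when any existing tables should be dropped first, and overwrite=False to append to an existing database, as webapi.py now does when reading.

    # Sketch only: assumes db_tables.py is importable as 'db_tables'
    from db_tables import QueryDB

    # Fresh in-memory database: any existing tables are dropped, then the schema is created
    qdb = QueryDB(overwrite=True, db_name=':memory:')
    if qdb.get_error() != '':
        raise RuntimeError(qdb.get_error())

    # add_part() (shown in the db_tables.py hunk above) returns (True, part_obj) on success
    # or (False, error string) on failure; the JSON string here is purely illustrative
    ok, result = qdb.add_part('{"example": "illustrative json"}')

    # Append to an existing on-disk database: tables are created if missing, nothing is dropped
    qdb2 = QueryDB(overwrite=False, db_name='query_data.db')
    if qdb2.get_error() != '':
        raise RuntimeError(qdb2.get_error())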