Skip to content
This repository has been archived by the owner on Jul 11, 2023. It is now read-only.

Change to use global configuration. #980

Open
wants to merge 12 commits into
base: main
Choose a base branch
from
Binary file not shown.
34 changes: 31 additions & 3 deletions gmprocess/subcommands/base.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import atexit
import logging
import os
import sys

from abc import ABC, abstractmethod
import logging

from gmprocess.subcommands.lazy_loader import LazyLoader

Expand Down Expand Up @@ -33,8 +35,16 @@ def __init__(self):
"""Dictionary instance variable to track files created by module."""
self.files_created = {}

self.workspace = None

# Make sure we close the workspace on exit
atexit.register(self.close_workspace)

def open_workspace(self, eventid):
"""Open workspace, add as attribute."""

self.close_workspace()

event_dir = os.path.join(self.gmrecords.data_path, eventid)
workname = os.path.join(event_dir, const.WORKSPACE_NAME)
if not os.path.isfile(workname):
Expand All @@ -46,10 +56,23 @@ def open_workspace(self, eventid):
return eventid

self.workspace = ws.StreamWorkspace.open(workname)
self.gmrecords.conf = self._get_config()

return self.workspace.dataset

def event_dir(self, event_id):
    """Return the normalized path to the data directory for ``event_id``."""
    raw_path = os.path.join(self.gmrecords.data_path, event_id)
    return os.path.normpath(raw_path)

def close_workspace(self):
    """Close the open workspace, if any.

    This is registered as an atexit hook, so it may run when no workspace
    was ever opened (``self.workspace`` is None); in that case it is a
    no-op. After a successful close, ``self.workspace`` is reset to None
    so a later call (e.g. the atexit hook after an explicit close) does
    not close the workspace twice. An explicit None check is used instead
    of catching AttributeError, which would also mask real attribute bugs
    raised inside ``close()``.
    """
    workspace = getattr(self, "workspace", None)
    if workspace is not None:
        logging.debug("Closing the workspace.")
        workspace.close()
        # No dataset to close on subsequent calls.
        self.workspace = None

@property
@abstractmethod
Expand Down Expand Up @@ -198,7 +221,12 @@ def _get_labels(self):

def _get_config(self):
if hasattr(self, "workspace") and hasattr(self.workspace, "config"):
logging.info('Loading config from workspace.')
config = self.workspace.config
else:
config = confmod.get_config()
logging.info('Loading config from file.')
config = confmod.get_config(config_path=self.gmrecords.conf_path)
# Update the config in the workspace
self.workspace.addConfig(config=config)

return config
31 changes: 13 additions & 18 deletions gmprocess/subcommands/compute_station_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,23 +61,10 @@ def main(self, gmrecords):
def _event_station_metrics(self, event):
self.eventid = event.id
logging.info(f"Computing station metrics for event {self.eventid}...")
event_dir = os.path.join(self.gmrecords.data_path, self.eventid)
workname = os.path.normpath(
os.path.join(event_dir, utils.constants.WORKSPACE_NAME)
)
if not os.path.isfile(workname):
logging.info(
"No workspace file found for event %s. Please run "
"subcommand 'assemble' to generate workspace file." % self.eventid
)
logging.info("Continuing to next event.")
return event.id

self.workspace = ws.StreamWorkspace.open(workname)
ds = self.workspace.dataset
ds = self.open_workspace(event.id)
self._get_labels()

config = self._get_config()
config = self.gmrecords.conf

if not hasattr(self, "vs30_grids"):
vs30_grids = None
Expand All @@ -92,10 +79,9 @@ def _event_station_metrics(self, event):

station_list = ds.waveforms.list()
if not len(station_list):
self.workspace.close()
return event.id

rupture_file = rupt_utils.get_rupture_file(event_dir)
rupture_file = rupt_utils.get_rupture_file(self.event_dir(self.eventid))
origin = rupt.origin.Origin(
{
"id": self.eventid,
Expand Down Expand Up @@ -257,7 +243,6 @@ def _event_station_metrics(self, event):
"with tag '%s'." % self.gmrecords.args.label
)

self.workspace.close()
return event.id

def _get_ps2ff_splines(self):
Expand Down Expand Up @@ -292,11 +277,21 @@ def _get_ps2ff_splines(self):
kind="linear",
copy=False,
assume_sorted=True,
bounds_error=False,
fill_value=(
(Rjb_hat[0], Rjb_var[0]),
(Rjb_hat[-1], Rjb_var[-1])
)
)
self.rrup_spline = spint.interp1d(
repi,
np.vstack((Rrup_hat, Rrup_var)),
kind="linear",
copy=False,
assume_sorted=True,
bounds_error=False,
fill_value=(
(Rrup_hat[0], Rrup_var[0]),
(Rrup_hat[-1], Rrup_var[-1])
)
)
18 changes: 4 additions & 14 deletions gmprocess/subcommands/compute_waveform_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,21 +53,12 @@ def main(self, gmrecords):
def _compute_event_waveform_metrics(self, event):
self.eventid = event.id
logging.info(f"Computing waveform metrics for event {self.eventid}...")
event_dir = os.path.join(self.gmrecords.data_path, self.eventid)
workname = os.path.normpath(os.path.join(event_dir, const.WORKSPACE_NAME))
if not os.path.isfile(workname):
logging.info(
"No workspace file found for event %s. Please run "
"subcommand 'assemble' to generate workspace file." % self.eventid
)
logging.info("Continuing to next event.")
return event.id

self.workspace = ws.StreamWorkspace.open(workname)
ds = self.workspace.dataset
station_list = ds.waveforms.list()
ds = self.open_workspace(event.id)
self._get_labels()
config = self._get_config()
config = self.gmrecords.conf

station_list = ds.waveforms.list()

summaries = []
metricpaths = []
Expand Down Expand Up @@ -148,5 +139,4 @@ def _compute_event_waveform_metrics(self, event):
overwrite=self.gmrecords.args.overwrite,
)

self.workspace.close()
return event.id
16 changes: 4 additions & 12 deletions gmprocess/subcommands/export_failure_tables.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,19 +59,11 @@ def main(self, gmrecords):
for event in self.events:
self.eventid = event.id
logging.info(f"Creating failure tables for event {self.eventid}...")
event_dir = os.path.join(self.gmrecords.data_path, self.eventid)
workname = os.path.normpath(os.path.join(event_dir, const.WORKSPACE_NAME))
if not os.path.isfile(workname):
logging.info(
"No workspace file found for event %s. Please run "
"subcommand 'assemble' to generate workspace file." % self.eventid
)
logging.info("Continuing to next event.")
continue

self.workspace = ws.StreamWorkspace.open(workname)
self.open_workspace(event.id)
self._get_labels()

self._get_pstreams()
self.workspace.close()

if not (hasattr(self, "pstreams") and len(self.pstreams) > 0):
logging.info(
Expand All @@ -84,7 +76,7 @@ def main(self, gmrecords):

base_file_name = os.path.normpath(
os.path.join(
event_dir,
self.event_dir(event.id),
"%s_%s_failure_reasons_%s"
% (
gmrecords.project,
Expand Down
14 changes: 2 additions & 12 deletions gmprocess/subcommands/export_metric_tables.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,19 +44,10 @@ def main(self, gmrecords):
for event in self.events:
self.eventid = event.id
logging.info(f"Creating tables for event {self.eventid}...")
event_dir = os.path.join(gmrecords.data_path, self.eventid)
workname = os.path.normpath(os.path.join(event_dir, const.WORKSPACE_NAME))
if not os.path.isfile(workname):
logging.info(
"No workspace file found for event %s. Please run "
"subcommand 'assemble' to generate workspace file." % self.eventid
)
logging.info("Continuing to next event.")
continue

self.workspace = ws.StreamWorkspace.open(workname)
self.open_workspace(event.id)
self._get_labels()
config = self._get_config()
config = self.gmrecords.conf

event_table, imc_tables, readmes = self.workspace.getTables(
self.gmrecords.args.label, config
Expand All @@ -72,7 +63,6 @@ def main(self, gmrecords):
snr_table, snr_readme = self.workspace.getSNRTable(
self.eventid, self.gmrecords.args.label, config
)
self.workspace.close()

outdir = gmrecords.data_path

Expand Down
32 changes: 11 additions & 21 deletions gmprocess/subcommands/export_provenance_tables.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,41 +41,31 @@ def main(self, gmrecords):
for event in self.events:
self.eventid = event.id
logging.info(f"Creating provenance tables for event {self.eventid}...")
event_dir = os.path.normpath(
os.path.join(gmrecords.data_path, self.eventid)
)
workname = os.path.join(event_dir, const.WORKSPACE_NAME)
if not os.path.isfile(workname):
logging.info(
"No workspace file found for event %s. Please run "
"subcommand 'assemble' to generate workspace file." % self.eventid
)
logging.info("Continuing to next event.")
continue

self.workspace = ws.StreamWorkspace.open(workname)
self.open_workspace(event.id)
self._get_pstreams()

if not (hasattr(self, "pstreams") and len(self.pstreams) > 0):
logging.info(
"No processed waveforms available. No provenance tables created."
)
self.workspace.close()
continue

provdata = self.workspace.getProvenance(
self.eventid, labels=self.gmrecords.args.label
)
self.workspace.close()

basename = f"{gmrecords.project}_{gmrecords.args.label}_provenance"
basename = os.path.join(
self.event_dir(event.id),
f"{gmrecords.project}_{gmrecords.args.label}_provenance"
)
# Write the provenance table in the requested format. Fix: the Excel
# branch used the misspelled extension '.xslx'; Excel files use '.xlsx'
# (the pre-change code also wrote '.xlsx').
if gmrecords.args.output_format == "csv":
    fname = basename + '.csv'
    self.append_file("Provenance", fname)
    provdata.to_csv(fname, index=False)
else:
    fname = basename + '.xlsx'
    self.append_file("Provenance", fname)
    provdata.to_excel(fname, index=False)

self._summarize_files_created()
18 changes: 3 additions & 15 deletions gmprocess/subcommands/export_shakemap.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,31 +54,19 @@ def main(self, gmrecords):
self.eventid = event.id
logging.info(f"Creating shakemap files for event {self.eventid}...")

event_dir = os.path.normpath(os.path.join(gmrecords.data_path, event.id))
workname = os.path.join(event_dir, const.WORKSPACE_NAME)
if not os.path.isfile(workname):
logging.info(
"No workspace file found for event %s. Please run "
"subcommand 'assemble' to generate workspace file." % event.id
)
logging.info("Continuing to next event.")
continue

self.workspace = ws.StreamWorkspace.open(workname)
self.open_workspace(event.id)
self._get_labels()
config = self._get_config()

expanded_imts = self.gmrecords.args.expand_imts
jsonfile, stationfile, _ = sm_utils.create_json(
self.workspace,
event,
event_dir,
self.event_dir(event.id),
self.gmrecords.args.label,
config=config,
config=self.gmrecords.conf,
expanded_imts=expanded_imts,
)

self.workspace.close()
if jsonfile is not None:
self.append_file("shakemap", jsonfile)
if stationfile is not None:
Expand Down
Loading