Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fixed bugs in relative_TT_residuals_plotter #215

Open
wants to merge 9 commits into
base: develop
Choose a base branch
from
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -89,3 +89,4 @@ seismic/xcorqc/**/*.pbs
**/junk.*
**/LOG_CODE_DUPES_*.txt
**/LOG_LOCATION_DUPES_*.txt
/seismic/gps_corrections/p_combined.txt
2 changes: 1 addition & 1 deletion seismic/gps_corrections/gps_clock_correction_gui.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ def _update_next_button(self, *_unused):
nc_file_value = self.nc_file.get()
nc_file_valid = bool(nc_file_value) and os.path.isfile(nc_file_value)
if nc_file_valid:
self.station_code= self._extract_code_from_filename(nc_file_value)
self.station_code = self._extract_code_from_filename(nc_file_value)
else:
self.station_code = ''
self.stn_code_label['text'] = "Station code: " + self.station_code
Expand Down
26 changes: 15 additions & 11 deletions seismic/gps_corrections/relative_tt_residuals_plotter.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,10 +33,7 @@
else:
import pathlib # pylint: disable=import-error

from seismic.gps_corrections.picks_reader_utils import (read_picks_ensemble,
get_network_stations,
compute_matching_network_mask,
generate_large_events_catalog)
from seismic.gps_corrections.picks_reader_utils import (read_picks_ensemble, get_network_stations, compute_matching_network_mask, generate_large_events_catalog)

# pylint: disable=invalid-name, fixme, too-many-locals, too-many-statements
# pylint: disable=attribute-defined-outside-init, logging-format-interpolation, logging-not-lazy
Expand Down Expand Up @@ -201,9 +198,11 @@ def pandas_timestamp_to_plottable_datetime(data):
return data.transform(datetime.datetime.utcfromtimestamp).astype('datetime64[ms]').dt.to_pydatetime()


def _plot_target_network_rel_residuals(df, target, ref, batch_options, filter_options, tt_scale=50,
def _plot_target_network_rel_residuals(df, target, ref, batch_options, filter_options, tt_scale=False,
snr_scale=(0, 60), annotator=None):

if tt_scale is False:
tt_scale = 50
file_label = batch_options.batch_label
save_file = batch_options.save_file

Expand Down Expand Up @@ -299,7 +298,7 @@ def _plot_dataset(ds, net_code, ref_code):
df_export.to_csv(fname, index=False)


def _plot_network_relative_to_ref_station(df_plot, ref, target_stns, batch_options, filter_options, display_options):
def _plot_network_relative_to_ref_station(df_plot, ref, target_stns, batch_options, filter_options, display_options, tt_scale):
"""
Compute relative residuals and send to plotting function.

Expand All @@ -318,11 +317,13 @@ def _plot_network_relative_to_ref_station(df_plot, ref, target_stns, batch_optio
:type display_options: class DisplayOptions
"""
register_matplotlib_converters()
if df_plot is None:
return
df_plot = df_plot.assign(relTtResidual=(df_plot['ttResidual'] - df_plot['ttResidualRef']))

# Re-order columns
df_plot = df_plot[['#eventID', 'originTimestamp', 'mag', 'originLon', 'originLat', 'originDepthKm',
'net', 'sta', 'cha', 'pickTimestamp', 'phase', 'stationLon', 'stationLat',
'net', 'sta', 'cha', 'pickTimestamp', 'stationLon', 'stationLat',
'distance', 'snr', 'ttResidual', 'ttResidualRef', 'relTtResidual',
'qualityMeasureCWT', 'qualityMeasureSlope', 'nSigma']]

Expand All @@ -335,7 +336,7 @@ def _plot_decorator(opts):
if opts.deployments is not None:
_add_temporary_deployment_intervals(opts.deployments)

_plot_target_network_rel_residuals(df_plot, target_stns, ref, batch_options, filter_options,
_plot_target_network_rel_residuals(df_plot, target_stns, ref, batch_options, filter_options, tt_scale,
annotator=lambda: _plot_decorator(display_options))
deregister_matplotlib_converters()

Expand Down Expand Up @@ -666,13 +667,16 @@ def _get_known_temporary_deployments():
@click.option('--interactive', is_flag=True, default=False, show_default=True,
help='If True, plots will be displayed as popup windows instead of saving to file. '
'Use this option to interact with the data.')
@click.option('--tt-scale', type=float, default=50, show_default=True,
help='y-axis limits for TT residual plots')

def main(picks_file, network1, networks2, stations1=None, stations2=None,
min_distance=DEFAULT_MIN_DISTANCE, max_distance=DEFAULT_MAX_DISTANCE,
min_event_snr=DEFAULT_MIN_EVENT_SNR, cwt_cutoff=DEFAULT_CWT_CUTOFF,
slope_cutoff=DEFAULT_SLOPE_CUTOFF, nsigma_cutoff=DEFAULT_NSIGMA_CUTOFF,
min_event_magnitude=DEFAULT_MIN_EVENT_MAG, strict_filtering=DEFAULT_STRICT_FILTERING,
show_deployments=False, show_historical=True, include_alternate_catalog=True,
export_path=None, interactive=False): # pragma: no cover
export_path=None, interactive=False, tt_scale=False): # pragma: no cover
"""
Main function for running relative traveltime residual plotting. The picks ensemble file should
have column headings in accordance with picks_reader_utils.PICKS_TABLE_SCHEMA.
Expand Down Expand Up @@ -704,7 +708,7 @@ def main(picks_file, network1, networks2, stations1=None, stations2=None,
df_raw_picks = read_picks_ensemble(picks_file)
# Remove unused columns
df_raw_picks = df_raw_picks[['#eventID', 'originTimestamp', 'mag', 'originLon', 'originLat', 'originDepthKm',
'net', 'sta', 'cha', 'pickTimestamp', 'phase', 'stationLon', 'stationLat', 'az',
'net', 'sta', 'cha', 'pickTimestamp', 'stationLon', 'stationLat', 'az',
'baz', 'distance', 'ttResidual', 'snr', 'qualityMeasureCWT', 'qualityMeasureSlope',
'nSigma']]
log.debug("Number of raw picks: {}".format(len(df_raw_picks)))
Expand Down Expand Up @@ -789,7 +793,7 @@ def main(picks_file, network1, networks2, stations1=None, stations2=None,
df_final = analyze_target_relative_to_ref(df_picks, single_ref, TARGET_STNS, filter_options)
# Plot results
_plot_network_relative_to_ref_station(df_final, single_ref, TARGET_STNS, batch_options,
filter_options, display_options)
filter_options, display_options, tt_scale)
# end for
# end for

Expand Down
32 changes: 32 additions & 0 deletions seismic/gps_corrections/requirementsVDI.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# cd to hiperseis directory
# module purge
# module load openmpi
# module load mpi4py
# module load hdf5/1.10.5p
# module load python3-as-python
# alias python=python3
# python -m venv venv
# source venv/bin/activate
# pip install -r seismic/gps_corrections/requirementsVDI.txt
pyasdf
numpy
obspy
pandas
matplotlib
ordered-set
psutil
click
ujson
scikit-learn
netcdf4
tqdm
shapely
descartes
pyproj
pytz
pathlib2
jupyter
# export PYTHONPATH=$PYTHONPATH:$(pwd)
# python3 -m seismic.gps_corrections.gps_clock_correction_gui
# python -m seismic.gps_corrections.relative_tt_residuals_plotter seismic/gps_corrections/p_combined.txt --network1=1Q --networks2=AU --no-strict-filtering
# jupyter notebook
4 changes: 2 additions & 2 deletions seismic/xcorqc/xcorr_station_clock_analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,7 @@ def do_spline_regression(self, group_ids, regression_degree):
y = self.corrections_clean[mask_group]
# Constrain the knot frequency to be proportional to the order, so that we don't overfit
# to high frequency variations in the data just for the sake of lowering the spline residual.
t = np.linspace(x[0], x[-1], degree - 1)
t = np.linspace(x[0], x[-1], int(degree - 1))
r = LSQUnivariateSpline(x, y, t[1:-1], k=degree)
regressions[i] = r

Expand All @@ -176,7 +176,7 @@ def do_spline_resampling(self, group_ids, regressors, sampling_period_seconds):
timestamp_min = min(x)
timestamp_max = max(x)
num_samples = np.round((timestamp_max - timestamp_min) / sampling_period_seconds)
lin_times = np.linspace(timestamp_min, timestamp_max, num_samples + 1)
lin_times = np.linspace(timestamp_min, timestamp_max, int(num_samples + 1))
lin_corrections = regressors[i](lin_times)
regular_corrections[i] = {'times': lin_times, 'corrections': lin_corrections}

Expand Down