Merge pull request #13 from gregstarr/develop
finalize southern hemisphere
gregstarr authored Apr 1, 2022
2 parents 932cbba + a20ef65 commit 0ba2ecd
Showing 4 changed files with 23 additions and 5 deletions.
2 changes: 1 addition & 1 deletion trough/_arb.py
@@ -36,7 +36,7 @@ def get_arb_paths(start_date, end_date, hemisphere, processed_dir):
 def get_arb_data(start_date, end_date, hemisphere, processed_dir=None):
     if processed_dir is None:
         processed_dir = config.processed_arb_dir
-    data = xr.concat([xr.open_dataarray(file) for file in get_arb_paths(start_date, end_date, hemisphere, processed_dir)], 'time')
+    data = utils.read_netcdfs(get_arb_paths(start_date, end_date, hemisphere, processed_dir), 'time')
     return data.sel(time=slice(start_date, end_date))


2 changes: 1 addition & 1 deletion trough/_tec.py
@@ -73,7 +73,7 @@ def get_tec_paths(start_date, end_date, hemisphere, processed_dir):
 def get_tec_data(start_date, end_date, hemisphere, processed_dir=None):
     if processed_dir is None:
         processed_dir = config.processed_tec_dir
-    data = xr.concat([xr.open_dataarray(file) for file in get_tec_paths(start_date, end_date, hemisphere, processed_dir)], 'time')
+    data = utils.read_netcdfs(get_tec_paths(start_date, end_date, hemisphere, processed_dir), 'time')
     return data.sel(time=slice(start_date, end_date))


6 changes: 3 additions & 3 deletions trough/_trough.py
@@ -33,11 +33,11 @@ def get_model(tec_data, hemisphere, omni_file):
     logger.info(f"{kp.shape=}")
     apex = Apex(date=utils.datetime64_to_datetime(tec_data.time.values[0]))
     mlat = 65.5 * np.ones((tec_data.time.shape[0], tec_data.mlt.shape[0]))
+    if hemisphere == 'south':
+        mlat = mlat * -1
     for i in range(10):
         glat, glon = apex.convert(mlat, tec_data.mlt.values[None, :], 'mlt', 'geo', 350, tec_data.time.values[:, None])
         mlat = _model_subroutine_lat(tec_data.mlt.values[None, :], glon, kp[:, None], hemisphere)
-    if hemisphere == 'south':
-        mlat = mlat * -1
     tec_data['model'] = xr.DataArray(
         mlat,
         coords={'time': tec_data.time, 'mlt': tec_data.mlt},
@@ -311,7 +311,7 @@ def get_label_paths(start_date, end_date, hemisphere, processed_dir):
 def get_trough_labels(start_date, end_date, hemisphere, labels_dir=None):
     if labels_dir is None:
         labels_dir = config.processed_labels_dir
-    data = xr.concat([xr.open_dataarray(file) for file in get_label_paths(start_date, end_date, hemisphere, labels_dir)], 'time')
+    data = utils.read_netcdfs(get_label_paths(start_date, end_date, hemisphere, labels_dir), 'time')
     return data.sel(time=slice(start_date, end_date))


18 changes: 18 additions & 0 deletions trough/utils.py
@@ -2,6 +2,7 @@
 import datetime
 import warnings
 import logging
+import xarray as xr
 try:
     import h5py
     from skimage.util import view_as_windows
@@ -153,3 +154,20 @@ def check(start, end, dt, hemisphere, processed_file):
         return True

     return check
+
+
+def read_netcdfs(files, dim):
+    """https://xarray.pydata.org/en/stable/user-guide/io.html#reading-multi-file-datasets
+    """
+    def process_one_path(path):
+        # use a context manager, to ensure the file gets closed after use
+        with xr.open_dataarray(path) as ds:
+            # load all data from the transformed dataset, to ensure we can
+            # use it after closing each original file
+            ds.load()
+            return ds
+
+    paths = sorted(files)
+    datasets = [process_one_path(p) for p in paths]
+    combined = xr.concat(datasets, dim)
+    return combined
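
For context, a minimal usage sketch of the new utils.read_netcdfs helper (the file names below are hypothetical): unlike the previous xr.concat over xr.open_dataarray(...) pattern, which keeps its lazily-loaded files open, each file here is loaded into memory and closed before the pieces are concatenated along the given dimension, following the xarray multi-file guide linked in the docstring.

    from trough import utils

    # hypothetical processed NetCDF files, each holding a DataArray with a 'time' dimension
    files = ["processed/arb_2021_01.nc", "processed/arb_2021_02.nc"]

    # every file is opened, eagerly loaded, and closed, then all pieces are
    # concatenated along 'time', just as the get_*_data functions above do
    data = utils.read_netcdfs(files, "time")
    print(data.sizes)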
