Remove datetime workaround logic #273

Merged (17 commits, Jun 4, 2018)
aospy/calc.py (4 changes: 3 additions, 1 deletion)

@@ -82,7 +82,9 @@ def _file_name(self, dtype_out_time, extension='nc'):
             dtype_vert=self.dtype_out_vert)
         in_lbl = utils.io.data_in_label(self.intvl_in, self.dtype_in_time,
                                         self.dtype_in_vert)
-        yr_lbl = utils.io.yr_label((self.start_date.year, self.end_date.year))
+        start_year = utils.times.infer_year(self.start_date)
+        end_year = utils.times.infer_year(self.end_date)
+        yr_lbl = utils.io.yr_label((start_year, end_year))
         return '.'.join(
             [self.name, out_lbl, in_lbl, self.model.name,
              self.run.name, yr_lbl, extension]
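The substantive change here is replacing the `.year` attribute lookups with `utils.times.infer_year`, whose job is to pull a year out of a date without assuming a particular date class. A rough sketch of such a helper, covering a few common cases (illustrative only, not aospy's actual implementation):

```python
from datetime import datetime

import numpy as np


def infer_year_sketch(date):
    """Best-effort year extraction for mixed date types (illustrative)."""
    if isinstance(date, str):
        # e.g. '0004-01-01' -> 4
        return int(date.split('-')[0])
    if isinstance(date, np.datetime64):
        # datetime64 has no .year attribute; read it from the ISO string.
        return int(str(date)[:4])
    # datetime.datetime and the cftime date classes expose .year directly.
    return date.year


print(infer_year_sketch(datetime(4, 1, 1)))            # 4
print(infer_year_sketch(np.datetime64('2018-06-04')))  # 2018
```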
aospy/data_loader.py (27 changes: 13 additions, 14 deletions)

@@ -1,7 +1,6 @@
 """aospy DataLoader objects"""
 import logging
 import os
-import warnings
 
 import numpy as np
 import xarray as xr
@@ -180,7 +179,6 @@ def _prep_time_data(ds):
         The processed Dataset and minimum and maximum years in the loaded data

[Review comment, Owner] Update docstring now that only ds is returned

     """
     ds = times.ensure_time_as_index(ds)
-    ds, min_year, max_year = times.numpy_datetime_workaround_encode_cf(ds)
     if TIME_BOUNDS_STR in ds:
         ds = times.ensure_time_avg_has_cf_metadata(ds)
         ds[TIME_STR] = times.average_time_bounds(ds)
@@ -189,10 +187,10 @@ def _prep_time_data(ds):
                        "values in time, even though this may not be "
                        "the case")
         ds = times.add_uniform_time_weights(ds)
-    with warnings.catch_warnings(record=True):
+    with xr.set_options(enable_cftimeindex=True):
         ds = xr.decode_cf(ds, decode_times=True, decode_coords=False,
                           mask_and_scale=True)
-    return ds, min_year, max_year
+    return ds


 def _load_data_from_disk(file_set, preprocess_func=lambda ds: ds,
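For context, the new code path leans on xarray's CF decoding producing cftime dates for calendars and years that nanosecond `np.datetime64` cannot hold, instead of shifting years the way the removed workaround did. A small self-contained sketch under that assumption (the dataset below is made up; `enable_cftimeindex` is the opt-in flag in the xarray versions this PR targets and is no longer needed, or accepted, in later releases):

```python
import numpy as np
import xarray as xr

# A tiny dataset whose time axis starts in year 4, far outside the
# np.datetime64[ns] range that forced the old workaround.
ds = xr.Dataset(
    {'temp': ('time', np.arange(3.0))},
    coords={'time': ('time', [0, 30, 60],
                     {'units': 'days since 0004-01-01 00:00:00',
                      'calendar': 'noleap'})},
)

# With the cftime-backed index enabled, decoding yields
# cftime.DatetimeNoLeap timestamps rather than overflowing datetime64.
with xr.set_options(enable_cftimeindex=True):
    decoded = xr.decode_cf(ds, decode_times=True, decode_coords=False,
                           mask_and_scale=True)
print(decoded['time'].values[:2])
```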
@@ -280,17 +278,15 @@ def load_variable(self, var=None, start_date=None, end_date=None,
             coords=self.coords, start_date=start_date, end_date=end_date,
             time_offset=time_offset, **DataAttrs
         )
-
-        ds, min_year, max_year = _prep_time_data(ds)
+        ds = _prep_time_data(ds)
+        start_date = times.maybe_convert_to_index_date_type(
+            ds.indexes[TIME_STR], start_date)
+        end_date = times.maybe_convert_to_index_date_type(
+            ds.indexes[TIME_STR], end_date)
         ds = set_grid_attrs_as_coords(ds)
         da = _sel_var(ds, var, self.upcast_float32)
         da = self._maybe_apply_time_shift(da, time_offset, **DataAttrs)
-
-        start_date_xarray = times.numpy_datetime_range_workaround(
-            start_date, min_year, max_year)
-        end_date_xarray = start_date_xarray + (end_date - start_date)
-        return times.sel_time(da, np.datetime64(start_date_xarray),
-                              np.datetime64(end_date_xarray)).load()
+        return times.sel_time(da, start_date, end_date).load()

     def recursively_compute_variable(self, var, start_date=None, end_date=None,
                                      time_offset=None, **DataAttrs):
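Once `start_date` and `end_date` have been converted to the index's own date type, plain label-based selection is all `sel_time` needs, which is what lets `numpy_datetime_range_workaround` and the `np.datetime64` casts disappear. Roughly what that selection looks like on a cftime-indexed array (the data and dates below are invented for illustration):

```python
import cftime
import numpy as np
import xarray as xr

# Monthly samples for simulation years 4-6 in a noleap calendar.
dates = [cftime.DatetimeNoLeap(year, month, 1)
         for year in range(4, 7) for month in range(1, 13)]
da = xr.DataArray(np.arange(len(dates), dtype=float),
                  coords=[('time', dates)])

# Label-based slicing with dates of the same type as the index; recent
# xarray builds a CFTimeIndex by default, while the versions this PR
# targets need xr.set_options(enable_cftimeindex=True) first.
subset = da.sel(time=slice(cftime.DatetimeNoLeap(4, 6, 1),
                           cftime.DatetimeNoLeap(5, 6, 1)))
print(subset.sizes['time'])  # 13 months, endpoints inclusive
```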
@@ -609,10 +605,13 @@ def _input_data_paths_gfdl(self, name, start_date, end_date, domain,
         else:
             subdir = os.path.join(intvl_in, dur_str)
         direc = os.path.join(self.data_direc, domain, dtype_lbl, subdir)
+        data_start_year = times.infer_year(self.data_start_date)
+        start_year = times.infer_year(start_date)
+        end_year = times.infer_year(end_date)
         files = [os.path.join(direc, io.data_name_gfdl(
             name, domain, dtype, intvl_in, year, intvl_out,
-            self.data_start_date.year, self.data_dur))
-                 for year in range(start_date.year, end_date.year + 1)]
+            data_start_year, self.data_dur))
+                 for year in range(start_year, end_year + 1)]
         files = list(set(files))
         files.sort()
         return files
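The path-building change follows the same pattern: extract integer years once, then let a plain `range` drive the per-year file names. A hypothetical sketch (the `year_of` helper and the file-name pattern are made up for illustration and are not aospy's `io.data_name_gfdl`):

```python
import os
from datetime import datetime

from cftime import DatetimeNoLeap


def year_of(date):
    """Year of a datetime-like object; datetime and cftime dates both have .year."""
    return date.year


def paths_for_range(direc, start_date, end_date):
    # range() needs plain integers, which is why the loader extracts the
    # years up front instead of passing date objects around.
    years = range(year_of(start_date), year_of(end_date) + 1)
    files = [os.path.join(direc, 'atmos.{:04d}0101-{:04d}1231.nc'.format(y, y))
             for y in years]
    return sorted(set(files))


print(paths_for_range('/archive/run1', DatetimeNoLeap(4, 1, 1),
                      datetime(6, 12, 31)))
```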
aospy/examples/example_obj_lib.py (6 changes: 3 additions, 3 deletions)

@@ -1,5 +1,5 @@
 """Sample aospy object library using the included example data."""
-import datetime
+from datetime import datetime
 import os
 
 import aospy
@@ -17,8 +17,8 @@
     description=(
         'Control simulation of the idealized moist model'
     ),
-    default_start_date=datetime.datetime(4, 1, 1),
-    default_end_date=datetime.datetime(6, 12, 31),
+    default_start_date=datetime(4, 1, 1),
+    default_end_date=datetime(6, 12, 31),
     data_loader=DictDataLoader(_file_map)
 )
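The year 4-6 defaults in this example library are exactly the dates that motivated the old workaround: nanosecond-precision `np.datetime64`, which backs a standard pandas `DatetimeIndex`, cannot represent them. One way to see the representable window:

```python
import numpy as np

# Smallest and largest values representable by nanosecond-precision
# datetime64 (int64 nanoseconds since the Unix epoch; the minimum int64
# value itself is reserved for NaT, hence the +1).
print(np.datetime64(np.iinfo(np.int64).min + 1, 'ns'))  # ~1677-09-21
print(np.datetime64(np.iinfo(np.int64).max, 'ns'))      # ~2262-04-11
```

Years 4 through 6 fall far outside that window, so with the workaround gone they are represented either as plain `datetime` objects (as here) or as `cftime` dates (as in the test objects below).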
aospy/examples/tutorial.ipynb (282 changes: 129 additions, 153 deletions)

Large diffs are not rendered by default.

aospy/test/data/objects/examples.py (7 changes: 4 additions, 3 deletions)

@@ -1,6 +1,7 @@
 from datetime import datetime
 import os
 
+from cftime import DatetimeNoLeap
 
 from aospy import Proj, Model, Run, Var, Region
 from aospy.data_loader import NestedDictDataLoader

[Review comment, Owner] This means we'll need to add cftime as a required dependency, right?

[Reply, Collaborator and PR author] Good catch. cftime is a required dependency of netcdf4 now, but we can add it to our setup.py to be explicit and also maintain support for older versions of netcdf4.
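A hedged sketch of what declaring cftime explicitly in `setup.py` could look like; the package list and layout below are illustrative and not taken from aospy's actual `setup.py`:

```python
# setup.py (illustrative excerpt)
from setuptools import find_packages, setup

setup(
    name='aospy',
    packages=find_packages(),
    install_requires=[
        'numpy',
        'xarray',
        'netCDF4',
        'cftime',  # explicit, even though recent netCDF4 already requires it
    ],
)
```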

@@ -25,8 +26,8 @@ def total_precipitation(convection_rain, condensation_rain):
         'Control simulation of the idealized moist model'
     ),
     data_loader=NestedDictDataLoader(file_map),
-    default_start_date=datetime(4, 1, 1),
-    default_end_date=datetime(6, 12, 31)
+    default_start_date=DatetimeNoLeap(4, 1, 1),
+    default_end_date=DatetimeNoLeap(6, 12, 31)
 )

 example_model = Model(
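Switching the defaults to `cftime.DatetimeNoLeap` also makes the calendar explicit: in the noleap calendar every year is 365 days, which matches the idealized-model output these test objects point at. For example:

```python
from cftime import DatetimeNoLeap

# Year 4 would be a leap year in the proleptic Gregorian calendar, but in
# the noleap calendar it still spans 365 days.
span = DatetimeNoLeap(5, 1, 1) - DatetimeNoLeap(4, 1, 1)
print(span.days)  # 365
```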