 from .merge import (dataset_update_method, dataset_merge_method,
                     merge_data_and_coords)
 from .utils import Frozen, SortedKeysDict, maybe_wrap_array, hashable
-from .variable import (Variable, as_variable, Coordinate, broadcast_variables)
+from .variable import (Variable, as_variable, IndexVariable, broadcast_variables)
 from .pycompat import (iteritems, basestring, OrderedDict,
                        dask_array_type)
 from .combine import concat
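Note on the rename above: the `Coordinate` class is now called `IndexVariable`. A minimal migration sketch for downstream code, assuming the new name is re-exported at the xarray top level just as `Coordinate` was (the constructor shape is unchanged):

    import xarray as xr

    # before this change: coord = xr.Coordinate('x', [10, 20, 30])
    coord = xr.IndexVariable('x', [10, 20, 30])  # 1-D variable backed by a pandas.Index
    print(coord.dims, coord.values)              # ('x',) [10 20 30]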
@@ -157,7 +157,7 @@ class Dataset(Mapping, ImplementsDatasetReduce, BaseDataObject,
     groupby_cls = groupby.DatasetGroupBy
 
     def __init__(self, data_vars=None, coords=None, attrs=None,
-                 compat='broadcast_equals', **kwargs):
+                 compat='broadcast_equals'):
         """To load data from a file or file-like object, use the `open_dataset`
         function.
 
@@ -183,7 +183,7 @@ def __init__(self, data_vars=None, coords=None, attrs=None,
             Global attributes to save on this dataset.
         compat : {'broadcast_equals', 'equals', 'identical'}, optional
             String indicating how to compare variables of the same name for
-            potential conflicts:
+            potential conflicts when initializing this dataset:
 
             - 'broadcast_equals': all values must be equal when variables are
               broadcast against each other to ensure common dimensions.
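A rough illustration of the documented `compat` behaviour (the variable names and values below are invented for the example): the 'x' coordinate is supplied both on the DataArray and in `coords`, and under the default 'broadcast_equals' the two definitions only need to be equal after broadcasting, whereas 'identical' would also compare names and attributes:

    import numpy as np
    import xarray as xr

    temp = xr.DataArray(np.array([11.2, 12.5, 13.1]), dims='x',
                        coords={'x': [10, 20, 30]})
    ds = xr.Dataset(data_vars={'temperature': temp},
                    coords={'x': [10, 20, 30]},  # must agree with temp's 'x'
                    compat='broadcast_equals')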
@@ -196,14 +196,6 @@ def __init__(self, data_vars=None, coords=None, attrs=None,
         self._dims = {}
         self._attrs = None
         self._file_obj = None
-        if kwargs:
-            if 'variables' in kwargs:
-                data_vars = kwargs.pop('variables')
-                warnings.warn('`variables` kwarg is deprecated. Use '
-                              '`data_vars` instead.', stacklevel=2)
-            if kwargs:
-                raise TypeError(
-                    '{0} are not valid kwargs'.format(kwargs.keys()))
         if data_vars is None:
             data_vars = {}
         if coords is None:
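With the deprecation shim removed, passing the old `variables=` keyword now fails with an ordinary unexpected-keyword TypeError, so the only migration for callers is the rename, roughly:

    import xarray as xr

    # before (deprecated, now a TypeError): xr.Dataset(variables={'t': ('x', [1, 2, 3])})
    ds = xr.Dataset(data_vars={'t': ('x', [1, 2, 3])})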
@@ -326,12 +318,6 @@ def load(self):
 
         return self
 
-    def load_data(self):  # pragma: no cover
-        warnings.warn('the Dataset method `load_data` has been deprecated; '
-                      'use `load` instead',
-                      FutureWarning, stacklevel=2)
-        return self.load()
-
     @classmethod
     def _construct_direct(cls, variables, coord_names, dims=None, attrs=None,
                           file_obj=None):
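The removed `load_data` alias simply forwarded to `load`, so the migration is a one-line rename (the file name below is a placeholder):

    import xarray as xr

    ds = xr.open_dataset('data.nc')   # placeholder path
    # before: ds.load_data()
    ds.load()                         # eagerly load any lazy (e.g. dask-backed) arrays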
@@ -398,7 +384,7 @@ def _replace_indexes(self, indexes):
             return self
         variables = self._variables.copy()
         for name, idx in indexes.items():
-            variables[name] = Coordinate(name, idx)
+            variables[name] = IndexVariable(name, idx)
         obj = self._replace_vars_and_dims(variables)
 
         # switch from dimension to level names, if necessary
@@ -641,13 +627,6 @@ def data_vars(self):
         """
         return DataVariables(self)
 
-    @property
-    def vars(self):  # pragma: no cover
-        warnings.warn('the Dataset property `vars` has been deprecated; '
-                      'use `data_vars` instead',
-                      FutureWarning, stacklevel=2)
-        return self.data_vars
-
     def set_coords(self, names, inplace=False):
         """Given names of one or more variables, set them as coordinates
 
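Likewise for the removed `vars` property, which was only an alias for `data_vars`; a sketch with a throwaway dataset:

    import xarray as xr

    ds = xr.Dataset(data_vars={'t': ('x', [1, 2, 3])}, coords={'x': [10, 20, 30]})
    # before: list(ds.vars)
    print(list(ds.data_vars))   # ['t']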
@@ -781,9 +760,6 @@ def to_netcdf(self, path=None, mode='w', format=None, group=None,
         return to_netcdf(self, path, mode, format=format, group=group,
                          engine=engine, encoding=encoding)
 
-    dump = utils.function_alias(to_netcdf, 'dump')
-    dumps = utils.function_alias(to_netcdf, 'dumps')
-
     def __unicode__(self):
         return formatting.dataset_repr(self)
 
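The removed `dump`/`dumps` aliases both pointed at `to_netcdf`, which already covers both uses: with a path it writes a file, and (at least with the scipy backend) with no path it returns the serialized file as bytes. A sketch, with a placeholder file name:

    import xarray as xr

    ds = xr.Dataset({'t': ('x', [1, 2, 3])})
    # before: ds.dump('out.nc') and ds.dumps()
    ds.to_netcdf('out.nc')      # placeholder path
    nc_bytes = ds.to_netcdf()   # no path: returns the file contents as bytes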
@@ -1303,7 +1279,10 @@ def swap_dims(self, dims_dict, inplace=False):
 
         for k, v in iteritems(self.variables):
             dims = tuple(dims_dict.get(dim, dim) for dim in v.dims)
-            var = v.to_coord() if k in result_dims else v.to_variable()
+            if k in result_dims:
+                var = v.to_index_variable()
+            else:
+                var = v.to_base_variable()
             var.dims = dims
             variables[k] = var
 
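The old `Variable.to_coord`/`to_variable` pair is renamed to `to_index_variable`/`to_base_variable`, which `swap_dims` now spells out over two branches. A sketch of the renamed helpers, assuming they are public on `Variable` as the old ones were:

    import xarray as xr

    v = xr.Variable('x', [10, 20, 30])
    iv = v.to_index_variable()     # was v.to_coord(): promote to an IndexVariable
    back = iv.to_base_variable()   # was iv.to_variable(): back to a plain Variable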
@@ -1326,7 +1305,7 @@ def _stack_once(self, dims, new_dim):
 
         idx = utils.multiindex_from_product_levels(
             [self.indexes[d] for d in dims], names=dims)
-        variables[new_dim] = Coordinate(new_dim, idx)
+        variables[new_dim] = IndexVariable(new_dim, idx)
 
         coord_names = set(self._coord_names) - set(dims) | set([new_dim])
 
@@ -1404,7 +1383,7 @@ def unstack(self, dim):
             variables[name] = var
 
         for name, lev in zip(new_dim_names, index.levels):
-            variables[name] = Coordinate(name, lev)
+            variables[name] = IndexVariable(name, lev)
 
         coord_names = set(self._coord_names) - set([dim]) | set(new_dim_names)
 
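Both the stack and unstack hunks only swap the class used for the index-backed coordinates; user-facing behaviour is unchanged. A small round-trip sketch (names invented for the example):

    import numpy as np
    import xarray as xr

    ds = xr.Dataset({'t': (('x', 'y'), np.arange(6).reshape(2, 3))},
                    coords={'x': ['a', 'b'], 'y': [0, 1, 2]})
    stacked = ds.stack(z=('x', 'y'))   # 'z' is an IndexVariable over a MultiIndex
    roundtrip = stacked.unstack('z')   # 'x' and 'y' come back as IndexVariables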
@@ -1533,12 +1512,6 @@ def _drop_vars(self, names):
         coord_names = set(k for k in self._coord_names if k in variables)
         return self._replace_vars_and_dims(variables, coord_names)
 
-    def drop_vars(self, *names):  # pragma: no cover
-        warnings.warn('the Dataset method `drop_vars` has been deprecated; '
-                      'use `drop` instead',
-                      FutureWarning, stacklevel=2)
-        return self.drop(names)
-
     def transpose(self, *dims):
         """Return a new Dataset object with all array dimensions transposed.
 
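Finally, the removed `drop_vars` alias forwarded to `drop`, which is the replacement named in the old warning; a sketch:

    import xarray as xr

    ds = xr.Dataset({'t': ('x', [1, 2, 3]), 'u': ('x', [4, 5, 6])})
    # before: ds.drop_vars('u')
    smaller = ds.drop('u')    # 'u' is removed; 't' and the coordinates remain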