@@ -128,12 +128,42 @@ class Only32BitTypes(object):

class DatasetIOTestCases(object):
    autoclose = False
+    engine = None
+    file_format = None

    def create_store(self):
        raise NotImplementedError

-    def roundtrip(self, data, **kwargs):
-        raise NotImplementedError
+    @contextlib.contextmanager
+    def roundtrip(self, data, save_kwargs={}, open_kwargs={},
+                  allow_cleanup_failure=False):
+        with create_tmp_file(
+                allow_cleanup_failure=allow_cleanup_failure) as path:
+            self.save(data, path, **save_kwargs)
+            with self.open(path, **open_kwargs) as ds:
+                yield ds
+
+    @contextlib.contextmanager
+    def roundtrip_append(self, data, save_kwargs={}, open_kwargs={},
+                         allow_cleanup_failure=False):
+        with create_tmp_file(
+                allow_cleanup_failure=allow_cleanup_failure) as path:
+            for i, key in enumerate(data.variables):
+                mode = 'a' if i > 0 else 'w'
+                self.save(data[[key]], path, mode=mode, **save_kwargs)
+            with self.open(path, **open_kwargs) as ds:
+                yield ds
+
+    # The save/open methods may be overwritten below
+    def save(self, dataset, path, **kwargs):
+        dataset.to_netcdf(path, engine=self.engine, format=self.file_format,
+                          **kwargs)
+
+    @contextlib.contextmanager
+    def open(self, path, **kwargs):
+        with open_dataset(path, engine=self.engine, autoclose=self.autoclose,
+                          **kwargs) as ds:
+            yield ds

    def test_zero_dimensional_variable(self):
        expected = create_test_data()
@@ -563,6 +593,23 @@ def test_encoding_same_dtype(self):
        self.assertEqual(actual.x.encoding['dtype'], 'f4')
        self.assertEqual(ds.x.encoding, {})

+    def test_append_write(self):
+        # regression for GH1215
+        data = create_test_data()
+        with self.roundtrip_append(data) as actual:
+            assert_allclose(data, actual)
+
+    def test_append_overwrite_values(self):
+        # regression for GH1215
+        data = create_test_data()
+        with create_tmp_file(allow_cleanup_failure=False) as tmp_file:
+            self.save(data, tmp_file, mode='w')
+            data['var2'][:] = -999
+            data['var9'] = data['var2'] * 3
+            self.save(data[['var2', 'var9']], tmp_file, mode='a')
+            with self.open(tmp_file) as actual:
+                assert_allclose(data, actual)
+


_counter = itertools.count()
@@ -592,6 +639,9 @@ def create_tmp_files(nfiles, suffix='.nc', allow_cleanup_failure=False):

@requires_netCDF4
class BaseNetCDF4Test(CFEncodedDataTest):
+
+    engine = 'netcdf4'
+
    def test_open_group(self):
        # Create a netCDF file with a dataset stored within a group
        with create_tmp_file() as tmp_file:
@@ -813,16 +863,6 @@ def create_store(self):
            with backends.NetCDF4DataStore.open(tmp_file, mode='w') as store:
                yield store

-    @contextlib.contextmanager
-    def roundtrip(self, data, save_kwargs={}, open_kwargs={},
-                  allow_cleanup_failure=False):
-        with create_tmp_file(
-                allow_cleanup_failure=allow_cleanup_failure) as tmp_file:
-            data.to_netcdf(tmp_file, **save_kwargs)
-            with open_dataset(tmp_file,
-                              autoclose=self.autoclose, **open_kwargs) as ds:
-                yield ds
-
    def test_variable_order(self):
        # doesn't work with scipy or h5py :(
        ds = Dataset()
@@ -883,19 +923,13 @@ class NetCDF4ViaDaskDataTestAutocloseTrue(NetCDF4ViaDaskDataTest):

@requires_scipy
class ScipyInMemoryDataTest(CFEncodedDataTest, Only32BitTypes, TestCase):
+    engine = 'scipy'
+
    @contextlib.contextmanager
    def create_store(self):
        fobj = BytesIO()
        yield backends.ScipyDataStore(fobj, 'w')

-    @contextlib.contextmanager
-    def roundtrip(self, data, save_kwargs={}, open_kwargs={},
-                  allow_cleanup_failure=False):
-        serialized = data.to_netcdf(**save_kwargs)
-        with open_dataset(serialized, engine='scipy',
-                          autoclose=self.autoclose, **open_kwargs) as ds:
-            yield ds
-
    def test_to_netcdf_explicit_engine(self):
        # regression test for GH1321
        Dataset({'foo': 42}).to_netcdf(engine='scipy')
@@ -915,6 +949,8 @@ class ScipyInMemoryDataTestAutocloseTrue(ScipyInMemoryDataTest):

@requires_scipy
class ScipyFileObjectTest(CFEncodedDataTest, Only32BitTypes, TestCase):
+    engine = 'scipy'
+
    @contextlib.contextmanager
    def create_store(self):
        fobj = BytesIO()
@@ -925,9 +961,9 @@ def roundtrip(self, data, save_kwargs={}, open_kwargs={},
                  allow_cleanup_failure=False):
        with create_tmp_file() as tmp_file:
            with open(tmp_file, 'wb') as f:
-                data.to_netcdf(f, **save_kwargs)
+                self.save(data, f, **save_kwargs)
            with open(tmp_file, 'rb') as f:
-                with open_dataset(f, engine='scipy', **open_kwargs) as ds:
+                with self.open(f, **open_kwargs) as ds:
                    yield ds

    @pytest.mark.skip(reason='cannot pickle file objects')
@@ -941,22 +977,14 @@ def test_pickle_dataarray(self):

@requires_scipy
class ScipyFilePathTest(CFEncodedDataTest, Only32BitTypes, TestCase):
+    engine = 'scipy'
+
    @contextlib.contextmanager
    def create_store(self):
        with create_tmp_file() as tmp_file:
            with backends.ScipyDataStore(tmp_file, mode='w') as store:
                yield store

-    @contextlib.contextmanager
-    def roundtrip(self, data, save_kwargs={}, open_kwargs={},
-                  allow_cleanup_failure=False):
-        with create_tmp_file(
-                allow_cleanup_failure=allow_cleanup_failure) as tmp_file:
-            data.to_netcdf(tmp_file, engine='scipy', **save_kwargs)
-            with open_dataset(tmp_file, engine='scipy',
-                              autoclose=self.autoclose, **open_kwargs) as ds:
-                yield ds
-
    def test_array_attrs(self):
        ds = Dataset(attrs={'foo': [[1, 2], [3, 4]]})
        with self.assertRaisesRegexp(ValueError, 'must be 1-dimensional'):
@@ -995,24 +1023,16 @@ class ScipyFilePathTestAutocloseTrue(ScipyFilePathTest):

@requires_netCDF4
class NetCDF3ViaNetCDF4DataTest(CFEncodedDataTest, Only32BitTypes, TestCase):
+    engine = 'netcdf4'
+    file_format = 'NETCDF3_CLASSIC'
+
    @contextlib.contextmanager
    def create_store(self):
        with create_tmp_file() as tmp_file:
            with backends.NetCDF4DataStore.open(
                    tmp_file, mode='w', format='NETCDF3_CLASSIC') as store:
                yield store

-    @contextlib.contextmanager
-    def roundtrip(self, data, save_kwargs={}, open_kwargs={},
-                  allow_cleanup_failure=False):
-        with create_tmp_file(
-                allow_cleanup_failure=allow_cleanup_failure) as tmp_file:
-            data.to_netcdf(tmp_file, format='NETCDF3_CLASSIC',
-                           engine='netcdf4', **save_kwargs)
-            with open_dataset(tmp_file, engine='netcdf4',
-                              autoclose=self.autoclose, **open_kwargs) as ds:
-                yield ds
-

class NetCDF3ViaNetCDF4DataTestAutocloseTrue(NetCDF3ViaNetCDF4DataTest):
    autoclose = True
@@ -1021,24 +1041,16 @@ class NetCDF3ViaNetCDF4DataTestAutocloseTrue(NetCDF3ViaNetCDF4DataTest):
@requires_netCDF4
class NetCDF4ClassicViaNetCDF4DataTest(CFEncodedDataTest, Only32BitTypes,
                                       TestCase):
+    engine = 'netcdf4'
+    file_format = 'NETCDF4_CLASSIC'
+
    @contextlib.contextmanager
    def create_store(self):
        with create_tmp_file() as tmp_file:
            with backends.NetCDF4DataStore.open(
                    tmp_file, mode='w', format='NETCDF4_CLASSIC') as store:
                yield store

-    @contextlib.contextmanager
-    def roundtrip(self, data, save_kwargs={}, open_kwargs={},
-                  allow_cleanup_failure=False):
-        with create_tmp_file(
-                allow_cleanup_failure=allow_cleanup_failure) as tmp_file:
-            data.to_netcdf(tmp_file, format='NETCDF4_CLASSIC',
-                           engine='netcdf4', **save_kwargs)
-            with open_dataset(tmp_file, engine='netcdf4',
-                              autoclose=self.autoclose, **open_kwargs) as ds:
-                yield ds
-

class NetCDF4ClassicViaNetCDF4DataTestAutocloseTrue(
        NetCDF4ClassicViaNetCDF4DataTest):
@@ -1049,21 +1061,12 @@ class NetCDF4ClassicViaNetCDF4DataTestAutocloseTrue(
class GenericNetCDFDataTest(CFEncodedDataTest, Only32BitTypes, TestCase):
    # verify that we can read and write netCDF3 files as long as we have scipy
    # or netCDF4-python installed
+    file_format = 'netcdf3_64bit'

    def test_write_store(self):
        # there's no specific store to test here
        pass

-    @contextlib.contextmanager
-    def roundtrip(self, data, save_kwargs={}, open_kwargs={},
-                  allow_cleanup_failure=False):
-        with create_tmp_file(
-                allow_cleanup_failure=allow_cleanup_failure) as tmp_file:
-            data.to_netcdf(tmp_file, format='netcdf3_64bit', **save_kwargs)
-            with open_dataset(tmp_file,
-                              autoclose=self.autoclose, **open_kwargs) as ds:
-                yield ds
-
    def test_engine(self):
        data = create_test_data()
        with self.assertRaisesRegexp(ValueError, 'unrecognized engine'):
@@ -1122,21 +1125,13 @@ class GenericNetCDFDataTestAutocloseTrue(GenericNetCDFDataTest):
@requires_h5netcdf
@requires_netCDF4
class H5NetCDFDataTest(BaseNetCDF4Test, TestCase):
+    engine = 'h5netcdf'
+
    @contextlib.contextmanager
    def create_store(self):
        with create_tmp_file() as tmp_file:
            yield backends.H5NetCDFStore(tmp_file, 'w')

-    @contextlib.contextmanager
-    def roundtrip(self, data, save_kwargs={}, open_kwargs={},
-                  allow_cleanup_failure=False):
-        with create_tmp_file(
-                allow_cleanup_failure=allow_cleanup_failure) as tmp_file:
-            data.to_netcdf(tmp_file, engine='h5netcdf', **save_kwargs)
-            with open_dataset(tmp_file, engine='h5netcdf',
-                              autoclose=self.autoclose, **open_kwargs) as ds:
-                yield ds
-
    def test_orthogonal_indexing(self):
        # doesn't work for h5py (without using dask as an intermediate layer)
        pass
@@ -1646,14 +1641,13 @@ def test_orthogonal_indexing(self):
        pass

    @contextlib.contextmanager
-    def roundtrip(self, data, save_kwargs={}, open_kwargs={},
-                  allow_cleanup_failure=False):
-        with create_tmp_file(
-                allow_cleanup_failure=allow_cleanup_failure) as tmp_file:
-            data.to_netcdf(tmp_file, engine='scipy', **save_kwargs)
-            with open_dataset(tmp_file, engine='pynio',
-                              autoclose=self.autoclose, **open_kwargs) as ds:
-                yield ds
+    def open(self, path, **kwargs):
+        with open_dataset(path, engine='pynio', autoclose=self.autoclose,
+                          **kwargs) as ds:
+            yield ds
+
+    def save(self, dataset, path, **kwargs):
+        dataset.to_netcdf(path, engine='scipy', **kwargs)

    def test_weakrefs(self):
        example = Dataset({'foo': ('x', np.arange(5.0))})