Skip to content

Commit bac1265

Browse files
authored
Reverse type checks for better type inheritance (#8313)
* Reverse type checks
* Remove ignores not needed yet
1 parent 4520ce9 commit bac1265

File tree

1 file changed

+23
-11
lines changed

1 file changed

+23
-11
lines changed

xarray/core/variable.py

+23-11
Original file line number | Diff line number | Diff line change
@@ -901,20 +901,20 @@ def _copy(
901901
if data is None:
902902
data_old = self._data
903903

904-
if isinstance(data_old, indexing.MemoryCachedArray):
904+
if not isinstance(data_old, indexing.MemoryCachedArray):
905+
ndata = data_old
906+
else:
905907
# don't share caching between copies
906908
ndata = indexing.MemoryCachedArray(data_old.array)
907-
else:
908-
ndata = data_old
909909

910910
if deep:
911911
ndata = copy.deepcopy(ndata, memo)
912912

913913
else:
914914
ndata = as_compatible_data(data)
915-
if self.shape != ndata.shape: # type: ignore[attr-defined]
915+
if self.shape != ndata.shape:
916916
raise ValueError(
917-
f"Data shape {ndata.shape} must match shape of object {self.shape}" # type: ignore[attr-defined]
917+
f"Data shape {ndata.shape} must match shape of object {self.shape}"
918918
)
919919

920920
attrs = copy.deepcopy(self._attrs, memo) if deep else copy.copy(self._attrs)
@@ -1043,7 +1043,9 @@ def chunk(
10431043
if chunkmanager.is_chunked_array(data_old):
10441044
data_chunked = chunkmanager.rechunk(data_old, chunks)
10451045
else:
1046-
if isinstance(data_old, indexing.ExplicitlyIndexed):
1046+
if not isinstance(data_old, indexing.ExplicitlyIndexed):
1047+
ndata = data_old
1048+
else:
10471049
# Unambiguously handle array storage backends (like NetCDF4 and h5py)
10481050
# that can't handle general array indexing. For example, in netCDF4 you
10491051
# can do "outer" indexing along two dimensions independent, which works
@@ -1055,8 +1057,6 @@ def chunk(
10551057
ndata = indexing.ImplicitToExplicitIndexingAdapter(
10561058
data_old, indexing.OuterIndexer
10571059
)
1058-
else:
1059-
ndata = data_old
10601060

10611061
if utils.is_dict_like(chunks):
10621062
chunks = tuple(chunks.get(n, s) for n, s in enumerate(ndata.shape))
@@ -1504,7 +1504,9 @@ def _stack_once(self, dims: list[Hashable], new_dim: Hashable):
15041504
new_data = duck_array_ops.reshape(reordered.data, new_shape)
15051505
new_dims = reordered.dims[: len(other_dims)] + (new_dim,)
15061506

1507-
return Variable(new_dims, new_data, self._attrs, self._encoding, fastpath=True)
1507+
return type(self)(
1508+
new_dims, new_data, self._attrs, self._encoding, fastpath=True
1509+
)
15081510

15091511
def stack(self, dimensions=None, **dimensions_kwargs):
15101512
"""
@@ -2760,7 +2762,7 @@ def concat(
27602762

27612763
return cls(first_var.dims, data, attrs)
27622764

2763-
def copy(self, deep: bool = True, data: ArrayLike | None = None):
2765+
def copy(self, deep: bool = True, data: T_DuckArray | ArrayLike | None = None):
27642766
"""Returns a copy of this object.
27652767
27662768
`deep` is ignored since data is stored in the form of
@@ -2785,7 +2787,17 @@ def copy(self, deep: bool = True, data: ArrayLike | None = None):
27852787
data copied from original.
27862788
"""
27872789
if data is None:
2788-
ndata = self._data.copy(deep=deep)
2790+
data_old = self._data
2791+
2792+
if not isinstance(data_old, indexing.MemoryCachedArray):
2793+
ndata = data_old
2794+
else:
2795+
# don't share caching between copies
2796+
ndata = indexing.MemoryCachedArray(data_old.array)
2797+
2798+
if deep:
2799+
ndata = copy.deepcopy(ndata, None)
2800+
27892801
else:
27902802
ndata = as_compatible_data(data)
27912803
if self.shape != ndata.shape:

0 commit comments

Comments (0)