Skip to content

Commit

Permalink
Last batch of ruff rules (zarr-developers#658)
Browse files Browse the repository at this point in the history
* Enforce ruff rule RUF001

Disable the rule directly in the offending test file.

* Enforce ruff/pyupgrade rule UP007

UP007 Use `X | Y` for type annotations

* Enforce ruff/flake8-bugbear rules B028 and B904

* Exclude c-blosc from ruff linting

* Ignore ruff/pycodestyle rule W391

astral-sh/ruff#13763

* Enforce ruff/flake8-logging rules (LOG)

* Enforce ruff/flake8-implicit-str-concat rules (ISC)

* Enforce ruff/flake8-executable rules (EXE)

* Enforce ruff/flake8-future-annotations rules (FA)

* Enforce ruff/flake8-return rules (RET)

* Enforce ruff/flake8-slots rules (SLOT)

* Add "numcodecs" to "src"

The directories to consider when resolving first- vs. third-party imports.

* Enforce ruff/flake8-simplify rules (SIM)

* Enforce ruff/flake8-tidy-imports rules (TID)

* Conflicting ruff lint rules

The linter includes some rules that, when enabled, can cause conflicts
with the formatter, leading to unexpected behavior. None of these
rules are included in Ruff's default configuration. However, since we
have enabled some relevant rule sets, we disable these rules.

* Enforce ruff/tryceratops rules (TRY)

* Update ruff to 0.8.0

* Ignore ruff/flake8-simplify rule SIM105

SIM105 Use `contextlib.suppress(ImportError)` instead of `try`-`except`-`pass`

* Ignore ruff/tryceratops rule TRY301

TRY301 Abstract `raise` to an inner function

* Apply ruff/flake8-bugbear rule B904

B904 Within an `except` clause, raise exceptions with `raise ... from err`
     or `raise ... from None` to distinguish them from errors in exception
     handling

* Apply ruff/flake8-bugbear rule B028

B028 No explicit `stacklevel` keyword argument found

* Apply ruff rule RUF022

RUF022 `__all__` is not sorted
  • Loading branch information
DimitriPapadopoulos authored Dec 28, 2024
1 parent bfa1c0d commit 9cd6bde
Show file tree
Hide file tree
Showing 18 changed files with 74 additions and 82 deletions.
3 changes: 1 addition & 2 deletions adhoc/blosc_memleak_check.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
import sys

import numcodecs
import numpy as np
from numpy.testing import assert_array_equal

import numcodecs

codec = numcodecs.Blosc()
data = np.arange(int(sys.argv[1]))
for _ in range(int(sys.argv[2])):
Expand Down
3 changes: 1 addition & 2 deletions notebooks/benchmark_vlen.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,8 @@
}
],
"source": [
"import numpy as np\n",
"\n",
"import numcodecs\n",
"import numpy as np\n",
"\n",
"numcodecs.__version__"
]
Expand Down
8 changes: 2 additions & 6 deletions numcodecs/astype.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,9 +49,7 @@ def encode(self, buf):
arr = ensure_ndarray(buf).view(self.decode_dtype)

# convert and copy
enc = arr.astype(self.encode_dtype)

return enc
return arr.astype(self.encode_dtype)

def decode(self, buf, out=None):
# normalise input
Expand All @@ -61,9 +59,7 @@ def decode(self, buf, out=None):
dec = enc.astype(self.decode_dtype)

# handle output
out = ndarray_copy(dec, out)

return out
return ndarray_copy(dec, out)

def get_config(self):
return {
Expand Down
3 changes: 1 addition & 2 deletions numcodecs/base64.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,7 @@ def encode(self, buf):
# normalise inputs
buf = ensure_contiguous_ndarray(buf)
# do compression
compressed = _base64.standard_b64encode(buf)
return compressed
return _base64.standard_b64encode(buf)

def decode(self, buf, out=None):
# normalise inputs
Expand Down
7 changes: 2 additions & 5 deletions numcodecs/categorize.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,18 +80,15 @@ def decode(self, buf, out=None):
dec[enc == (i + 1)] = label

# handle output
dec = ndarray_copy(dec, out)

return dec
return ndarray_copy(dec, out)

def get_config(self):
config = {
return {
'id': self.codec_id,
'labels': self.labels,
'dtype': self.dtype.str,
'astype': self.astype.str,
}
return config

def __repr__(self):
# make sure labels part is not too long
Expand Down
4 changes: 1 addition & 3 deletions numcodecs/delta.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,9 +85,7 @@ def decode(self, buf, out=None):
np.cumsum(enc, out=dec)

# handle output
out = ndarray_copy(dec, out)

return out
return ndarray_copy(dec, out)

def get_config(self):
# override to handle encoding dtypes
Expand Down
4 changes: 1 addition & 3 deletions numcodecs/fixedscaleoffset.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,9 +94,7 @@ def encode(self, buf):
enc = np.around(enc)

# convert dtype
enc = enc.astype(self.astype, copy=False)

return enc
return enc.astype(self.astype, copy=False)

def decode(self, buf, out=None):
# interpret buffer as numpy array
Expand Down
4 changes: 1 addition & 3 deletions numcodecs/gzip.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,7 @@ def encode(self, buf):
compressed = io.BytesIO()
with _gzip.GzipFile(fileobj=compressed, mode='wb', compresslevel=self.level) as compressor:
compressor.write(buf)
compressed = compressed.getvalue()

return compressed
return compressed.getvalue()

# noinspection PyMethodMayBeStatic
def decode(self, buf, out=None):
Expand Down
2 changes: 1 addition & 1 deletion numcodecs/lzma.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
try:
import lzma as _lzma
except ImportError: # pragma: no cover
try:
try: # noqa: SIM105
from backports import lzma as _lzma # type: ignore[no-redef]
except ImportError:
pass
Expand Down
4 changes: 2 additions & 2 deletions numcodecs/ndarray_like.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import Any, ClassVar, Optional, Protocol, runtime_checkable
from typing import Any, ClassVar, Protocol, runtime_checkable


class _CachedProtocolMeta(Protocol.__class__): # type: ignore[name-defined]
Expand Down Expand Up @@ -53,7 +53,7 @@ def __getitem__(self, key) -> Any: ... # pragma: no cover

def __setitem__(self, key, value): ... # pragma: no cover

def tobytes(self, order: Optional[str] = ...) -> bytes: ... # pragma: no cover
def tobytes(self, order: str | None = ...) -> bytes: ... # pragma: no cover

def reshape(self, *shape: int, order: str = ...) -> "NDArrayLike": ... # pragma: no cover

Expand Down
7 changes: 3 additions & 4 deletions numcodecs/pcodec.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import Literal, Optional
from typing import Literal

from numcodecs.abc import Codec
from numcodecs.compat import ensure_contiguous_ndarray
Expand Down Expand Up @@ -55,7 +55,7 @@ def __init__(
mode_spec: Literal["auto", "classic"] = "auto",
delta_spec: Literal["auto", "none", "try_consecutive", "try_lookback"] = "auto",
paging_spec: Literal["equal_pages_up_to"] = "equal_pages_up_to",
delta_encoding_order: Optional[int] = None,
delta_encoding_order: int | None = None,
equal_pages_up_to: int = DEFAULT_MAX_PAGE_N,
):
if standalone is None: # pragma: no cover
Expand Down Expand Up @@ -105,13 +105,12 @@ def _get_chunk_config(self):
case _:
raise ValueError(f"paging_spec {self.paging_spec} is not supported")

config = ChunkConfig(
return ChunkConfig(
compression_level=self.level,
delta_spec=delta_spec,
mode_spec=mode_spec,
paging_spec=paging_spec,
)
return config

def encode(self, buf):
buf = ensure_contiguous_ndarray(buf)
Expand Down
4 changes: 1 addition & 3 deletions numcodecs/quantize.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,9 +73,7 @@ def encode(self, buf):
enc = np.around(scale * arr) / scale

# cast dtype
enc = enc.astype(self.astype, copy=False)

return enc
return enc.astype(self.astype, copy=False)

def decode(self, buf, out=None):
# filter is lossy, decoding is no-op
Expand Down
7 changes: 3 additions & 4 deletions numcodecs/tests/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,7 @@
import pytest
from numpy.testing import assert_array_almost_equal, assert_array_equal

# star import needed for repr tests so eval finds names
from numcodecs import * # noqa: F403
from numcodecs import * # noqa: F403 # for eval to find names in repr tests
from numcodecs.compat import ensure_bytes, ensure_ndarray
from numcodecs.registry import get_codec

Expand All @@ -19,9 +18,9 @@
'Hei maailma!',
'Xin chào thế giới',
'Njatjeta Botë!',
'Γεια σου κόσμε!',
'Γεια σου κόσμε!', # noqa: RUF001
'こんにちは世界',
'世界,你好!',
'世界,你好!', # noqa: RUF001
'Helló, világ!',
'Zdravo svete!',
'เฮลโลเวิลด์',
Expand Down
6 changes: 2 additions & 4 deletions numcodecs/tests/test_blosc.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,14 +215,12 @@ def test_backwards_compatibility():

def _encode_worker(data):
compressor = Blosc(cname='zlib', clevel=9, shuffle=Blosc.SHUFFLE)
enc = compressor.encode(data)
return enc
return compressor.encode(data)


def _decode_worker(enc):
compressor = Blosc()
data = compressor.decode(enc)
return data
return compressor.decode(enc)


@pytest.mark.parametrize('pool', [Pool, ThreadPool])
Expand Down
6 changes: 2 additions & 4 deletions numcodecs/tests/test_shuffle.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,14 +79,12 @@ def test_eq():

def _encode_worker(data):
compressor = Shuffle()
enc = compressor.encode(data)
return enc
return compressor.encode(data)


def _decode_worker(enc):
compressor = Shuffle()
data = compressor.decode(enc)
return data
return compressor.decode(enc)


@pytest.mark.parametrize('pool', [Pool, ThreadPool])
Expand Down
7 changes: 5 additions & 2 deletions numcodecs/zarr3.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,8 +40,10 @@

if zarr.__version__ < "3.0.0": # pragma: no cover
raise ImportError("zarr 3.0.0 or later is required to use the numcodecs zarr integration.")
except ImportError: # pragma: no cover
raise ImportError("zarr 3.0.0 or later is required to use the numcodecs zarr integration.")
except ImportError as e: # pragma: no cover
raise ImportError(
"zarr 3.0.0 or later is required to use the numcodecs zarr integration."
) from e

from zarr.abc.codec import ArrayArrayCodec, ArrayBytesCodec, BytesBytesCodec
from zarr.abc.metadata import Metadata
Expand Down Expand Up @@ -95,6 +97,7 @@ def __init__(self, **codec_config: JSON) -> None:
"Numcodecs codecs are not in the Zarr version 3 specification and "
"may not be supported by other zarr implementations.",
category=UserWarning,
stacklevel=2,
)

@cached_property
Expand Down
45 changes: 37 additions & 8 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -164,39 +164,68 @@ environment = { DISABLE_NUMCODECS_AVX2=1, DISABLE_NUMCODECS_SSE2=1 }

[tool.ruff]
line-length = 100
extend-exclude = ["c-blosc"]
src = ["numcodecs"]

[tool.ruff.lint]
extend-select = [
"B",
"C4",
"EXE",
"FA",
"FLY",
"FURB",
"G",
"I",
"ISC",
"LOG",
"PERF",
"PGH",
"PIE",
"PT",
"PYI",
"RET",
"RSE",
"RUF",
"SIM",
"SLOT",
"TID",
"TRY",
"UP",
# "W", https://github.com/astral-sh/ruff/issues/13763
]
ignore = [
"B028",
"B904",
"FURB101",
"FURB103",
"PT001",
"PT004", # deprecated
"PT005", # deprecated
"PT011",
"RUF001",
"PT012",
"RET505",
"RET506",
"SIM108",
"TRY003",
"TRY301",
"UP007",
"UP027", # deprecated
"UP038", # https://github.com/astral-sh/ruff/issues/7871
]
# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
"W191",
"E111",
"E114",
"E117",
"D206",
"D300",
"Q000",
"Q001",
"Q002",
"Q003",
"COM812",
"COM819",
"ISC001",
"ISC002",
]

[tool.ruff.lint.extend-per-file-ignores]
"numcodecs/tests/**" = ["SIM201", "SIM202", "SIM300", "TRY002"]
"notebooks/**" = ["W391"] # https://github.com/astral-sh/ruff/issues/13763

[tool.ruff.format]
quote-style = "preserve"
Expand Down
Loading

0 comments on commit 9cd6bde

Please sign in to comment.