From cba7e1c333b0bef688465cf42558ac4b0a267e88 Mon Sep 17 00:00:00 2001 From: Thomas Aarholt Date: Wed, 14 Aug 2024 14:14:09 +0200 Subject: [PATCH 01/11] Add --fix to ruff linter in pre-commit --- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a4b3abe..c1d8b18 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,5 +11,6 @@ repos: hooks: # Run the linter. - id: ruff + args: [ --fix ] # Run the formatter. - id: ruff-format From 73bd5e9523372e52d8898e9928dbbae6c4a35d31 Mon Sep 17 00:00:00 2001 From: Thomas Aarholt Date: Wed, 14 Aug 2024 16:40:17 +0200 Subject: [PATCH 02/11] Don't use Sequence in BaseModel, ok to use list instead --- src/patito/_pydantic/column_info.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/patito/_pydantic/column_info.py b/src/patito/_pydantic/column_info.py index ca47219..29b1ef9 100644 --- a/src/patito/_pydantic/column_info.py +++ b/src/patito/_pydantic/column_info.py @@ -2,7 +2,6 @@ import io import json -from collections.abc import Sequence from typing import Annotated, Optional, Union import polars as pl @@ -101,7 +100,7 @@ class ColumnInfo(BaseModel, arbitrary_types_allowed=True): Optional[Union[DataTypeClass, DataType]], BeforeValidator(dtype_deserializer) ] = None constraints: Annotated[ - Optional[Union[pl.Expr, Sequence[pl.Expr]]], BeforeValidator(expr_deserializer) + Optional[Union[pl.Expr, list[pl.Expr]]], BeforeValidator(expr_deserializer) ] = None derived_from: Annotated[ Optional[Union[str, pl.Expr]], BeforeValidator(expr_or_col_name_deserializer) From 117124dd8c7e71481cf9805fe67da08ef24cc6c5 Mon Sep 17 00:00:00 2001 From: Thomas Aarholt Date: Wed, 14 Aug 2024 16:43:04 +0200 Subject: [PATCH 03/11] Set sensible config for pyright and ruff --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 7a664da..a1765a6 100644 --- a/pyproject.toml +++ 
b/pyproject.toml @@ -99,8 +99,10 @@ show_missing = true profile = "black" [tool.pyright] +typeCheckingMode = "basic" venvPath = "." venv = ".venv" +pythonVersion = "3.9" exclude = [ ".venv", @@ -135,6 +137,7 @@ module = ["tests.test_validators"] warn_unused_ignores = false [tool.ruff] +target-version = "py39" extend-exclude= ["tests/__init__.py"] [tool.ruff.lint] From d973412bab3563d164c08efbfbf8ba3f0e74e755 Mon Sep 17 00:00:00 2001 From: Thomas Aarholt Date: Wed, 14 Aug 2024 16:56:19 +0200 Subject: [PATCH 04/11] add UP to ruff config --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index a1765a6..abb8124 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -141,7 +141,7 @@ target-version = "py39" extend-exclude= ["tests/__init__.py"] [tool.ruff.lint] -select = ["E4", "E7", "E9", "F", "I", "B", "D"] +select = ["E4", "E7", "E9", "F", "I", "B", "D", "UP"] [tool.ruff.lint.pydocstyle] convention = "google" From e2c0a666dc258ee66b73296e0471a3b9a68f7fcc Mon Sep 17 00:00:00 2001 From: Thomas Aarholt Date: Wed, 14 Aug 2024 17:02:49 +0200 Subject: [PATCH 05/11] Prevent ruff UP from upgrading syntax within BaseModel --- src/patito/_pydantic/column_info.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/patito/_pydantic/column_info.py b/src/patito/_pydantic/column_info.py index 29b1ef9..5c30a16 100644 --- a/src/patito/_pydantic/column_info.py +++ b/src/patito/_pydantic/column_info.py @@ -97,15 +97,18 @@ class ColumnInfo(BaseModel, arbitrary_types_allowed=True): """ dtype: Annotated[ - Optional[Union[DataTypeClass, DataType]], BeforeValidator(dtype_deserializer) + Optional[Union[DataTypeClass, DataType]], # noqa: UP007 + BeforeValidator(dtype_deserializer), ] = None constraints: Annotated[ - Optional[Union[pl.Expr, list[pl.Expr]]], BeforeValidator(expr_deserializer) + Optional[Union[pl.Expr, list[pl.Expr]]], # noqa: UP007 + BeforeValidator(expr_deserializer), ] = None 
derived_from: Annotated[ - Optional[Union[str, pl.Expr]], BeforeValidator(expr_or_col_name_deserializer) + Optional[Union[str, pl.Expr]], # noqa: UP007 + BeforeValidator(expr_or_col_name_deserializer), ] = None - unique: Optional[bool] = None + unique: Optional[bool] = None # noqa: UP007 def __repr__(self) -> str: """Print only Field attributes whose values are not default (mainly None).""" From f52b74f0aef1583b40313929013a750470e92b84 Mon Sep 17 00:00:00 2001 From: Thomas Aarholt Date: Wed, 14 Aug 2024 17:09:44 +0200 Subject: [PATCH 06/11] ruff UP test_dtypes without changing Sequence import Pydantic doesn't like Sequence being imported from there --- tests/test_dtypes.py | 50 ++++++++++++++++++++++---------------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/tests/test_dtypes.py b/tests/test_dtypes.py index 5df7879..4ffa388 100644 --- a/tests/test_dtypes.py +++ b/tests/test_dtypes.py @@ -4,7 +4,7 @@ import sys from datetime import date, datetime, time, timedelta -from typing import Dict, List, Literal, Optional, Sequence, Tuple, Union +from typing import Literal, Optional, Sequence, Union # noqa: UP035 import polars as pl import pytest @@ -75,32 +75,32 @@ def test_valids_basic_annotations() -> None: def test_valids_nested_annotations() -> None: """Test type annotations match nested polars types like List.""" - assert len(DtypeResolver(List).valid_polars_dtypes()) == 0 # needs inner annotation + assert len(DtypeResolver(list).valid_polars_dtypes()) == 0 # needs inner annotation assert ( - DtypeResolver(Tuple).valid_polars_dtypes() - == DtypeResolver(List).valid_polars_dtypes() + DtypeResolver(tuple).valid_polars_dtypes() + == DtypeResolver(list).valid_polars_dtypes() == DtypeResolver(Sequence).valid_polars_dtypes() ) # for now, these are the same - assert DtypeResolver(List[str]).valid_polars_dtypes() == {pl.List(pl.String)} - assert DtypeResolver(Optional[List[str]]).valid_polars_dtypes() == { + assert 
DtypeResolver(list[str]).valid_polars_dtypes() == {pl.List(pl.String)} + assert DtypeResolver(Optional[list[str]]).valid_polars_dtypes() == { pl.List(pl.String) } - assert len(DtypeResolver(List[int]).valid_polars_dtypes()) == len( + assert len(DtypeResolver(list[int]).valid_polars_dtypes()) == len( DataTypeGroup(INTEGER_DTYPES | FLOAT_DTYPES) ) - assert len(DtypeResolver(List[Union[int, float]]).valid_polars_dtypes()) == len( + assert len(DtypeResolver(list[Union[int, float]]).valid_polars_dtypes()) == len( FLOAT_DTYPES ) - assert len(DtypeResolver(List[Optional[int]]).valid_polars_dtypes()) == len( + assert len(DtypeResolver(list[Optional[int]]).valid_polars_dtypes()) == len( DataTypeGroup(INTEGER_DTYPES | FLOAT_DTYPES) ) - assert DtypeResolver(List[List[str]]).valid_polars_dtypes() == { + assert DtypeResolver(list[list[str]]).valid_polars_dtypes() == { pl.List(pl.List(pl.String)) } # recursion works as expected assert ( - DtypeResolver(Dict).valid_polars_dtypes() == frozenset() + DtypeResolver(dict).valid_polars_dtypes() == frozenset() ) # not currently supported # support for nested models via struct @@ -121,7 +121,7 @@ def test_dtype_validation() -> None: validate_polars_dtype(int, pl.String) with pytest.raises(ValueError, match="Invalid dtype"): - validate_polars_dtype(List[str], pl.List(pl.Float64)) + validate_polars_dtype(list[str], pl.List(pl.Float64)) # some potential corner cases validate_polars_dtype(AwareDatetime, dtype=pl.Datetime(time_zone="UTC")) @@ -157,29 +157,29 @@ def test_defaults_basic_annotations() -> None: def test_defaults_nested_annotations() -> None: """Ensure python nested types fallback to largest nested polars type.""" - assert DtypeResolver(List).default_polars_dtype() is None # needs inner annotation + assert DtypeResolver(list).default_polars_dtype() is None # needs inner annotation - assert DtypeResolver(List[str]).default_polars_dtype() == pl.List(pl.String) - assert DtypeResolver(Optional[List[str]]).default_polars_dtype() == 
pl.List( + assert DtypeResolver(list[str]).default_polars_dtype() == pl.List(pl.String) + assert DtypeResolver(Optional[list[str]]).default_polars_dtype() == pl.List( pl.String ) - assert DtypeResolver(List[int]).default_polars_dtype() == pl.List(pl.Int64) - assert DtypeResolver(List[Optional[int]]).default_polars_dtype() == pl.List( + assert DtypeResolver(list[int]).default_polars_dtype() == pl.List(pl.Int64) + assert DtypeResolver(list[Optional[int]]).default_polars_dtype() == pl.List( pl.Int64 ) - assert DtypeResolver(List[Union[int, float]]).default_polars_dtype() is None - assert DtypeResolver(List[Union[str, int]]).default_polars_dtype() is None - assert DtypeResolver(List[List[str]]).default_polars_dtype() == pl.List( + assert DtypeResolver(list[Union[int, float]]).default_polars_dtype() is None + assert DtypeResolver(list[Union[str, int]]).default_polars_dtype() is None + assert DtypeResolver(list[list[str]]).default_polars_dtype() == pl.List( pl.List(pl.String) ) # recursion works as expected - assert DtypeResolver(List[List[Optional[str]]]).default_polars_dtype() == pl.List( + assert DtypeResolver(list[list[Optional[str]]]).default_polars_dtype() == pl.List( pl.List(pl.String) ) with pytest.raises( NotImplementedError, match="dictionaries not currently supported" ): - DtypeResolver(Dict).default_polars_dtype() + DtypeResolver(dict).default_polars_dtype() # support for nested models via struct many_types = DtypeResolver(ManyTypes).default_polars_dtype() @@ -198,8 +198,8 @@ def test_annotation_validation() -> None: with pytest.raises(ValueError, match="not compatible with any polars dtypes"): validate_annotation(Union[str, int]) - validate_annotation(List[Optional[int]]) + validate_annotation(list[Optional[int]]) with pytest.raises(ValueError, match="not compatible with any polars dtypes"): - validate_annotation(List[Union[str, int]]) + validate_annotation(list[Union[str, int]]) with pytest.raises(ValueError, match="Valid dtypes are:"): - 
validate_annotation(List[Union[int, float]]) + validate_annotation(list[Union[int, float]]) From ef03ecdb49b37d85acbcb431a8eaef6770f7bd95 Mon Sep 17 00:00:00 2001 From: Thomas Aarholt Date: Wed, 14 Aug 2024 17:11:23 +0200 Subject: [PATCH 07/11] Ruff UP the entire codebase --- src/patito/_pydantic/dtypes/dtypes.py | 21 ++--- src/patito/_pydantic/dtypes/utils.py | 16 ++-- src/patito/_pydantic/repr.py | 22 ++--- src/patito/_pydantic/schema.py | 15 ++-- src/patito/exceptions.py | 27 +++--- src/patito/polars.py | 66 +++++++-------- src/patito/pydantic.py | 114 ++++++++++++-------------- src/patito/validators.py | 17 ++-- tests/examples.py | 8 +- tests/test_dummy_data.py | 7 +- tests/test_model.py | 24 +++--- tests/test_validators.py | 28 +++---- 12 files changed, 165 insertions(+), 200 deletions(-) diff --git a/src/patito/_pydantic/dtypes/dtypes.py b/src/patito/_pydantic/dtypes/dtypes.py index 2578569..f0dc14e 100644 --- a/src/patito/_pydantic/dtypes/dtypes.py +++ b/src/patito/_pydantic/dtypes/dtypes.py @@ -1,8 +1,9 @@ from __future__ import annotations +from collections.abc import Mapping from functools import cache, reduce from operator import and_ -from typing import TYPE_CHECKING, Any, Dict, FrozenSet, Mapping, Optional, Type +from typing import TYPE_CHECKING, Any import polars as pl from polars.datatypes import DataType, DataTypeClass @@ -25,8 +26,8 @@ @cache def valid_dtypes_for_model( - cls: Type[ModelType], -) -> Mapping[str, FrozenSet[DataTypeClass]]: + cls: type[ModelType], +) -> Mapping[str, frozenset[DataTypeClass]]: return { column: ( DtypeResolver(cls.model_fields[column].annotation).valid_polars_dtypes() @@ -39,7 +40,7 @@ def valid_dtypes_for_model( @cache def default_dtypes_for_model( - cls: Type[ModelType], + cls: type[ModelType], ) -> dict[str, DataType]: default_dtypes: dict[str, DataType] = {} for column in cls.columns: @@ -57,7 +58,7 @@ def default_dtypes_for_model( def validate_polars_dtype( annotation: type[Any] | None, dtype: DataType | 
DataTypeClass | None, - column: Optional[str] = None, + column: str | None = None, ) -> None: """Check that the polars dtype is valid for the given annotation. Raises ValueError if not. @@ -84,7 +85,7 @@ def validate_polars_dtype( def validate_annotation( - annotation: type[Any] | Any | None, column: Optional[str] = None + annotation: type[Any] | Any | None, column: str | None = None ) -> None: """Check that the provided annotation has polars/patito support (we can resolve it to a default dtype). Raises ValueError if not. @@ -129,7 +130,7 @@ def default_polars_dtype(self) -> DataType | None: def _valid_polars_dtypes_for_schema( self, - schema: Dict, + schema: dict, ) -> DataTypeGroup: valid_type_sets = [] if "anyOf" in schema: @@ -146,7 +147,7 @@ def _valid_polars_dtypes_for_schema( def _pydantic_subschema_to_valid_polars_types( self, - props: Dict, + props: dict, ) -> DataTypeGroup: if "type" not in props: if "enum" in props: @@ -189,7 +190,7 @@ def _pydantic_subschema_to_valid_polars_types( PydanticBaseType(pyd_type), props.get("format"), props.get("enum") ) - def _default_polars_dtype_for_schema(self, schema: Dict) -> DataType | None: + def _default_polars_dtype_for_schema(self, schema: dict) -> DataType | None: if "anyOf" in schema: if len(schema["anyOf"]) == 2: # look for optionals first schema = _without_optional(schema) @@ -205,7 +206,7 @@ def _default_polars_dtype_for_schema(self, schema: Dict) -> DataType | None: def _pydantic_subschema_to_default_dtype( self, - props: Dict, + props: dict, ) -> DataType | None: if "column_info" in props: # user has specified in patito model ci = ColumnInfo.model_validate_json(props["column_info"]) diff --git a/src/patito/_pydantic/dtypes/utils.py b/src/patito/_pydantic/dtypes/utils.py index 8d6eb4a..8e83bcc 100644 --- a/src/patito/_pydantic/dtypes/utils.py +++ b/src/patito/_pydantic/dtypes/utils.py @@ -1,14 +1,10 @@ from __future__ import annotations import sys +from collections.abc import Sequence from enum import Enum 
from typing import ( Any, - Dict, - List, - Optional, - Sequence, - Type, Union, get_args, get_origin, @@ -87,7 +83,7 @@ def is_optional(type_annotation: type[Any] | Any | None) -> bool: ) -def unwrap_optional(type_annotation: Type[Any] | Any) -> Type: +def unwrap_optional(type_annotation: type[Any] | Any) -> type: """Return the inner, wrapped type of an Optional. Is a no-op for non-Optional types. @@ -115,14 +111,14 @@ def parse_composite_dtype(dtype: DataTypeClass | DataType) -> str: return str(dtype) -def dtype_from_string(v: str) -> Optional[Union[DataTypeClass, DataType]]: +def dtype_from_string(v: str) -> DataTypeClass | DataType | None: """For deserialization.""" # TODO test all dtypes return convert.dtype_short_repr_to_dtype(v) def _pyd_type_to_valid_dtypes( - pyd_type: PydanticBaseType, string_format: Optional[str], enum: List[str] | None + pyd_type: PydanticBaseType, string_format: str | None, enum: list[str] | None ) -> DataTypeGroup: if enum is not None: _validate_enum_values(pyd_type, enum) @@ -149,7 +145,7 @@ def _pyd_type_to_valid_dtypes( def _pyd_type_to_default_dtype( - pyd_type: PydanticBaseType, string_format: Optional[str], enum: List[str] | None + pyd_type: PydanticBaseType, string_format: str | None, enum: list[str] | None ) -> DataTypeClass | DataType: if enum is not None: _validate_enum_values(pyd_type, enum) @@ -215,7 +211,7 @@ def _pyd_string_format_to_default_dtype( raise NotImplementedError -def _without_optional(schema: Dict) -> Dict: +def _without_optional(schema: dict) -> dict: if "anyOf" in schema: for sub_props in schema["anyOf"]: if "type" in sub_props and sub_props["type"] == "null": diff --git a/src/patito/_pydantic/repr.py b/src/patito/_pydantic/repr.py index 9a2763e..62ed80b 100644 --- a/src/patito/_pydantic/repr.py +++ b/src/patito/_pydantic/repr.py @@ -1,26 +1,23 @@ import sys import types import typing +from collections.abc import Generator, Iterable, Sequence from typing import ( Any, Callable, - Generator, - Iterable, 
Literal, Optional, - Sequence, - Tuple, - Type, Union, get_args, get_origin, ) +from typing import GenericAlias as TypingGenericAlias # type: ignore if typing.TYPE_CHECKING: - Loc = Tuple[Union[int, str], ...] - ReprArgs = Sequence[Tuple[Optional[str], Any]] + Loc = tuple[Union[int, str], ...] + ReprArgs = Sequence[tuple[Optional[str], Any]] RichReprResult = Iterable[ - Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]] + Union[Any, tuple[Any], tuple[str, Any], tuple[str, Any, Any]] ] try: @@ -30,15 +27,10 @@ typing_base = _TypingBase -if sys.version_info < (3, 9): - # python < 3.9 does not have GenericAlias (list[int], tuple[str, ...] and so on) - TypingGenericAlias = () -else: - from typing import GenericAlias as TypingGenericAlias # type: ignore if sys.version_info < (3, 10): - def origin_is_union(tp: Optional[Type[Any]]) -> bool: + def origin_is_union(tp: Optional[type[Any]]) -> bool: return tp is typing.Union WithArgsTypes = (TypingGenericAlias,) @@ -58,7 +50,7 @@ class Representation: of objects. """ - __slots__: Tuple[str, ...] = tuple() + __slots__: tuple[str, ...] = tuple() def __repr_args__(self) -> "ReprArgs": """Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden. diff --git a/src/patito/_pydantic/schema.py b/src/patito/_pydantic/schema.py index 10f42bc..78f5159 100644 --- a/src/patito/_pydantic/schema.py +++ b/src/patito/_pydantic/schema.py @@ -1,7 +1,8 @@ from __future__ import annotations +from collections.abc import Mapping from functools import cache -from typing import TYPE_CHECKING, Any, Dict, Mapping, Optional, Type, get_args +from typing import TYPE_CHECKING, Any, get_args from pydantic.fields import FieldInfo @@ -13,7 +14,7 @@ @cache -def schema_for_model(cls: Type[ModelType]) -> Dict[str, Dict[str, Any]]: +def schema_for_model(cls: type[ModelType]) -> dict[str, dict[str, Any]]: """Return schema properties where definition references have been resolved. 
Returns: @@ -46,7 +47,7 @@ def schema_for_model(cls: Type[ModelType]) -> Dict[str, Dict[str, Any]]: @cache -def column_infos_for_model(cls: Type[ModelType]) -> Mapping[str, ColumnInfo]: +def column_infos_for_model(cls: type[ModelType]) -> Mapping[str, ColumnInfo]: fields = cls.model_fields def get_column_info(field: FieldInfo) -> ColumnInfo: @@ -62,11 +63,11 @@ def get_column_info(field: FieldInfo) -> ColumnInfo: def _append_field_info_to_props( - field_info: Dict[str, Any], + field_info: dict[str, Any], field_name: str, - model_schema: Dict[str, Any], - required: Optional[bool] = None, -) -> Dict[str, Any]: + model_schema: dict[str, Any], + required: bool | None = None, +) -> dict[str, Any]: if "$ref" in field_info: # TODO onto runtime append definition = model_schema["$defs"][field_info["$ref"]] if "enum" in definition and "type" not in definition: diff --git a/src/patito/exceptions.py b/src/patito/exceptions.py index 6edf743..5e8f0f5 100644 --- a/src/patito/exceptions.py +++ b/src/patito/exceptions.py @@ -1,15 +1,10 @@ """Exceptions used by patito.""" +from collections.abc import Generator, Sequence from typing import ( TYPE_CHECKING, Any, - Dict, - Generator, - List, Optional, - Sequence, - Tuple, - Type, TypedDict, Union, ) @@ -19,7 +14,7 @@ if TYPE_CHECKING: from pydantic import BaseModel - Loc = Tuple[Union[int, str], ...] + Loc = tuple[Union[int, str], ...] 
class _ErrorDictRequired(TypedDict): loc: Loc @@ -27,7 +22,7 @@ class _ErrorDictRequired(TypedDict): type: str class ErrorDict(_ErrorDictRequired, total=False): - ctx: Dict[str, Any] + ctx: dict[str, Any] from patito._pydantic.repr import ReprArgs @@ -67,13 +62,13 @@ class DataFrameValidationError(Representation, ValueError): __slots__ = "raw_errors", "model", "_error_cache" - def __init__(self, errors: Sequence[ErrorList], model: Type["BaseModel"]) -> None: + def __init__(self, errors: Sequence[ErrorList], model: type["BaseModel"]) -> None: """Create a dataframe validation error.""" self.raw_errors = errors self.model = model - self._error_cache: Optional[List["ErrorDict"]] = None + self._error_cache: Optional[list[ErrorDict]] = None - def errors(self) -> List["ErrorDict"]: + def errors(self) -> list["ErrorDict"]: """Get list of errors.""" if self._error_cache is None: self._error_cache = list(flatten_errors(self.raw_errors)) @@ -93,7 +88,7 @@ def __repr_args__(self) -> "ReprArgs": return [("model", self.model.__name__), ("errors", self.errors())] -def display_errors(errors: List["ErrorDict"]) -> str: +def display_errors(errors: list["ErrorDict"]) -> str: return "\n".join( f'{_display_error_loc(e)}\n {e["msg"]} ({_display_error_type_and_ctx(e)})' for e in errors @@ -142,7 +137,7 @@ def error_dict(exc: Exception, loc: "Loc") -> "ErrorDict": else: msg = str(exc) - d: "ErrorDict" = {"loc": loc, "msg": msg, "type": type_} + d: ErrorDict = {"loc": loc, "msg": msg, "type": type_} if ctx: d["ctx"] = ctx @@ -150,10 +145,10 @@ def error_dict(exc: Exception, loc: "Loc") -> "ErrorDict": return d -_EXC_TYPE_CACHE: Dict[Type[Exception], str] = {} +_EXC_TYPE_CACHE: dict[type[Exception], str] = {} -def get_exc_type(cls: Type[Exception]) -> str: +def get_exc_type(cls: type[Exception]) -> str: # slightly more efficient than using lru_cache since we don't need to worry about the cache filling up try: return _EXC_TYPE_CACHE[cls] @@ -163,7 +158,7 @@ def get_exc_type(cls: 
Type[Exception]) -> str: return r -def _get_exc_type(cls: Type[Exception]) -> str: +def _get_exc_type(cls: type[Exception]) -> str: if issubclass(cls, AssertionError): return "assertion_error" diff --git a/src/patito/polars.py b/src/patito/polars.py index e51ac09..de5643c 100644 --- a/src/patito/polars.py +++ b/src/patito/polars.py @@ -6,14 +6,9 @@ from typing import ( TYPE_CHECKING, Any, - Dict, Generic, Literal, - Optional, - Tuple, - Type, TypeVar, - Union, cast, ) @@ -57,12 +52,12 @@ def __iter__(self) -> Iterator[ModelType]: # noqa: D105 class LazyFrame(pl.LazyFrame, Generic[ModelType]): """LazyFrame class associated to DataFrame.""" - model: Type[ModelType] + model: type[ModelType] @classmethod def _construct_lazyframe_model_class( - cls: Type[LDF], model: Optional[Type[ModelType]] - ) -> Type[LazyFrame[ModelType]]: + cls: type[LDF], model: type[ModelType] | None + ) -> type[LazyFrame[ModelType]]: """Return custom LazyFrame sub-class where LazyFrame.model is set. Can be used to construct a LazyFrame class where @@ -91,7 +86,7 @@ def collect( self, *args, **kwargs, - ) -> "DataFrame[ModelType]": # noqa: DAR101, DAR201 + ) -> DataFrame[ModelType]: # noqa: DAR101, DAR201 """Collect into a DataFrame. 
See documentation of polars.DataFrame.collect for full description of @@ -166,8 +161,8 @@ def _derive_column( self, lf: LDF, column_name: str, - column_infos: Dict[str, ColumnInfo], - ) -> Tuple[LDF, Sequence[str]]: + column_infos: dict[str, ColumnInfo], + ) -> tuple[LDF, Sequence[str]]: if ( column_infos.get(column_name, None) is None or column_infos[column_name].derived_from is None @@ -209,7 +204,7 @@ def unalias(self: LDF) -> LDF: return self exprs = [] - def to_expr(va: str | AliasPath | AliasChoices) -> Optional[pl.Expr]: + def to_expr(va: str | AliasPath | AliasChoices) -> pl.Expr | None: if isinstance(va, str): return pl.col(va) if va in self.collect_schema() else None elif isinstance(va, AliasPath): @@ -223,7 +218,7 @@ def to_expr(va: str | AliasPath | AliasChoices) -> Optional[pl.Expr]: else None ) elif isinstance(va, AliasChoices): - local_expr: Optional[pl.Expr] = None + local_expr: pl.Expr | None = None for choice in va.choices: if (part := to_expr(choice)) is not None: local_expr = ( @@ -253,7 +248,7 @@ def to_expr(va: str | AliasPath | AliasChoices) -> Optional[pl.Expr]: return self.select(exprs) def cast( - self: LDF, strict: bool = False, columns: Optional[Sequence[str]] = None + self: LDF, strict: bool = False, columns: Sequence[str] | None = None ) -> LDF: """Cast columns to `dtypes` specified by the associated Patito model. @@ -310,7 +305,7 @@ def cast( return self.with_columns(exprs) @classmethod - def from_existing(cls: Type[LDF], lf: pl.LazyFrame) -> LDF: + def from_existing(cls: type[LDF], lf: pl.LazyFrame) -> LDF: """Construct a patito.DataFrame object from an existing polars.DataFrame object.""" return cls.model.LazyFrame._from_pyldf(lf._ldf).cast() @@ -344,12 +339,12 @@ class DataFrame(pl.DataFrame, Generic[ModelType]): :ref:`Product.validate `. 
""" - model: Type[ModelType] + model: type[ModelType] @classmethod def _construct_dataframe_model_class( - cls: Type[DF], model: Type[OtherModelType] - ) -> Type[DataFrame[OtherModelType]]: + cls: type[DF], model: type[OtherModelType] + ) -> type[DataFrame[OtherModelType]]: """Return custom DataFrame sub-class where DataFrame.model is set. Can be used to construct a DataFrame class where @@ -463,7 +458,7 @@ def unalias(self: DF) -> DF: return self.lazy().unalias().collect() def cast( - self: DF, strict: bool = False, columns: Optional[Sequence[str]] = None + self: DF, strict: bool = False, columns: Sequence[str] | None = None ) -> DF: """Cast columns to `dtypes` specified by the associated Patito model. @@ -507,7 +502,7 @@ def cast( def drop( self: DF, - columns: Optional[Union[str, Collection[str]]] = None, + columns: str | Collection[str] | None = None, *more_columns: str, ) -> DF: """Drop one or more columns from the dataframe. @@ -547,7 +542,7 @@ def drop( else: return self.drop(list(set(self.columns) - set(self.model.columns))) - def validate(self, columns: Optional[Sequence[str]] = None, **kwargs: Any): + def validate(self, columns: Sequence[str] | None = None, **kwargs: Any): """Validate the schema and content of the dataframe. You must invoke ``.set_model()`` before invoking ``.validate()`` in order @@ -641,13 +636,12 @@ def derive(self: DF, columns: list[str] | None = None) -> DF: def fill_null( self: DF, - value: Optional[Any] = None, - strategy: Optional[ - Literal[ - "forward", "backward", "min", "max", "mean", "zero", "one", "defaults" - ] - ] = None, - limit: Optional[int] = None, + value: Any | None = None, + strategy: Literal[ + "forward", "backward", "min", "max", "mean", "zero", "one", "defaults" + ] + | None = None, + limit: int | None = None, matches_supertype: bool = True, ) -> DF: """Fill null values using a filling strategy, literal, or ``Expr``. 
@@ -714,7 +708,7 @@ def fill_null( ] ).set_model(self.model) - def get(self, predicate: Optional[pl.Expr] = None) -> ModelType: + def get(self, predicate: pl.Expr | None = None) -> ModelType: """Fetch the single row that matches the given polars predicate. If you expect a data frame to already consist of one single row, @@ -846,7 +840,7 @@ def _iter_models(_df: DF) -> Iterator[ModelType]: return ModelGenerator(_iter_models(df)) - def _pydantic_model(self) -> Type[Model]: + def _pydantic_model(self) -> type[Model]: """Dynamically construct patito model compliant with dataframe. Returns: @@ -858,7 +852,7 @@ def _pydantic_model(self) -> Type[Model]: pydantic_annotations = {column: (Any, ...) for column in self.columns} return cast( - Type[Model], + type[Model], create_model( # type: ignore "UntypedRow", __base__=Model, @@ -872,7 +866,7 @@ def as_polars(self) -> pl.DataFrame: @classmethod def read_csv( # type: ignore[no-untyped-def] - cls: Type[DF], + cls: type[DF], *args, # noqa: ANN002 **kwargs, # noqa: ANN003 ) -> DF: @@ -956,15 +950,13 @@ def read_csv( # type: ignore[no-untyped-def] # --- Type annotation overrides --- def filter( # noqa: D102 self: DF, - predicate: Union[ - pl.Expr, str, pl.Series, list[bool], np.ndarray[Any, Any], bool - ], + predicate: pl.Expr | str | pl.Series | list[bool] | np.ndarray[Any, Any] | bool, ) -> DF: return cast(DF, super().filter(predicate)) def select( # noqa: D102 self: DF, - *exprs: Union[IntoExpr, Iterable[IntoExpr]], + *exprs: IntoExpr | Iterable[IntoExpr], **named_exprs: IntoExpr, ) -> DF: return cast( # pyright: ignore[redundant-cast] @@ -973,7 +965,7 @@ def select( # noqa: D102 def with_columns( # noqa: D102 self: DF, - *exprs: Union[IntoExpr, Iterable[IntoExpr]], + *exprs: IntoExpr | Iterable[IntoExpr], **named_exprs: IntoExpr, ) -> DF: return cast(DF, super().with_columns(*exprs, **named_exprs)) diff --git a/src/patito/pydantic.py b/src/patito/pydantic.py index ba1b130..05aa1d4 100644 --- a/src/patito/pydantic.py +++ 
b/src/patito/pydantic.py @@ -3,27 +3,20 @@ from __future__ import annotations import itertools -from collections.abc import Iterable +from collections.abc import Iterable, Mapping, Sequence from datetime import date, datetime, time, timedelta from inspect import getfullargspec from typing import ( TYPE_CHECKING, Any, ClassVar, - Dict, - FrozenSet, - List, Literal, - Mapping, Optional, - Sequence, - Tuple, - Type, TypeVar, - Union, cast, get_args, ) +from zoneinfo import ZoneInfo import polars as pl from polars.datatypes import DataType, DataTypeClass @@ -35,7 +28,6 @@ from pydantic._internal._model_construction import ( ModelMetaclass as PydanticModelMetaclass, ) -from zoneinfo import ZoneInfo from patito._pydantic.column_info import ColumnInfo from patito._pydantic.dtypes import ( @@ -71,7 +63,7 @@ class ModelMetaclass(PydanticModelMetaclass): """ if TYPE_CHECKING: - model_fields: ClassVar[Dict[str, fields.FieldInfo]] + model_fields: ClassVar[dict[str, fields.FieldInfo]] def __init__(cls, name: str, bases: tuple, clsdict: dict, **kwargs) -> None: """Construct new patito model. @@ -99,12 +91,12 @@ def __hash__(self) -> int: return super().__hash__() @property - def column_infos(cls: Type[ModelType]) -> Mapping[str, ColumnInfo]: + def column_infos(cls: type[ModelType]) -> Mapping[str, ColumnInfo]: """Return column information for the model.""" return column_infos_for_model(cls) @property - def model_schema(cls: Type[ModelType]) -> Mapping[str, Mapping[str, Any]]: + def model_schema(cls: type[ModelType]) -> Mapping[str, Mapping[str, Any]]: """Return schema properties where definition references have been resolved. Returns: @@ -120,7 +112,7 @@ def model_schema(cls: Type[ModelType]) -> Mapping[str, Mapping[str, Any]]: return schema_for_model(cls) @property - def columns(cls: Type[ModelType]) -> List[str]: + def columns(cls: type[ModelType]) -> list[str]: """Return the name of the dataframe columns specified by the fields of the model. 
Returns: @@ -139,7 +131,7 @@ def columns(cls: Type[ModelType]) -> List[str]: return list(cls.model_fields.keys()) @property - def dtypes(cls: Type[ModelType]) -> dict[str, DataTypeClass | DataType]: + def dtypes(cls: type[ModelType]) -> dict[str, DataTypeClass | DataType]: """Return the polars dtypes of the dataframe. Unless Field(dtype=...) is specified, the highest signed column dtype @@ -163,8 +155,8 @@ def dtypes(cls: Type[ModelType]) -> dict[str, DataTypeClass | DataType]: @property def valid_dtypes( - cls: Type[ModelType], - ) -> Mapping[str, FrozenSet[DataTypeClass | DataType]]: + cls: type[ModelType], + ) -> Mapping[str, frozenset[DataTypeClass | DataType]]: """Return a list of polars dtypes which Patito considers valid for each field. The first item of each list is the default dtype chosen by Patito. @@ -180,7 +172,7 @@ def valid_dtypes( return valid_dtypes_for_model(cls) @property - def defaults(cls: Type[ModelType]) -> dict[str, Any]: + def defaults(cls: type[ModelType]) -> dict[str, Any]: """Return default field values specified on the model. Returns: @@ -205,7 +197,7 @@ def defaults(cls: Type[ModelType]) -> dict[str, Any]: } @property - def non_nullable_columns(cls: Type[ModelType]) -> set[str]: + def non_nullable_columns(cls: type[ModelType]) -> set[str]: """Return names of those columns that are non-nullable in the schema. Returns: @@ -234,7 +226,7 @@ def non_nullable_columns(cls: Type[ModelType]) -> set[str]: ) @property - def nullable_columns(cls: Type[ModelType]) -> set[str]: + def nullable_columns(cls: type[ModelType]) -> set[str]: """Return names of those columns that are nullable in the schema. Returns: @@ -256,7 +248,7 @@ def nullable_columns(cls: Type[ModelType]) -> set[str]: return set(cls.columns) - cls.non_nullable_columns @property - def unique_columns(cls: Type[ModelType]) -> set[str]: + def unique_columns(cls: type[ModelType]) -> set[str]: """Return columns with uniqueness constraint. 
Returns: @@ -279,7 +271,7 @@ def unique_columns(cls: Type[ModelType]) -> set[str]: return {column for column in cls.columns if infos[column].unique} @property - def derived_columns(cls: Type[ModelType]) -> set[str]: + def derived_columns(cls: type[ModelType]) -> set[str]: """Return set of columns which are derived from other columns.""" infos = cls.column_infos return { @@ -291,7 +283,7 @@ class Model(BaseModel, metaclass=ModelMetaclass): """Custom pydantic class for representing table schema and constructing rows.""" @classmethod - def validate_schema(cls: Type[ModelType]): + def validate_schema(cls: type[ModelType]): """Users should run this after defining or edit a model. We withhold the checks at model definition time to avoid expensive queries of the model schema.""" for column in cls.columns: col_info = cls.column_infos[column] @@ -305,8 +297,8 @@ def validate_schema(cls: Type[ModelType]): @classmethod def from_row( - cls: Type[ModelType], - row: Union["pd.DataFrame", pl.DataFrame], + cls: type[ModelType], + row: pd.DataFrame | pl.DataFrame, validate: bool = True, ) -> ModelType: """Represent a single data frame row as a Patito model. 
@@ -353,7 +345,7 @@ def from_row( @classmethod def _from_polars( - cls: Type[ModelType], + cls: type[ModelType], dataframe: pl.DataFrame, validate: bool = True, ) -> ModelType: @@ -413,9 +405,9 @@ def _from_polars( @classmethod def validate( - cls: Type[ModelType], - dataframe: Union["pd.DataFrame", pl.DataFrame], - columns: Optional[Sequence[str]] = None, + cls: type[ModelType], + dataframe: pd.DataFrame | pl.DataFrame, + columns: Sequence[str] | None = None, allow_missing_columns: bool = False, allow_superfluous_columns: bool = False, drop_superfluous_columns: bool = False, @@ -480,7 +472,7 @@ def validate( @classmethod def iter_models( - cls: Type[ModelType], dataframe: Union["pd.DataFrame", pl.DataFrame] + cls: type[ModelType], dataframe: pd.DataFrame | pl.DataFrame ) -> ModelGenerator[ModelType]: """Validate the dataframe and iterate over the rows, yielding Patito models. @@ -500,9 +492,9 @@ def iter_models( @classmethod def example_value( # noqa: C901 cls, - field: Optional[str] = None, - properties: Optional[Dict[str, Any]] = None, - ) -> Union[date, datetime, time, timedelta, float, int, str, None, Mapping, List]: + field: str | None = None, + properties: dict[str, Any] | None = None, + ) -> date | datetime | time | timedelta | float | int | str | None | Mapping | list: """Return a valid example value for the given model field. Args: @@ -665,7 +657,7 @@ def example_value( # noqa: C901 @classmethod def example( - cls: Type[ModelType], + cls: type[ModelType], **kwargs: Any, # noqa: ANN401 ) -> ModelType: """Produce model instance with filled dummy data for all unspecified fields. @@ -717,10 +709,10 @@ def example( @classmethod def pandas_examples( - cls: Type[ModelType], - data: Union[dict, Iterable], - columns: Optional[Iterable[str]] = None, - ) -> "pd.DataFrame": + cls: type[ModelType], + data: dict | Iterable, + columns: Iterable[str] | None = None, + ) -> pd.DataFrame: """Generate dataframe with dummy data for all unspecified columns. 
Offers the same API as the pandas.DataFrame constructor. @@ -787,10 +779,10 @@ def pandas_examples( @classmethod def examples( - cls: Type[ModelType], - data: Optional[Union[dict, Iterable]] = None, - columns: Optional[Iterable[str]] = None, - ) -> "patito.polars.DataFrame": + cls: type[ModelType], + data: dict | Iterable | None = None, + columns: Iterable[str] | None = None, + ) -> patito.polars.DataFrame: """Generate polars dataframe with dummy data for all unspecified columns. This constructor accepts the same data format as polars.DataFrame. @@ -859,7 +851,7 @@ def examples( if wrong_columns: raise TypeError(f"{cls.__name__} does not contain fields {wrong_columns}!") - series: List[Union[pl.Series, pl.Expr]] = [] + series: list[pl.Series | pl.Expr] = [] unique_series = [] for column_name, dtype in cls.dtypes.items(): if column_name not in kwargs: @@ -887,10 +879,10 @@ def examples( @classmethod def join( - cls: Type["Model"], - other: Type["Model"], + cls: type[Model], + other: type[Model], how: Literal["inner", "left", "outer", "asof", "cross", "semi", "anti"], - ) -> Type["Model"]: + ) -> type[Model]: """Dynamically create a new model compatible with an SQL Join operation. For instance, ``ModelA.join(ModelB, how="left")`` will create a model containing @@ -935,7 +927,7 @@ def join( if how in {"semi", "anti"}: return cls - kwargs: Dict[str, Any] = {} + kwargs: dict[str, Any] = {} for model, nullable_methods in ( (cls, {"outer"}), (other, {"left", "outer", "asof"}), @@ -955,9 +947,7 @@ def join( ) @classmethod - def select( - cls: Type[ModelType], fields: Union[str, Iterable[str]] - ) -> Type["Model"]: + def select(cls: type[ModelType], fields: str | Iterable[str]) -> type[Model]: """Create a new model consisting of only a subset of the model fields. 
Args: @@ -999,7 +989,7 @@ def select( ) @classmethod - def drop(cls: Type[ModelType], name: Union[str, Iterable[str]]) -> Type["Model"]: + def drop(cls: type[ModelType], name: str | Iterable[str]) -> type[Model]: """Return a new model where one or more fields are excluded. Args: @@ -1038,7 +1028,7 @@ def drop(cls: Type[ModelType], name: Union[str, Iterable[str]]) -> Type["Model"] ) @classmethod - def prefix(cls: Type[ModelType], prefix: str) -> Type["Model"]: + def prefix(cls: type[ModelType], prefix: str) -> type[Model]: """Return a new model where all field names have been prefixed. Args: @@ -1064,7 +1054,7 @@ def prefix(cls: Type[ModelType], prefix: str) -> Type["Model"]: ) @classmethod - def suffix(cls: Type[ModelType], suffix: str) -> Type["Model"]: + def suffix(cls: type[ModelType], suffix: str) -> type[Model]: """Return a new model where all field names have been suffixed. Args: @@ -1091,7 +1081,7 @@ def suffix(cls: Type[ModelType], suffix: str) -> Type["Model"]: ) @classmethod - def rename(cls: Type[ModelType], mapping: Dict[str, str]) -> Type["Model"]: + def rename(cls: type[ModelType], mapping: dict[str, str]) -> type[Model]: """Return a new model class where the specified fields have been renamed. Args: @@ -1132,9 +1122,9 @@ def rename(cls: Type[ModelType], mapping: Dict[str, str]) -> Type["Model"]: @classmethod def with_fields( - cls: Type[ModelType], + cls: type[ModelType], **field_definitions: Any, # noqa: ANN401 - ) -> Type["Model"]: + ) -> type[Model]: """Return a new model class where the given fields have been added. 
Args: @@ -1167,11 +1157,11 @@ def with_fields( ) @classmethod - def _schema_properties(cls: Type[ModelType]) -> Mapping[str, Any]: + def _schema_properties(cls: type[ModelType]) -> Mapping[str, Any]: return cls.model_schema["properties"] @classmethod - def _update_dfn(cls, annotation: Any, schema: Dict[str, Any]) -> None: + def _update_dfn(cls, annotation: Any, schema: dict[str, Any]) -> None: try: if issubclass(annotation, Model) and annotation.__name__ != cls.__name__: schema["$defs"][annotation.__name__] = annotation.model_schema @@ -1180,10 +1170,10 @@ def _update_dfn(cls, annotation: Any, schema: Dict[str, Any]) -> None: @classmethod def _derive_model( - cls: Type[ModelType], + cls: type[ModelType], model_name: str, - field_mapping: Dict[str, Any], - ) -> Type["Model"]: + field_mapping: dict[str, Any], + ) -> type[Model]: """Derive a new model with new field definitions. Args: @@ -1224,7 +1214,7 @@ def _derive_model( def _derive_field( field: fields.FieldInfo, make_nullable: bool = False, - ) -> Tuple[Type | None, fields.FieldInfo]: + ) -> tuple[type | None, fields.FieldInfo]: field_type = field.annotation default = field.default extra_attrs = { diff --git a/src/patito/validators.py b/src/patito/validators.py index 25edc47..5bf6fb0 100644 --- a/src/patito/validators.py +++ b/src/patito/validators.py @@ -2,13 +2,10 @@ from __future__ import annotations +from collections.abc import Sequence from typing import ( TYPE_CHECKING, Any, - Optional, - Sequence, - Type, - Union, cast, ) @@ -86,8 +83,8 @@ def _transform_df(dataframe: pl.DataFrame, schema: type[Model]) -> pl.DataFrame: def _find_errors( # noqa: C901 dataframe: pl.DataFrame, - schema: Type[Model], - columns: Optional[Sequence[str]] = None, + schema: type[Model], + columns: Sequence[str] | None = None, allow_missing_columns: bool = False, allow_superfluous_columns: bool = False, ) -> list[ErrorWrapper]: @@ -396,7 +393,7 @@ def _find_errors( # noqa: C901 def _find_enum_errors( - df: pl.DataFrame, 
column_name: str, props: dict[str, Any], schema: Type[Model] + df: pl.DataFrame, column_name: str, props: dict[str, Any], schema: type[Model] ) -> ErrorWrapper | None: if "enum" not in props: if "items" in props and "enum" in props["items"]: @@ -431,9 +428,9 @@ def _find_enum_errors( def validate( - dataframe: Union["pd.DataFrame", pl.DataFrame], - schema: Type[Model], - columns: Optional[Sequence[str]] = None, + dataframe: pd.DataFrame | pl.DataFrame, + schema: type[Model], + columns: Sequence[str] | None = None, allow_missing_columns: bool = False, allow_superfluous_columns: bool = False, drop_superfluous_columns: bool = False, diff --git a/tests/examples.py b/tests/examples.py index c487172..ca05dbb 100644 --- a/tests/examples.py +++ b/tests/examples.py @@ -1,7 +1,7 @@ """Testing examples.""" from datetime import date, datetime, time, timedelta -from typing import List, Literal, Optional +from typing import Literal, Optional import patito as pt import polars as pl @@ -58,6 +58,6 @@ class CompleteModel(pt.Model): pt_model_column: SmallModel - list_int_column: List[int] - list_str_column: List[str] - list_opt_column: List[Optional[int]] + list_int_column: list[int] + list_str_column: list[str] + list_opt_column: list[Optional[int]] diff --git a/tests/test_dummy_data.py b/tests/test_dummy_data.py index c0526f9..745dbed 100644 --- a/tests/test_dummy_data.py +++ b/tests/test_dummy_data.py @@ -1,7 +1,8 @@ """Test of functionality related to the generation of dummy data.""" +from collections.abc import Sequence from datetime import date, datetime -from typing import List, Literal, Optional, Sequence +from typing import Literal, Optional import patito as pt import polars as pl @@ -53,8 +54,8 @@ class MyModel(pt.Model): a: int b: Optional[str] c: Optional[int] - d: Optional[List[str]] = pt.Field(dtype=pl.List(pl.String)) - e: List[int] + d: Optional[list[str]] = pt.Field(dtype=pl.List(pl.String)) + e: list[int] f: int = pt.Field(ge=0) df = MyModel.examples({"a": [1, 2]}) 
diff --git a/tests/test_model.py b/tests/test_model.py index 710f991..9e263f4 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -6,7 +6,7 @@ import enum import re from datetime import date, datetime, time -from typing import Optional, Type +from typing import Optional import patito as pt import polars as pl @@ -252,13 +252,13 @@ def test_model_joins() -> None: class Left(pt.Model): left: int = pt.Field(gt=20) - opt_left: Optional[int] = None + opt_left: int | None = None class Right(pt.Model): right: int = pt.Field(gt=20) - opt_right: Optional[int] = None + opt_right: int | None = None - def test_model_validator(model: Type[pt.Model]) -> None: + def test_model_validator(model: type[pt.Model]) -> None: """Test if all field validators have been included correctly.""" with pytest.raises(ValidationError) as e: model(left=1, opt_left=1, right=1, opt_right=1) @@ -295,7 +295,7 @@ def test_model_selects() -> None: """It should produce models compatible with select statements.""" class MyModel(pt.Model): - a: Optional[int] + a: int | None b: int = pt.Field(gt=10) MySubModel = MyModel.select("b") @@ -321,7 +321,7 @@ def test_model_prefix_and_suffix() -> None: """It should produce models where all fields have been prefixed/suffixed.""" class MyModel(pt.Model): - a: Optional[int] + a: int | None b: str NewModel = MyModel.prefix("pre_").suffix("_post") @@ -333,7 +333,7 @@ def test_model_field_renaming() -> None: """It should be able to change its field names.""" class MyModel(pt.Model): - a: Optional[int] + a: int | None b: str NewModel = MyModel.rename({"b": "B"}) @@ -425,7 +425,7 @@ def validate_model_schema(schema) -> None: class ParentModel(pt.Model): a: int b: Model - c: Optional[float] = None + c: float | None = None schema = ParentModel.model_schema validate_model_schema( @@ -445,13 +445,13 @@ def test_nullable_columns() -> None: """Ensure columns are correctly nullable.""" class Test1(pt.Model): - foo: Optional[str] = pt.Field(dtype=pl.String) + foo: str | 
None = pt.Field(dtype=pl.String) assert Test1.nullable_columns == {"foo"} assert set(Test1.valid_dtypes["foo"]) == {pl.String} class Test2(pt.Model): - foo: Optional[int] = pt.Field(dtype=pl.UInt32) + foo: int | None = pt.Field(dtype=pl.UInt32) assert Test2.nullable_columns == {"foo"} assert set(Test2.valid_dtypes["foo"]) == {pl.UInt32} @@ -476,7 +476,7 @@ class Test2(pt.Model): with pytest.raises(ValueError, match="Invalid dtype UInt32"): class Test3(pt.Model): - foo: Optional[str] = pt.Field(dtype=pl.UInt32) + foo: str | None = pt.Field(dtype=pl.UInt32) Test3.validate_schema() @@ -536,7 +536,7 @@ class SubModel(pt.Model): class Test(pt.Model): a: int b: int - c: Optional[SubModel] + c: SubModel | None df = Test.examples({"a": range(5), "c": None}) Test.validate(df.cast()) diff --git a/tests/test_validators.py b/tests/test_validators.py index edfa25f..90e1460 100644 --- a/tests/test_validators.py +++ b/tests/test_validators.py @@ -6,7 +6,7 @@ import re import sys from datetime import date, datetime -from typing import List, Literal, Optional, Union +from typing import Literal, Optional, Union import patito as pt import polars as pl @@ -163,7 +163,7 @@ def test_validate_non_nullable_columns() -> None: class SmallModel(pt.Model): column_1: int - column_2: Optional[int] = None + column_2: int | None = None # We insert nulls into a non-optional column, causing an exception wrong_nulls_df = pl.DataFrame().with_columns( @@ -270,7 +270,7 @@ class NonCompatibleModel(pt.Model): with pytest.raises(ValueError, match="not compatible with any polars dtypes"): class NonCompatibleListModel(pt.Model): - my_field: List[object] + my_field: list[object] NonCompatibleListModel.validate_schema() @@ -383,7 +383,7 @@ class ABCEnum(enum.Enum): THREE = "c" class EnumModel(pt.Model): - column: Optional[ABCEnum] + column: ABCEnum | None valid_df = pl.DataFrame({"column": ["a", "b", "b", "c"]}) validate(dataframe=valid_df, schema=EnumModel) @@ -424,7 +424,7 @@ class EnumModel(pt.Model): 
assert errors[0] == error_expected class ListEnumModel(pt.Model): - column: List[Literal["a", "b", "c"]] + column: list[Literal["a", "b", "c"]] valid_df = pl.DataFrame({"column": [["a", "b"], ["b", "c"], ["a", "c"]]}) validate(dataframe=valid_df, schema=ListEnumModel) @@ -441,7 +441,7 @@ def test_optional_literal_enum_validation() -> None: """Test validation of optional typing.Literal-typed fields.""" class EnumModel(pt.Model): - column: Optional[Literal["a", "b", "c"]] + column: Literal["a", "b", "c"] | None valid_df = pl.DataFrame({"column": ["a", "b", "b", "c"]}) validate(dataframe=valid_df, schema=EnumModel) @@ -460,7 +460,7 @@ class EnumModel(pt.Model): assert errors[0] == error_expected class ListEnumModel(pt.Model): - column: List[Literal["a", "b", "c"]] + column: list[Literal["a", "b", "c"]] valid_df = pl.DataFrame({"column": [["a", "b"], ["b", "c"], ["a", "c"]]}) validate(dataframe=valid_df, schema=ListEnumModel) @@ -793,7 +793,7 @@ def test_optional_enum() -> None: class OptionalEnumModel(pt.Model): # Old type annotation syntax - optional_enum: Optional[Literal["A", "B"]] + optional_enum: Literal["A", "B"] | None df = pl.DataFrame({"optional_enum": ["A", "B", None]}) OptionalEnumModel.validate(df) @@ -808,7 +808,7 @@ def test_optional_pipe_operator() -> None: class OptionalEnumModel(pt.Model): # Old type annotation syntax - optional_enum_1: Optional[Literal["A", "B"]] + optional_enum_1: Literal["A", "B"] | None # New type annotation syntax optional_enum_2: Literal["A", "B"] | None # type: ignore @@ -831,10 +831,10 @@ def test_validation_of_list_dtypes() -> None: """It should be able to validate dtypes organized in lists.""" class ListModel(pt.Model): - int_list: List[int] - int_or_null_list: List[Optional[int]] - nullable_int_list: Optional[List[int]] - nullable_int_or_null_list: Optional[List[Optional[int]]] + int_list: list[int] + int_or_null_list: list[int | None] + nullable_int_list: list[int] | None + nullable_int_or_null_list: list[int | None] | 
None valid_df = pl.DataFrame( { @@ -866,7 +866,7 @@ def test_nested_field_attrs() -> None: """Ensure that constraints are respected even when embedded inside 'anyOf'.""" class Test(pt.Model): - foo: Optional[int] = pt.Field( + foo: int | None = pt.Field( dtype=pl.Int64, ge=0, le=100, constraints=pt.field.sum() == 100 ) From 6a61f47c6fb9ea570f2332a0b3c9a6ca15ebd742 Mon Sep 17 00:00:00 2001 From: Thomas Aarholt Date: Wed, 14 Aug 2024 17:19:14 +0200 Subject: [PATCH 08/11] Update what is run on pre-commit and remove unused plugins --- noxfile.py | 9 ++------- pyproject.toml | 9 --------- 2 files changed, 2 insertions(+), 16 deletions(-) diff --git a/noxfile.py b/noxfile.py index 261bde8..34a54b3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -8,7 +8,7 @@ import nox # type: ignore nox.options.sessions = ( - # "lint", + "lint", "test", # "type_check" "docs", @@ -107,16 +107,11 @@ def lint(session): args = session.posargs or locations install_with_constraints( session, - "flake8", - "flake8-annotations", - "flake8-bandit", - "flake8-bugbear", - "flake8-docstrings", "darglint", "ruff", ) - session.run("flake8", *args) session.run("ruff check", *args) + session.run("ruff format", *args) @nox.session(python="3.12") diff --git a/pyproject.toml b/pyproject.toml index abb8124..fe25ba5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,12 +43,6 @@ docs = [ [tool.poetry.group.dev.dependencies] ruff = ">=0.2.1" coverage = {version = "*", extras = ["toml"]} -flake8 = "3.9.2" -flake8-annotations = { version = "*", python = ">=3.9,<4.0" } -flake8-bandit = "*" -flake8-black = "*" -flake8-bugbear = "*" -flake8-isort = "*" pyright = ">=1.1.239" pytest = ">=7.1.2" pytest-cov = ">=3.0.0" @@ -95,9 +89,6 @@ exclude_lines = [ fail_under = 99.64 show_missing = true -[tool.isort] -profile = "black" - [tool.pyright] typeCheckingMode = "basic" venvPath = "." 
From 2c56afbf47aa5dbece8864e3f909a15cb4556deb Mon Sep 17 00:00:00 2001 From: Thomas Aarholt Date: Wed, 14 Aug 2024 17:22:46 +0200 Subject: [PATCH 09/11] update lockfile --- poetry.lock | 333 ++++++++-------------------------------------------- 1 file changed, 52 insertions(+), 281 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9d3b82c..3444e29 100644 --- a/poetry.lock +++ b/poetry.lock @@ -123,25 +123,6 @@ six = ">=1.12.0" astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] -[[package]] -name = "attrs" -version = "24.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, -] - -[package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] - [[package]] name = "babel" version = "2.16.0" @@ -156,30 +137,6 @@ files = [ [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] -[[package]] -name = "bandit" -version = "1.7.9" -description = "Security oriented static 
analyser for python code." -optional = false -python-versions = ">=3.8" -files = [ - {file = "bandit-1.7.9-py3-none-any.whl", hash = "sha256:52077cb339000f337fb25f7e045995c4ad01511e716e5daac37014b9752de8ec"}, - {file = "bandit-1.7.9.tar.gz", hash = "sha256:7c395a436743018f7be0a4cbb0a4ea9b902b6d87264ddecf8cfdc73b4f78ff61"}, -] - -[package.dependencies] -colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} -PyYAML = ">=5.3.1" -rich = "*" -stevedore = ">=1.20.0" - -[package.extras] -baseline = ["GitPython (>=3.1.30)"] -sarif = ["jschema-to-python (>=1.2.3)", "sarif-om (>=1.0.4)"] -test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] -toml = ["tomli (>=1.1.0)"] -yaml = ["PyYAML"] - [[package]] name = "beautifulsoup4" version = "4.12.3" @@ -753,123 +710,6 @@ docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1 testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] -[[package]] -name = "flake8" -version = "3.9.2" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -files = [ - {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, - {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, -] - -[package.dependencies] -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.7.0,<2.8.0" -pyflakes = ">=2.3.0,<2.4.0" - -[[package]] -name = "flake8-annotations" -version = "2.9.1" -description = "Flake8 Type Annotation Checks" -optional = false -python-versions = 
">=3.7,<4.0" -files = [ - {file = "flake8-annotations-2.9.1.tar.gz", hash = "sha256:11f09efb99ae63c8f9d6b492b75fe147fbc323179fddfe00b2e56eefeca42f57"}, - {file = "flake8_annotations-2.9.1-py3-none-any.whl", hash = "sha256:a4385158a7a9fc8af1d8820a2f4c8d03387997006a83f5f8bfe5bc6085bdf88a"}, -] - -[package.dependencies] -attrs = ">=21.4" -flake8 = ">=3.7" - -[[package]] -name = "flake8-bandit" -version = "3.0.0" -description = "Automated security testing with bandit and flake8." -optional = false -python-versions = ">=3.6" -files = [ - {file = "flake8_bandit-3.0.0-py2.py3-none-any.whl", hash = "sha256:61b617f4f7cdaa0e2b1e6bf7b68afb2b619a227bb3e3ae00dd36c213bd17900a"}, - {file = "flake8_bandit-3.0.0.tar.gz", hash = "sha256:54d19427e6a8d50322a7b02e1841c0a7c22d856975f3459803320e0e18e2d6a1"}, -] - -[package.dependencies] -bandit = ">=1.7.3" -flake8 = "*" -flake8-polyfill = "*" -pycodestyle = "*" - -[[package]] -name = "flake8-black" -version = "0.3.6" -description = "flake8 plugin to call black as a code style validator" -optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8-black-0.3.6.tar.gz", hash = "sha256:0dfbca3274777792a5bcb2af887a4cad72c72d0e86c94e08e3a3de151bb41c34"}, - {file = "flake8_black-0.3.6-py3-none-any.whl", hash = "sha256:fe8ea2eca98d8a504f22040d9117347f6b367458366952862ac3586e7d4eeaca"}, -] - -[package.dependencies] -black = ">=22.1.0" -flake8 = ">=3" -tomli = {version = "*", markers = "python_version < \"3.11\""} - -[package.extras] -develop = ["build", "twine"] - -[[package]] -name = "flake8-bugbear" -version = "23.3.12" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8-bugbear-23.3.12.tar.gz", hash = "sha256:e3e7f74c8a49ad3794a7183353026dabd68c74030d5f46571f84c1fb0eb79363"}, - {file = "flake8_bugbear-23.3.12-py3-none-any.whl", hash = "sha256:beb5c7efcd7ccc2039ef66a77bb8db925e7be3531ff1cb4d0b7030d0e2113d72"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -flake8 = ">=3.0.0" - -[package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] - -[[package]] -name = "flake8-isort" -version = "6.1.1" -description = "flake8 plugin that integrates isort" -optional = false -python-versions = ">=3.8" -files = [ - {file = "flake8_isort-6.1.1-py3-none-any.whl", hash = "sha256:0fec4dc3a15aefbdbe4012e51d5531a2eb5fa8b981cdfbc882296a59b54ede12"}, - {file = "flake8_isort-6.1.1.tar.gz", hash = "sha256:c1f82f3cf06a80c13e1d09bfae460e9666255d5c780b859f19f8318d420370b3"}, -] - -[package.dependencies] -flake8 = "*" -isort = ">=5.0.0,<6" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "flake8-polyfill" -version = "1.0.2" -description = "Polyfill package for Flake8 plugins" -optional = false -python-versions = "*" -files = [ - {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, - {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"}, -] - -[package.dependencies] -flake8 = "*" - [[package]] name = "h11" version = "0.14.0" @@ -1038,20 +878,6 @@ qtconsole = ["qtconsole"] test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"] test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." 
-optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.extras] -colors = ["colorama (>=0.4.6)"] - [[package]] name = "jedi" version = "0.19.1" @@ -1238,17 +1064,6 @@ files = [ [package.dependencies] traitlets = "*" -[[package]] -name = "mccabe" -version = "0.6.1" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = "*" -files = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, -] - [[package]] name = "mdurl" version = "0.1.2" @@ -1565,17 +1380,6 @@ files = [ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] -[[package]] -name = "pbr" -version = "6.0.0" -description = "Python Build Reasonableness" -optional = false -python-versions = ">=2.6" -files = [ - {file = "pbr-6.0.0-py2.py3-none-any.whl", hash = "sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda"}, - {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, -] - [[package]] name = "pexpect" version = "4.9.0" @@ -1799,17 +1603,6 @@ numpy = ">=1.16.6" [package.extras] test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] -[[package]] -name = "pycodestyle" -version = "2.7.0" -description = "Python style guide checker" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, - {file = "pycodestyle-2.7.0.tar.gz", hash 
= "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, -] - [[package]] name = "pycparser" version = "2.22" @@ -1944,17 +1737,6 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" -[[package]] -name = "pyflakes" -version = "2.3.1" -description = "passive checker of Python programs" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, - {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, -] - [[package]] name = "pygments" version = "2.18.0" @@ -1971,13 +1753,13 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyright" -version = "1.1.375" +version = "1.1.376" description = "Command line wrapper for pyright" optional = false python-versions = ">=3.7" files = [ - {file = "pyright-1.1.375-py3-none-any.whl", hash = "sha256:4c5e27eddeaee8b41cc3120736a1dda6ae120edf8523bb2446b6073a52f286e3"}, - {file = "pyright-1.1.375.tar.gz", hash = "sha256:7765557b0d6782b2fadabff455da2014476404c9e9214f49977a4e49dec19a0f"}, + {file = "pyright-1.1.376-py3-none-any.whl", hash = "sha256:0f2473b12c15c46b3207f0eec224c3cea2bdc07cd45dd4a037687cbbca0fbeff"}, + {file = "pyright-1.1.376.tar.gz", hash = "sha256:bffd63b197cd0810395bb3245c06b01f95a85ddf6bfa0e5644ed69c841e954dd"}, ] [package.dependencies] @@ -2374,13 +2156,13 @@ files = [ [[package]] name = "soupsieve" -version = "2.5" +version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." 
optional = true python-versions = ">=3.8" files = [ - {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, - {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] [[package]] @@ -2683,20 +2465,6 @@ typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\"" [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] -[[package]] -name = "stevedore" -version = "5.2.0" -description = "Manage dynamic plugins for Python applications" -optional = false -python-versions = ">=3.8" -files = [ - {file = "stevedore-5.2.0-py3-none-any.whl", hash = "sha256:1c15d95766ca0569cad14cb6272d4d31dae66b011a929d7c18219c176ea1b5c9"}, - {file = "stevedore-5.2.0.tar.gz", hash = "sha256:46b93ca40e1114cea93d738a6c1e365396981bb6bb78c27045b7587c9473544d"}, -] - -[package.dependencies] -pbr = ">=2.0.0,<2.1.0 || >2.1.0" - [[package]] name = "tomli" version = "2.0.1" @@ -2756,13 +2524,13 @@ files = [ [[package]] name = "types-setuptools" -version = "71.1.0.20240806" +version = "71.1.0.20240813" description = "Typing stubs for setuptools" optional = false python-versions = ">=3.8" files = [ - {file = "types-setuptools-71.1.0.20240806.tar.gz", hash = "sha256:ae5e7b4d643ab9e99fc00ac00041804118cabe72a56183c30d524fb064897ad6"}, - {file = "types_setuptools-71.1.0.20240806-py3-none-any.whl", hash = "sha256:3bd8dd02039be0bb79ad880d8893b8eefcb022fabbeeb61245c61b20c9ab1ed0"}, + {file = "types-setuptools-71.1.0.20240813.tar.gz", hash = "sha256:94ff4f0af18c7c24ac88932bcb0f5655fb7187a001b7c61e53a1bfdaf9877b54"}, + {file = 
"types_setuptools-71.1.0.20240813-py3-none-any.whl", hash = "sha256:d9d9ba2936f5d3b47b59ae9bf65942a60063ac1d6bbee180a8a79fbb43f22ce5"}, ] [[package]] @@ -2806,13 +2574,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.30.5" +version = "0.30.6" description = "The lightning-fast ASGI server." optional = true python-versions = ">=3.8" files = [ - {file = "uvicorn-0.30.5-py3-none-any.whl", hash = "sha256:b2d86de274726e9878188fa07576c9ceeff90a839e2b6e25c917fe05f5a6c835"}, - {file = "uvicorn-0.30.5.tar.gz", hash = "sha256:ac6fdbd4425c5fd17a9fe39daf4d4d075da6fdc80f653e5894cdc2fd98752bee"}, + {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"}, + {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"}, ] [package.dependencies] @@ -2845,43 +2613,46 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "watchdog" -version = "4.0.1" +version = "4.0.2" description = "Filesystem events monitoring" optional = false python-versions = ">=3.8" files = [ - {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"}, - {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"}, - {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"}, - {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, - {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"}, - 
{file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"}, - {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"}, - {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"}, - {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"}, - {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"}, + {file = 
"watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c50f148b31b03fbadd6d0b5980e38b558046b127dc483e5e4505fcef250f9503"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c7d4bf585ad501c5f6c980e7be9c4f15604c7cc150e942d82083b31a7548930"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:914285126ad0b6eb2258bbbcb7b288d9dfd655ae88fa28945be05a7b475a800b"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:984306dc4720da5498b16fc037b36ac443816125a3705dfde4fd90652d8028ef"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1cdcfd8142f604630deef34722d695fb455d04ab7cfe9963055df1fc69e6727a"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7ab624ff2f663f98cd03c8b7eedc09375a911794dfea6bf2a359fcc266bff29"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:132937547a716027bd5714383dfc40dc66c26769f1ce8a72a859d6a48f371f3a"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:cd67c7df93eb58f360c43802acc945fa8da70c675b6fa37a241e17ca698ca49b"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcfd02377be80ef3b6bc4ce481ef3959640458d6feaae0bd43dd90a43da90a7d"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:980b71510f59c884d684b3663d46e7a14b457c9611c481e5cef08f4dd022eed7"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:aa160781cafff2719b663c8a506156e9289d111d80f3387cf3af49cedee1f040"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f6ee8dedd255087bc7fe82adf046f0b75479b989185fb0bdf9a98b612170eac7"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:0b4359067d30d5b864e09c8597b112fe0a0a59321a0f331498b013fb097406b4"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:770eef5372f146997638d737c9a3c597a3b41037cfbc5c41538fc27c09c3a3f9"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eeea812f38536a0aa859972d50c76e37f4456474b02bd93674d1947cf1e39578"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b2c45f6e1e57ebb4687690c05bc3a2c1fb6ab260550c4290b8abb1335e0fd08b"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:10b6683df70d340ac3279eff0b2766813f00f35a1d37515d2c99959ada8f05fa"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7c739888c20f99824f7aa9d31ac8a97353e22d0c0e54703a547a218f6637eb3"}, + {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c100d09ac72a8a08ddbf0629ddfa0b8ee41740f9051429baa8e31bb903ad7508"}, + {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f5315a8c8dd6dd9425b974515081fc0aadca1d1d61e078d2246509fd756141ee"}, + {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2d468028a77b42cc685ed694a7a550a8d1771bb05193ba7b24006b8241a571a1"}, + {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f15edcae3830ff20e55d1f4e743e92970c847bcddc8b7509bcd172aa04de506e"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:936acba76d636f70db8f3c66e76aa6cb5136a936fc2a5088b9ce1c7a3508fc83"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:e252f8ca942a870f38cf785aef420285431311652d871409a64e2a0a52a2174c"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:0e83619a2d5d436a7e58a1aea957a3c1ccbf9782c43c0b4fed80580e5e4acd1a"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:88456d65f207b39f1981bf772e473799fcdc10801062c36fd5ad9f9d1d463a73"}, + 
{file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:32be97f3b75693a93c683787a87a0dc8db98bb84701539954eef991fb35f5fbc"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:c82253cfc9be68e3e49282831afad2c1f6593af80c0daf1287f6a92657986757"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c0b14488bd336c5b1845cee83d3e631a1f8b4e9c5091ec539406e4a324f882d8"}, + {file = "watchdog-4.0.2-py3-none-win32.whl", hash = "sha256:0d8a7e523ef03757a5aa29f591437d64d0d894635f8a50f370fe37f913ce4e19"}, + {file = "watchdog-4.0.2-py3-none-win_amd64.whl", hash = "sha256:c344453ef3bf875a535b0488e3ad28e341adbd5a9ffb0f7d62cefacc8824ef2b"}, + {file = "watchdog-4.0.2-py3-none-win_ia64.whl", hash = "sha256:baececaa8edff42cd16558a639a9b0ddf425f93d892e8392a56bf904f5eff22c"}, + {file = "watchdog-4.0.2.tar.gz", hash = "sha256:b4dfbb6c49221be4535623ea4474a4d6ee0a9cef4a80b20c28db4d858b64e270"}, ] [package.extras] @@ -3114,13 +2885,13 @@ tests-strict = ["pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pyt [[package]] name = "zipp" -version = "3.19.2" +version = "3.20.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, + {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, ] [package.extras] @@ -3135,4 +2906,4 @@ pandas = ["pandas"] [metadata] lock-version = "2.0" python-versions = ">=3.9" -content-hash = "ef32917f99ff25db175974e568410d90b675fa0e0db04dbb496631095c0a48dc" +content-hash = 
"84527e4dbc652addb80e0cb29826237967d12fad4725cd0f0afd1a7029307471" From 688fe13f1589103c7263b6f94de9fe6ee43a5284 Mon Sep 17 00:00:00 2001 From: Thomas Aarholt Date: Wed, 14 Aug 2024 19:05:03 +0200 Subject: [PATCH 10/11] Add noqa UP007 to BaseModel to prevent transforming to X | Y annotations --- tests/test_model.py | 20 ++++++++++---------- tests/test_validators.py | 28 ++++++++++++++-------------- 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/tests/test_model.py b/tests/test_model.py index 9e263f4..2d47498 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -252,11 +252,11 @@ def test_model_joins() -> None: class Left(pt.Model): left: int = pt.Field(gt=20) - opt_left: int | None = None + opt_left: Optional[int] = None # noqa: UP007 class Right(pt.Model): right: int = pt.Field(gt=20) - opt_right: int | None = None + opt_right: Optional[int] = None # noqa: UP007 def test_model_validator(model: type[pt.Model]) -> None: """Test if all field validators have been included correctly.""" @@ -295,7 +295,7 @@ def test_model_selects() -> None: """It should produce models compatible with select statements.""" class MyModel(pt.Model): - a: int | None + a: Optional[int] # noqa: UP007 b: int = pt.Field(gt=10) MySubModel = MyModel.select("b") @@ -321,7 +321,7 @@ def test_model_prefix_and_suffix() -> None: """It should produce models where all fields have been prefixed/suffixed.""" class MyModel(pt.Model): - a: int | None + a: Optional[int] # noqa: UP007 b: str NewModel = MyModel.prefix("pre_").suffix("_post") @@ -333,7 +333,7 @@ def test_model_field_renaming() -> None: """It should be able to change its field names.""" class MyModel(pt.Model): - a: int | None + a: Optional[int] # noqa: UP007 b: str NewModel = MyModel.rename({"b": "B"}) @@ -425,7 +425,7 @@ def validate_model_schema(schema) -> None: class ParentModel(pt.Model): a: int b: Model - c: float | None = None + c: Optional[float] = None # noqa: UP007 schema = ParentModel.model_schema 
validate_model_schema( @@ -445,13 +445,13 @@ def test_nullable_columns() -> None: """Ensure columns are correctly nullable.""" class Test1(pt.Model): - foo: str | None = pt.Field(dtype=pl.String) + foo: Optional[str] = pt.Field(dtype=pl.String) # noqa: UP007 assert Test1.nullable_columns == {"foo"} assert set(Test1.valid_dtypes["foo"]) == {pl.String} class Test2(pt.Model): - foo: int | None = pt.Field(dtype=pl.UInt32) + foo: Optional[int] = pt.Field(dtype=pl.UInt32) # noqa: UP007 assert Test2.nullable_columns == {"foo"} assert set(Test2.valid_dtypes["foo"]) == {pl.UInt32} @@ -476,7 +476,7 @@ class Test2(pt.Model): with pytest.raises(ValueError, match="Invalid dtype UInt32"): class Test3(pt.Model): - foo: str | None = pt.Field(dtype=pl.UInt32) + foo: Optional[str] = pt.Field(dtype=pl.UInt32) # noqa: UP007 Test3.validate_schema() @@ -536,7 +536,7 @@ class SubModel(pt.Model): class Test(pt.Model): a: int b: int - c: SubModel | None + c: Optional[SubModel] # noqa: UP007 df = Test.examples({"a": range(5), "c": None}) Test.validate(df.cast()) diff --git a/tests/test_validators.py b/tests/test_validators.py index 90e1460..186da34 100644 --- a/tests/test_validators.py +++ b/tests/test_validators.py @@ -33,12 +33,12 @@ def test_is_optional() -> None: ) def test_is_optional_with_pipe_operator() -> None: """It should return True for optional types.""" - assert is_optional(int | None) # typing: ignore # pragma: noqa # pyright: ignore + assert is_optional(Optional[int]) # noqa: UP007 def test_dewrap_optional() -> None: """It should return the inner type of Optional types.""" - assert unwrap_optional(Optional[int]) is int + assert unwrap_optional(Optional[int]) is int # noqa: UP007 assert unwrap_optional(Union[int, None]) is int assert unwrap_optional(int) is int @@ -49,8 +49,8 @@ def test_dewrap_optional() -> None: ) def test_dewrap_optional_with_pipe_operator() -> None: """It should return the inner type of Optional types.""" - assert ( # typing: ignore # pragma: noqa # 
pyright: ignore - unwrap_optional(int | None) is int + assert ( + unwrap_optional(Optional[int]) is int # noqa: UP007 ) @@ -163,7 +163,7 @@ def test_validate_non_nullable_columns() -> None: class SmallModel(pt.Model): column_1: int - column_2: int | None = None + column_2: Optional[int] = None # noqa: UP007 # We insert nulls into a non-optional column, causing an exception wrong_nulls_df = pl.DataFrame().with_columns( @@ -383,7 +383,7 @@ class ABCEnum(enum.Enum): THREE = "c" class EnumModel(pt.Model): - column: ABCEnum | None + column: Optional[ABCEnum] # noqa: UP007 valid_df = pl.DataFrame({"column": ["a", "b", "b", "c"]}) validate(dataframe=valid_df, schema=EnumModel) @@ -441,7 +441,7 @@ def test_optional_literal_enum_validation() -> None: """Test validation of optional typing.Literal-typed fields.""" class EnumModel(pt.Model): - column: Literal["a", "b", "c"] | None + column: Optional[Literal["a", "b", "c"]] # noqa: UP007 valid_df = pl.DataFrame({"column": ["a", "b", "b", "c"]}) validate(dataframe=valid_df, schema=EnumModel) @@ -793,7 +793,7 @@ def test_optional_enum() -> None: class OptionalEnumModel(pt.Model): # Old type annotation syntax - optional_enum: Literal["A", "B"] | None + optional_enum: Optional[Literal["A", "B"]] # noqa: UP007 df = pl.DataFrame({"optional_enum": ["A", "B", None]}) OptionalEnumModel.validate(df) @@ -808,9 +808,9 @@ def test_optional_pipe_operator() -> None: class OptionalEnumModel(pt.Model): # Old type annotation syntax - optional_enum_1: Literal["A", "B"] | None + optional_enum_1: Optional[Literal["A", "B"]] # noqa: UP007 # New type annotation syntax - optional_enum_2: Literal["A", "B"] | None # type: ignore + optional_enum_2: Optional[Literal["A", "B"]] # noqa: UP007 df = pl.DataFrame( { @@ -832,9 +832,9 @@ def test_validation_of_list_dtypes() -> None: class ListModel(pt.Model): int_list: list[int] - int_or_null_list: list[int | None] - nullable_int_list: list[int] | None - nullable_int_or_null_list: list[int | None] | None + 
int_or_null_list: list[Optional[int]] # noqa: UP007 + nullable_int_list: Optional[list[int]] # noqa: UP007 + nullable_int_or_null_list: Optional[list[Optional[int]]] # noqa: UP007 valid_df = pl.DataFrame( { @@ -866,7 +866,7 @@ def test_nested_field_attrs() -> None: """Ensure that constraints are respected even when embedded inside 'anyOf'.""" class Test(pt.Model): - foo: int | None = pt.Field( + foo: Optional[int] = pt.Field( # noqa: UP007 dtype=pl.Int64, ge=0, le=100, constraints=pt.field.sum() == 100 ) From 6dc6ccd04a440e3f9f950f4c48f1c04d5a81fb95 Mon Sep 17 00:00:00 2001 From: Thomas Aarholt Date: Wed, 14 Aug 2024 19:41:50 +0200 Subject: [PATCH 11/11] Run ruff correctly --- noxfile.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/noxfile.py b/noxfile.py index 34a54b3..e5bb49c 100644 --- a/noxfile.py +++ b/noxfile.py @@ -110,8 +110,8 @@ def lint(session): "darglint", "ruff", ) - session.run("ruff check", *args) - session.run("ruff format", *args) + session.run("ruff", "check", *args) + session.run("ruff", "format", *args) @nox.session(python="3.12")