Commit
drop python 3.8 - support 3.12 (#114)
* drop python 3.8 - support 3.12

* fix 3.9 typing

* use optional

* use optional and union for pydantic only

* use future annotations

* upgrade tests
malmans2 authored Feb 14, 2024
1 parent e5a7567 commit 9e84b5b
Showing 19 changed files with 153 additions and 128 deletions.
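All of the typing changes below follow one pattern: with Python 3.8 dropped, builtin generics (dict, list, set) replace typing.Dict/List/Set, and PEP 604 unions (X | None) replace Optional/Union wherever annotations are not evaluated at runtime. A minimal sketch of the before/after style (illustrative names, not code from this repository):

# Python 3.8-compatible style being removed:
#     from typing import Dict, Optional
#     def filter_sizes(sizes: Dict[str, int], prefix: Optional[str] = None) -> Dict[str, int]: ...
#
# Python 3.9+ style used in this commit: `from __future__ import annotations`
# (PEP 563) keeps annotations unevaluated, so `str | None` (PEP 604) is accepted
# on 3.9 even though the runtime `|` operator on types only exists from 3.10.
from __future__ import annotations


def filter_sizes(sizes: dict[str, int], prefix: str | None = None) -> dict[str, int]:
    # Keep only the entries whose key starts with `prefix` (keep all if None).
    return {k: v for k, v in sizes.items() if prefix is None or k.startswith(prefix)}


print(filter_sizes({"a/x.nc": 1, "b/y.nc": 2}, prefix="a"))  # {'a/x.nc': 1}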
4 changes: 3 additions & 1 deletion .github/workflows/on-push.yml
@@ -149,7 +149,9 @@ jobs:
     strategy:
       matrix:
         include:
-          - python-version: '3.8'
+          - python-version: '3.9'
             extra: -ci
+          - python-version: '3.12'
+            extra: -ci
           - python-version: '3.11'
             extra: -integration
1 change: 1 addition & 0 deletions cacholote/cache.py
@@ -14,6 +14,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from __future__ import annotations

 import functools
 import json
41 changes: 20 additions & 21 deletions cacholote/clean.py
@@ -13,13 +13,14 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from __future__ import annotations

 import collections
 import datetime
 import functools
 import json
 import posixpath
-from typing import Any, Callable, Dict, List, Literal, Optional, Set, Union
+from typing import Any, Callable, Literal, Optional

 import pydantic
 import sqlalchemy as sa
@@ -29,10 +30,10 @@


 def _delete_cache_file(
-    obj: Dict[str, Any],
-    session: Optional[sa.orm.Session] = None,
-    cache_entry_id: Optional[int] = None,
-    sizes: Optional[Dict[str, int]] = None,
+    obj: dict[str, Any],
+    session: sa.orm.Session | None = None,
+    cache_entry_id: int | None = None,
+    sizes: dict[str, int] | None = None,
     dry_run: bool = False,
 ) -> Any:
     logger = config.get().logger
@@ -82,9 +83,7 @@ def _delete_cache_entry(
     json.loads(cache_entry._result_as_string, object_hook=_delete_cache_file)


-def delete(
-    func_to_del: Union[str, Callable[..., Any]], *args: Any, **kwargs: Any
-) -> None:
+def delete(func_to_del: str | Callable[..., Any], *args: Any, **kwargs: Any) -> None:
     """Delete function previously cached.

     Parameters
@@ -112,7 +111,7 @@ def __init__(self) -> None:
         urldir = self.fs.unstrip_protocol(self.dirname)

         self.logger.info("get disk usage of cache files")
-        self.sizes: Dict[str, int] = collections.defaultdict(lambda: 0)
+        self.sizes: dict[str, int] = collections.defaultdict(int)
         for path, size in self.fs.du(self.dirname, total=False).items():
             # Group dirs
             urlpath = self.fs.unstrip_protocol(path)
@@ -129,7 +128,7 @@ def size(self) -> int:
     def stop_cleaning(self, maxsize: int) -> bool:
         return self.size <= maxsize

-    def get_unknown_files(self, lock_validity_period: Optional[float]) -> Set[str]:
+    def get_unknown_files(self, lock_validity_period: float | None) -> set[str]:
         self.logger.info("get unknown files")

         utcnow = utils.utcnow()
@@ -161,7 +160,7 @@ def get_unknown_files(self, lock_validity_period: Optional[float]) -> Set[str]:
         return set(unknown_sizes)

     def delete_unknown_files(
-        self, lock_validity_period: Optional[float], recursive: bool
+        self, lock_validity_period: float | None, recursive: bool
     ) -> None:
         for urlpath in self.get_unknown_files(lock_validity_period):
             size = self.sizes.pop(urlpath)
@@ -174,9 +173,9 @@ def delete_unknown_files(
     @staticmethod
     @pydantic.validate_call
     def _get_tag_filters(
-        tags_to_clean: Optional[List[Optional[str]]],
-        tags_to_keep: Optional[List[Optional[str]]],
-    ) -> List[sa.ColumnElement[bool]]:
+        tags_to_clean: Optional[list[Optional[str]]],
+        tags_to_keep: Optional[list[Optional[str]]],
+    ) -> list[sa.ColumnElement[bool]]:
         if (tags_to_clean is not None) and (tags_to_keep is not None):
             raise ValueError("tags_to_clean/keep are mutually exclusive.")

@@ -205,8 +204,8 @@ def _get_tag_filters(
     @pydantic.validate_call
     def _get_method_sorters(
         method: Literal["LRU", "LFU"],
-    ) -> List[sa.orm.InstrumentedAttribute[Any]]:
-        sorters: List[sa.orm.InstrumentedAttribute[Any]] = []
+    ) -> list[sa.orm.InstrumentedAttribute[Any]]:
+        sorters: list[sa.orm.InstrumentedAttribute[Any]] = []
         if method == "LRU":
             sorters.extend([database.CacheEntry.timestamp, database.CacheEntry.counter])
         elif method == "LFU":
@@ -220,8 +219,8 @@ def delete_cache_files(
         self,
         maxsize: int,
         method: Literal["LRU", "LFU"],
-        tags_to_clean: Optional[List[Optional[str]]],
-        tags_to_keep: Optional[List[Optional[str]]],
+        tags_to_clean: list[str | None] | None,
+        tags_to_keep: list[str | None] | None,
     ) -> None:
         filters = self._get_tag_filters(tags_to_clean, tags_to_keep)
         sorters = self._get_method_sorters(method)
@@ -269,9 +268,9 @@ def clean_cache_files(
     method: Literal["LRU", "LFU"] = "LRU",
     delete_unknown_files: bool = False,
     recursive: bool = False,
-    lock_validity_period: Optional[float] = None,
-    tags_to_clean: Optional[List[Optional[str]]] = None,
-    tags_to_keep: Optional[List[Optional[str]]] = None,
+    lock_validity_period: float | None = None,
+    tags_to_clean: list[str | None] | None = None,
+    tags_to_keep: list[str | None] | None = None,
 ) -> None:
     """Clean cache files.
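The split above mirrors the "use optional and union for pydantic only" bullet in the commit message: @pydantic.validate_call resolves annotations at call time, and on Python 3.9 the runtime `|` operator between types does not exist, so decorated signatures such as _get_tag_filters keep typing.Optional while undecorated ones such as delete_cache_files and clean_cache_files switch to X | None. A minimal sketch of the distinction, assuming pydantic v2 and hypothetical function names:

from __future__ import annotations

from typing import Optional

import pydantic


@pydantic.validate_call
def keep_tags(tags: Optional[list[Optional[str]]] = None) -> list[Optional[str]]:
    # pydantic evaluates the (stringified) annotations at runtime, so a
    # `list[str | None] | None` annotation here can fail to resolve on 3.9.
    return tags or []


def drop_tags(tags: list[str | None] | None = None) -> list[str | None]:
    # No runtime validation: with `from __future__ import annotations` this
    # annotation stays a string, so the PEP 604 syntax is fine on 3.9.
    return tags or []


print(keep_tags(["latest", None]))  # ['latest', None]
print(drop_tags(None))              # []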
30 changes: 15 additions & 15 deletions cacholote/config.py
@@ -20,7 +20,7 @@
 import pathlib
 import tempfile
 from types import TracebackType
-from typing import Any, Dict, Literal, Optional, Tuple, Type, Union
+from typing import Any, Literal, Optional, Union

 import fsspec
 import pydantic
@@ -31,7 +31,7 @@

 from . import database

-_SETTINGS: Optional[Settings] = None
+_SETTINGS: Settings | None = None
 _DEFAULT_CACHE_DIR = pathlib.Path(tempfile.gettempdir()) / "cacholote"
 _DEFAULT_CACHE_DIR.mkdir(exist_ok=True)
 _DEFAULT_CACHE_DB_URLPATH = f"sqlite:///{_DEFAULT_CACHE_DIR / 'cacholote.db'}"
@@ -44,11 +44,11 @@
 class Settings(pydantic_settings.BaseSettings):
     use_cache: bool = True
     cache_db_urlpath: Optional[str] = _DEFAULT_CACHE_DB_URLPATH
-    create_engine_kwargs: Dict[str, Any] = {}
-    sessionmaker: Optional[sa.orm.sessionmaker] = None  # type: ignore[type-arg]
+    create_engine_kwargs: dict[str, Any] = {}
+    sessionmaker: Optional[sa.orm.sessionmaker[sa.orm.Session]] = None
     cache_files_urlpath: str = _DEFAULT_CACHE_FILES_URLPATH
     cache_files_urlpath_readonly: Optional[str] = None
-    cache_files_storage_options: Dict[str, Any] = {}
+    cache_files_storage_options: dict[str, Any] = {}
     xarray_cache_type: Literal[
         "application/netcdf", "application/x-grib", "application/vnd+zarr"
     ] = "application/netcdf"
@@ -64,23 +64,23 @@ class Settings(pydantic_settings.BaseSettings):

     @pydantic.field_validator("create_engine_kwargs")
     def validate_create_engine_kwargs(
-        cls: pydantic_settings.BaseSettings, create_engine_kwargs: Dict[str, Any]
-    ) -> Dict[str, Any]:
+        cls: pydantic_settings.BaseSettings, create_engine_kwargs: dict[str, Any]
+    ) -> dict[str, Any]:
         poolclass = create_engine_kwargs.get("poolclass")
         if isinstance(poolclass, str):
             create_engine_kwargs["poolclass"] = getattr(sa.pool, poolclass)
         return create_engine_kwargs

     @pydantic.field_validator("expiration")
     def validate_expiration(
-        cls: pydantic_settings.BaseSettings, expiration: Optional[datetime.datetime]
-    ) -> Optional[datetime.datetime]:
+        cls: pydantic_settings.BaseSettings, expiration: datetime.datetime | None
+    ) -> datetime.datetime | None:
         if expiration is not None and expiration.tzinfo is None:
             raise ValueError(f"Expiration is missing the timezone info. {expiration=}")
         return expiration

     @pydantic.model_validator(mode="after")
-    def make_cache_dir(self) -> "Settings":
+    def make_cache_dir(self) -> Settings:
         fs, _, (urlpath, *_) = fsspec.get_fs_token_paths(
             self.cache_files_urlpath,
             storage_options=self.cache_files_storage_options,
@@ -89,7 +89,7 @@ def make_cache_dir(self) -> "Settings":
         return self

     @property
-    def instantiated_sessionmaker(self) -> sa.orm.sessionmaker:  # type: ignore[type-arg]
+    def instantiated_sessionmaker(self) -> sa.orm.sessionmaker[sa.orm.Session]:
         if self.sessionmaker is None:
             if self.cache_db_urlpath is None:
                 raise ValueError("Provide either `sessionmaker` or `cache_db_urlpath`.")
@@ -173,15 +173,15 @@ def __enter__(self) -> Settings:

     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[TracebackType],
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
     ) -> None:
         global _SETTINGS
         _SETTINGS = self._old_settings


-def reset(env_file: Optional[Union[str, Tuple[str]]] = None) -> None:
+def reset(env_file: str | tuple[str] | None = None) -> None:
     """Reset cacholote settings.

     Priority:
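The dropped `# type: ignore[type-arg]` comments in this file rely on sessionmaker being generic in SQLAlchemy 2.0, so it can be parametrized with the session class it produces. A small self-contained sketch (in-memory SQLite, not the repository's engine setup):

from __future__ import annotations

import sqlalchemy as sa
import sqlalchemy.orm

engine = sa.create_engine("sqlite:///:memory:")

# sessionmaker[Session] is a valid parametrization in SQLAlchemy 2.0, so the
# annotation below needs no type-arg ignore for mypy.
session_maker: sa.orm.sessionmaker[sa.orm.Session] = sa.orm.sessionmaker(engine)

with session_maker() as session:
    print(session.scalar(sa.text("SELECT 1")))  # 1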
15 changes: 9 additions & 6 deletions cacholote/database.py
@@ -13,12 +13,13 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from __future__ import annotations

 import datetime
 import functools
 import json
 import warnings
-from typing import Any, Dict
+from typing import Any

 import sqlalchemy as sa
 import sqlalchemy.orm
@@ -77,7 +78,7 @@ def _commit_or_rollback(session: sa.orm.Session) -> None:
         session.rollback()


-def _encode_kwargs(**kwargs: Any) -> Dict[str, Any]:
+def _encode_kwargs(**kwargs: Any) -> dict[str, Any]:
     encoded_kwargs = {}
     for key, value in kwargs.items():
         if isinstance(value, dict):
@@ -87,7 +88,7 @@ def _encode_kwargs(**kwargs: Any) -> Dict[str, Any]:
     return encoded_kwargs


-def _decode_kwargs(**kwargs: Any) -> Dict[str, Any]:
+def _decode_kwargs(**kwargs: Any) -> dict[str, Any]:
     decoded_kwargs = {}
     for key, value in kwargs.items():
         if key.startswith("_encoded_"):
@@ -97,12 +98,14 @@ def _decode_kwargs(**kwargs: Any) -> Dict[str, Any]:
     return decoded_kwargs


-@functools.lru_cache()
-def _cached_sessionmaker(url: str, **kwargs: Any) -> sa.orm.sessionmaker:  # type: ignore[type-arg]
+@functools.lru_cache
+def _cached_sessionmaker(
+    url: str, **kwargs: Any
+) -> sa.orm.sessionmaker[sa.orm.Session]:
     engine = sa.create_engine(url, **_decode_kwargs(**kwargs))
     Base.metadata.create_all(engine)
     return sa.orm.sessionmaker(engine)


-def cached_sessionmaker(url: str, **kwargs: Any) -> sa.orm.sessionmaker:  # type: ignore[type-arg]
+def cached_sessionmaker(url: str, **kwargs: Any) -> sa.orm.sessionmaker[sa.orm.Session]:
     return _cached_sessionmaker(url, **_encode_kwargs(**kwargs))
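The `_encode_kwargs`/`_decode_kwargs` pair exists because functools.lru_cache builds its cache key from the call arguments, and dict-valued kwargs (such as create_engine_kwargs) are unhashable; serializing them to JSON strings makes the call cacheable, and the cached function decodes them back. A rough sketch of the idea, with toy names and JSON-serializable values assumed:

from __future__ import annotations

import functools
import json
from typing import Any


def _encode(**kwargs: Any) -> dict[str, Any]:
    # Replace dict values with JSON strings so lru_cache can hash them.
    encoded = {}
    for key, value in kwargs.items():
        if isinstance(value, dict):
            encoded[f"_encoded_{key}"] = json.dumps(value)
        else:
            encoded[key] = value
    return encoded


def _decode(**kwargs: Any) -> dict[str, Any]:
    # Undo the encoding inside the cached function.
    decoded = {}
    for key, value in kwargs.items():
        if key.startswith("_encoded_"):
            decoded[key.removeprefix("_encoded_")] = json.loads(value)
        else:
            decoded[key] = value
    return decoded


@functools.lru_cache
def _expensive(url: str, **kwargs: Any) -> str:
    return f"{url} -> {_decode(**kwargs)}"


def expensive(url: str, **kwargs: Any) -> str:
    return _expensive(url, **_encode(**kwargs))


print(expensive("sqlite://", connect_args={"timeout": 5}))
print(expensive("sqlite://", connect_args={"timeout": 5}))  # served from the cache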
14 changes: 7 additions & 7 deletions cacholote/decode.py
@@ -14,11 +14,11 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
+from __future__ import annotations

 import importlib
 import json
-from typing import Any, Callable, Dict, List, Union
+from typing import Any, Callable


 def import_object(fully_qualified_name: str) -> Any:
@@ -37,13 +37,13 @@ class DecodeError(Exception):
     pass


-def decode_python_object(obj: Dict[str, Any]) -> Any:
+def decode_python_object(obj: dict[str, Any]) -> Any:
     if obj.get("type") == "python_object" and "fully_qualified_name" in obj:
         return import_object(obj["fully_qualified_name"])
     return None


-def decode_python_call(obj: Dict[str, Any]) -> Any:
+def decode_python_call(obj: dict[str, Any]) -> Any:
     if obj.get("type") == "python_call" and "callable" in obj:
         if callable(obj["callable"]):
             func = obj["callable"]
@@ -55,13 +55,13 @@ def decode_python_call(obj: Dict[str, Any]) -> Any:
     return None


-FILECACHE_DECODERS: List[Callable[[Dict[str, Any]], Any]] = [
+FILECACHE_DECODERS: list[Callable[[dict[str, Any]], Any]] = [
     decode_python_object,
     decode_python_call,
 ]


-def object_hook(obj: Dict[str, Any]) -> Any:
+def object_hook(obj: dict[str, Any]) -> Any:
     """Decode deserialized JSON data (``dict``)."""
     for decoder in reversed(FILECACHE_DECODERS):
         try:
@@ -74,7 +74,7 @@ def object_hook(obj: Dict[str, Any]) -> Any:
     return obj


-def loads(obj: Union[str, bytes, bytearray], **kwargs: Any) -> Any:
+def loads(obj: str | bytes | bytearray, **kwargs: Any) -> Any:
     """Decode serialized JSON data to a python object.

     Parameters
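For context on the object_hook decoders touched above: json.loads calls the hook on every decoded dict, and each decoder returns None until one recognizes the payload. A minimal sketch of that pattern; the "module:attr" payload format is assumed for illustration, not taken from cacholote's serializer:

from __future__ import annotations

import importlib
import json
from typing import Any


def decode_python_object(obj: dict[str, Any]) -> Any:
    # Assumed payload shape: {"type": "python_object", "fully_qualified_name": "module:attr"}.
    if obj.get("type") == "python_object" and "fully_qualified_name" in obj:
        module, _, name = obj["fully_qualified_name"].partition(":")
        return getattr(importlib.import_module(module), name)
    return None


def object_hook(obj: dict[str, Any]) -> Any:
    decoded = decode_python_object(obj)
    return obj if decoded is None else decoded


func = json.loads(
    '{"type": "python_object", "fully_qualified_name": "math:sqrt"}',
    object_hook=object_hook,
)
print(func(4))  # 2.0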