
Commit 9e84b5b

drop python 3.8 - support 3.12 (#114)
* drop python 3.8 - support 3.12
* fix 3.9 typing
* use optional
* use optional and union for pydantic only
* use future annotations
* upgrade tests
1 parent e5a7567 commit 9e84b5b

19 files changed (+153, -128 lines)
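
The commit message above summarizes the pattern applied throughout the files below: each module gains `from __future__ import annotations`, the deprecated `typing.Dict`/`List`/`Set`/`Tuple`/`Type` aliases become built-in generics, and `Optional`/`Union` become `X | None` unions, except where pydantic has to evaluate the annotations at runtime. A minimal before/after sketch of the style (illustrative names only, not taken from the diff):

from __future__ import annotations

from typing import Any

# Old, Python 3.8 compatible style:
#   from typing import Dict, List, Optional
#   def du(paths: Optional[List[str]] = None) -> Dict[str, Any]: ...

# New style used throughout this commit (Python 3.9+):
def du(paths: list[str] | None = None) -> dict[str, Any]:
    # With the __future__ import the annotations are stored as strings and
    # never evaluated, so the "X | None" syntax is accepted on Python 3.9.
    return {path: 0 for path in (paths or [])}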

.github/workflows/on-push.yml

+3-1
@@ -149,7 +149,9 @@ jobs:
     strategy:
       matrix:
         include:
-          - python-version: '3.8'
+          - python-version: '3.9'
+            extra: -ci
+          - python-version: '3.12'
             extra: -ci
           - python-version: '3.11'
             extra: -integration

cacholote/cache.py

+1
@@ -14,6 +14,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from __future__ import annotations

 import functools
 import json
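
The same one-line addition of `from __future__ import annotations` recurs in the modules below. It turns on PEP 563 deferred evaluation, so annotations are kept as plain strings and the 3.10-style unions never reach the 3.9 interpreter. A quick illustration (hypothetical function, not from the repository):

from __future__ import annotations


def example(x: int | None = None) -> dict[str, int]:
    return {} if x is None else {"x": x}


# Annotations stay as uninterpreted strings, so nothing here requires
# Python 3.10's runtime support for "|":
print(example.__annotations__)  # {'x': 'int | None', 'return': 'dict[str, int]'}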

cacholote/clean.py

+20-21
@@ -13,13 +13,14 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from __future__ import annotations

 import collections
 import datetime
 import functools
 import json
 import posixpath
-from typing import Any, Callable, Dict, List, Literal, Optional, Set, Union
+from typing import Any, Callable, Literal, Optional

 import pydantic
 import sqlalchemy as sa
@@ -29,10 +30,10 @@


 def _delete_cache_file(
-    obj: Dict[str, Any],
-    session: Optional[sa.orm.Session] = None,
-    cache_entry_id: Optional[int] = None,
-    sizes: Optional[Dict[str, int]] = None,
+    obj: dict[str, Any],
+    session: sa.orm.Session | None = None,
+    cache_entry_id: int | None = None,
+    sizes: dict[str, int] | None = None,
     dry_run: bool = False,
 ) -> Any:
     logger = config.get().logger
@@ -82,9 +83,7 @@ def _delete_cache_entry(
     json.loads(cache_entry._result_as_string, object_hook=_delete_cache_file)


-def delete(
-    func_to_del: Union[str, Callable[..., Any]], *args: Any, **kwargs: Any
-) -> None:
+def delete(func_to_del: str | Callable[..., Any], *args: Any, **kwargs: Any) -> None:
     """Delete function previously cached.

     Parameters
@@ -112,7 +111,7 @@ def __init__(self) -> None:
         urldir = self.fs.unstrip_protocol(self.dirname)

         self.logger.info("get disk usage of cache files")
-        self.sizes: Dict[str, int] = collections.defaultdict(lambda: 0)
+        self.sizes: dict[str, int] = collections.defaultdict(int)
         for path, size in self.fs.du(self.dirname, total=False).items():
             # Group dirs
             urlpath = self.fs.unstrip_protocol(path)
@@ -129,7 +128,7 @@ def size(self) -> int:
     def stop_cleaning(self, maxsize: int) -> bool:
         return self.size <= maxsize

-    def get_unknown_files(self, lock_validity_period: Optional[float]) -> Set[str]:
+    def get_unknown_files(self, lock_validity_period: float | None) -> set[str]:
         self.logger.info("get unknown files")

         utcnow = utils.utcnow()
@@ -161,7 +160,7 @@ def get_unknown_files(self, lock_validity_period: Optional[float]) -> Set[str]:
         return set(unknown_sizes)

     def delete_unknown_files(
-        self, lock_validity_period: Optional[float], recursive: bool
+        self, lock_validity_period: float | None, recursive: bool
     ) -> None:
         for urlpath in self.get_unknown_files(lock_validity_period):
             size = self.sizes.pop(urlpath)
@@ -174,9 +173,9 @@ def delete_unknown_files(
     @staticmethod
     @pydantic.validate_call
     def _get_tag_filters(
-        tags_to_clean: Optional[List[Optional[str]]],
-        tags_to_keep: Optional[List[Optional[str]]],
-    ) -> List[sa.ColumnElement[bool]]:
+        tags_to_clean: Optional[list[Optional[str]]],
+        tags_to_keep: Optional[list[Optional[str]]],
+    ) -> list[sa.ColumnElement[bool]]:
         if (tags_to_clean is not None) and (tags_to_keep is not None):
             raise ValueError("tags_to_clean/keep are mutually exclusive.")

@@ -205,8 +204,8 @@ def _get_tag_filters(
     @pydantic.validate_call
     def _get_method_sorters(
         method: Literal["LRU", "LFU"],
-    ) -> List[sa.orm.InstrumentedAttribute[Any]]:
-        sorters: List[sa.orm.InstrumentedAttribute[Any]] = []
+    ) -> list[sa.orm.InstrumentedAttribute[Any]]:
+        sorters: list[sa.orm.InstrumentedAttribute[Any]] = []
         if method == "LRU":
             sorters.extend([database.CacheEntry.timestamp, database.CacheEntry.counter])
         elif method == "LFU":
@@ -220,8 +219,8 @@ def delete_cache_files(
         self,
         maxsize: int,
         method: Literal["LRU", "LFU"],
-        tags_to_clean: Optional[List[Optional[str]]],
-        tags_to_keep: Optional[List[Optional[str]]],
+        tags_to_clean: list[str | None] | None,
+        tags_to_keep: list[str | None] | None,
     ) -> None:
         filters = self._get_tag_filters(tags_to_clean, tags_to_keep)
         sorters = self._get_method_sorters(method)
@@ -269,9 +268,9 @@ def clean_cache_files(
     method: Literal["LRU", "LFU"] = "LRU",
     delete_unknown_files: bool = False,
     recursive: bool = False,
-    lock_validity_period: Optional[float] = None,
-    tags_to_clean: Optional[List[Optional[str]]] = None,
-    tags_to_keep: Optional[List[Optional[str]]] = None,
+    lock_validity_period: float | None = None,
+    tags_to_clean: list[str | None] | None = None,
+    tags_to_keep: list[str | None] | None = None,
 ) -> None:
     """Clean cache files.

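
Note that `_get_tag_filters` keeps `Optional[list[Optional[str]]]` while the rest of the module switches to `|` unions: it sits under `@pydantic.validate_call`, and pydantic resolves the annotation strings at runtime, where `str | None` is not available on Python 3.9. This is the "use optional and union for pydantic only" item from the commit message. A minimal sketch of the constraint (assumes pydantic v2; the function is illustrative, not from the diff):

from __future__ import annotations

from typing import Optional

import pydantic


@pydantic.validate_call
def keep_tags(tags: Optional[list[Optional[str]]] = None) -> list[str]:
    # pydantic evaluates the annotation to build its validator, so it must be
    # resolvable on Python 3.9; "list[str | None] | None" would fail there.
    return [tag for tag in (tags or []) if tag is not None]


print(keep_tags(["lru", None, "lfu"]))  # ['lru', 'lfu']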

cacholote/config.py

+15-15
@@ -20,7 +20,7 @@
 import pathlib
 import tempfile
 from types import TracebackType
-from typing import Any, Dict, Literal, Optional, Tuple, Type, Union
+from typing import Any, Literal, Optional, Union

 import fsspec
 import pydantic
@@ -31,7 +31,7 @@

 from . import database

-_SETTINGS: Optional[Settings] = None
+_SETTINGS: Settings | None = None
 _DEFAULT_CACHE_DIR = pathlib.Path(tempfile.gettempdir()) / "cacholote"
 _DEFAULT_CACHE_DIR.mkdir(exist_ok=True)
 _DEFAULT_CACHE_DB_URLPATH = f"sqlite:///{_DEFAULT_CACHE_DIR / 'cacholote.db'}"
@@ -44,11 +44,11 @@
 class Settings(pydantic_settings.BaseSettings):
     use_cache: bool = True
     cache_db_urlpath: Optional[str] = _DEFAULT_CACHE_DB_URLPATH
-    create_engine_kwargs: Dict[str, Any] = {}
-    sessionmaker: Optional[sa.orm.sessionmaker] = None  # type: ignore[type-arg]
+    create_engine_kwargs: dict[str, Any] = {}
+    sessionmaker: Optional[sa.orm.sessionmaker[sa.orm.Session]] = None
     cache_files_urlpath: str = _DEFAULT_CACHE_FILES_URLPATH
     cache_files_urlpath_readonly: Optional[str] = None
-    cache_files_storage_options: Dict[str, Any] = {}
+    cache_files_storage_options: dict[str, Any] = {}
     xarray_cache_type: Literal[
         "application/netcdf", "application/x-grib", "application/vnd+zarr"
     ] = "application/netcdf"
@@ -64,23 +64,23 @@ class Settings(pydantic_settings.BaseSettings):

     @pydantic.field_validator("create_engine_kwargs")
     def validate_create_engine_kwargs(
-        cls: pydantic_settings.BaseSettings, create_engine_kwargs: Dict[str, Any]
-    ) -> Dict[str, Any]:
+        cls: pydantic_settings.BaseSettings, create_engine_kwargs: dict[str, Any]
+    ) -> dict[str, Any]:
         poolclass = create_engine_kwargs.get("poolclass")
         if isinstance(poolclass, str):
             create_engine_kwargs["poolclass"] = getattr(sa.pool, poolclass)
         return create_engine_kwargs

     @pydantic.field_validator("expiration")
     def validate_expiration(
-        cls: pydantic_settings.BaseSettings, expiration: Optional[datetime.datetime]
-    ) -> Optional[datetime.datetime]:
+        cls: pydantic_settings.BaseSettings, expiration: datetime.datetime | None
+    ) -> datetime.datetime | None:
         if expiration is not None and expiration.tzinfo is None:
             raise ValueError(f"Expiration is missing the timezone info. {expiration=}")
         return expiration

     @pydantic.model_validator(mode="after")
-    def make_cache_dir(self) -> "Settings":
+    def make_cache_dir(self) -> Settings:
         fs, _, (urlpath, *_) = fsspec.get_fs_token_paths(
             self.cache_files_urlpath,
             storage_options=self.cache_files_storage_options,
@@ -89,7 +89,7 @@ def make_cache_dir(self) -> "Settings":
         return self

     @property
-    def instantiated_sessionmaker(self) -> sa.orm.sessionmaker:  # type: ignore[type-arg]
+    def instantiated_sessionmaker(self) -> sa.orm.sessionmaker[sa.orm.Session]:
         if self.sessionmaker is None:
             if self.cache_db_urlpath is None:
                 raise ValueError("Provide either `sessionmaker` or `cache_db_urlpath`.")
@@ -173,15 +173,15 @@ def __enter__(self) -> Settings:

     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[TracebackType],
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
     ) -> None:
         global _SETTINGS
         _SETTINGS = self._old_settings


-def reset(env_file: Optional[Union[str, Tuple[str]]] = None) -> None:
+def reset(env_file: str | tuple[str] | None = None) -> None:
     """Reset cacholote settings.

     Priority:
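
The `validate_expiration` validator shown above is why `expiration` must be timezone-aware. A standalone sketch of the same check and how it behaves (illustrative only, not the package's public API):

from __future__ import annotations

import datetime


def validate_expiration(expiration: datetime.datetime | None) -> datetime.datetime | None:
    if expiration is not None and expiration.tzinfo is None:
        raise ValueError(f"Expiration is missing the timezone info. {expiration=}")
    return expiration


validate_expiration(datetime.datetime(2030, 1, 1, tzinfo=datetime.timezone.utc))  # accepted
try:
    validate_expiration(datetime.datetime(2030, 1, 1))  # naive datetime is rejected
except ValueError as exc:
    print(exc)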

cacholote/database.py

+9-6
@@ -13,12 +13,13 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from __future__ import annotations

 import datetime
 import functools
 import json
 import warnings
-from typing import Any, Dict
+from typing import Any

 import sqlalchemy as sa
 import sqlalchemy.orm
@@ -77,7 +78,7 @@ def _commit_or_rollback(session: sa.orm.Session) -> None:
         session.rollback()


-def _encode_kwargs(**kwargs: Any) -> Dict[str, Any]:
+def _encode_kwargs(**kwargs: Any) -> dict[str, Any]:
     encoded_kwargs = {}
     for key, value in kwargs.items():
         if isinstance(value, dict):
@@ -87,7 +88,7 @@ def _encode_kwargs(**kwargs: Any) -> Dict[str, Any]:
     return encoded_kwargs


-def _decode_kwargs(**kwargs: Any) -> Dict[str, Any]:
+def _decode_kwargs(**kwargs: Any) -> dict[str, Any]:
     decoded_kwargs = {}
     for key, value in kwargs.items():
         if key.startswith("_encoded_"):
@@ -97,12 +98,14 @@ def _decode_kwargs(**kwargs: Any) -> Dict[str, Any]:
     return decoded_kwargs


-@functools.lru_cache()
-def _cached_sessionmaker(url: str, **kwargs: Any) -> sa.orm.sessionmaker:  # type: ignore[type-arg]
+@functools.lru_cache
+def _cached_sessionmaker(
+    url: str, **kwargs: Any
+) -> sa.orm.sessionmaker[sa.orm.Session]:
     engine = sa.create_engine(url, **_decode_kwargs(**kwargs))
     Base.metadata.create_all(engine)
     return sa.orm.sessionmaker(engine)


-def cached_sessionmaker(url: str, **kwargs: Any) -> sa.orm.sessionmaker:  # type: ignore[type-arg]
+def cached_sessionmaker(url: str, **kwargs: Any) -> sa.orm.sessionmaker[sa.orm.Session]:
     return _cached_sessionmaker(url, **_encode_kwargs(**kwargs))
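
With SQLAlchemy 2.x, `sessionmaker` is generic, which is what lets the annotations above be written as `sa.orm.sessionmaker[sa.orm.Session]` and the old `# type: ignore[type-arg]` comments go away. A minimal usage sketch (assumes SQLAlchemy 2.x and an in-memory SQLite URL; not taken from the repository):

from __future__ import annotations

import sqlalchemy as sa
import sqlalchemy.orm


def make_sessionmaker(url: str = "sqlite://") -> sa.orm.sessionmaker[sa.orm.Session]:
    # Mirrors _cached_sessionmaker above, minus the caching and table creation.
    engine = sa.create_engine(url)
    return sa.orm.sessionmaker(engine)


Session = make_sessionmaker()
with Session() as session:  # type checkers now know this yields a Session
    print(session.execute(sa.text("select 1")).scalar_one())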

cacholote/decode.py

+7-7
@@ -14,11 +14,11 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
+from __future__ import annotations

 import importlib
 import json
-from typing import Any, Callable, Dict, List, Union
+from typing import Any, Callable


 def import_object(fully_qualified_name: str) -> Any:
@@ -37,13 +37,13 @@ class DecodeError(Exception):
     pass


-def decode_python_object(obj: Dict[str, Any]) -> Any:
+def decode_python_object(obj: dict[str, Any]) -> Any:
     if obj.get("type") == "python_object" and "fully_qualified_name" in obj:
         return import_object(obj["fully_qualified_name"])
     return None


-def decode_python_call(obj: Dict[str, Any]) -> Any:
+def decode_python_call(obj: dict[str, Any]) -> Any:
     if obj.get("type") == "python_call" and "callable" in obj:
         if callable(obj["callable"]):
             func = obj["callable"]
@@ -55,13 +55,13 @@ def decode_python_call(obj: Dict[str, Any]) -> Any:
     return None


-FILECACHE_DECODERS: List[Callable[[Dict[str, Any]], Any]] = [
+FILECACHE_DECODERS: list[Callable[[dict[str, Any]], Any]] = [
     decode_python_object,
     decode_python_call,
 ]


-def object_hook(obj: Dict[str, Any]) -> Any:
+def object_hook(obj: dict[str, Any]) -> Any:
     """Decode deserialized JSON data (``dict``)."""
     for decoder in reversed(FILECACHE_DECODERS):
         try:
@@ -74,7 +74,7 @@ def object_hook(obj: Dict[str, Any]) -> Any:
     return obj


-def loads(obj: Union[str, bytes, bytearray], **kwargs: Any) -> Any:
+def loads(obj: str | bytes | bytearray, **kwargs: Any) -> Any:
     """Decode serialized JSON data to a python object.

     Parameters
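
decode.py feeds the `FILECACHE_DECODERS` list into `json.loads` through `object_hook`: the hook is called on every decoded `dict`, and each decoder either claims it or returns `None` to pass it along. A stripped-down sketch of the mechanism (standalone; the "module:attr" name format and the simplified decoder are assumptions, not the package's actual encoding):

from __future__ import annotations

import importlib
import json
from typing import Any, Callable


def decode_python_object(obj: dict[str, Any]) -> Any:
    # Simplified stand-in for the decoder above: "module:attr" -> imported object.
    if obj.get("type") == "python_object" and "fully_qualified_name" in obj:
        module_name, _, attr = obj["fully_qualified_name"].partition(":")
        return getattr(importlib.import_module(module_name), attr)
    return None


DECODERS: list[Callable[[dict[str, Any]], Any]] = [decode_python_object]


def object_hook(obj: dict[str, Any]) -> Any:
    # json.loads calls this on every decoded dict; the first decoder that
    # recognizes the payload wins, otherwise the dict is returned unchanged.
    for decoder in reversed(DECODERS):
        decoded = decoder(obj)
        if decoded is not None:
            return decoded
    return obj


payload = '{"type": "python_object", "fully_qualified_name": "math:pi"}'
print(json.loads(payload, object_hook=object_hook))  # 3.141592653589793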

0 commit comments