Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: on-set callback #39

Merged
merged 3 commits into from
Nov 17, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/cacheout/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

__version__ = "0.15.0"

from .cache import Cache, RemovalCause
from .cache import UNSET, Cache, RemovalCause
from .fifo import FIFOCache
from .lfu import LFUCache
from .lifo import LIFOCache
Expand Down
25 changes: 20 additions & 5 deletions src/cacheout/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,13 @@
#: the default), and `exists` is whether the cache key exists or not.
T_ON_GET_CALLBACK = t.Optional[t.Callable[[t.Hashable, t.Any, bool], None]]

#: Callback that will be executed when a cache entry is set.

#: It is called with arguments ``(key, new_value, old_value)`` where `key` is the cache key,
#: `new_value` is the value being set,
#: and `old_value` is the value it replaced (if the key didn't exist before, it's ``UNSET``).
Comment on lines +37 to +41
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Some minor changes to comment:

#: Callback that will be executed when a cache entry is set. It is called with arguments
#: ``(key, new_value, old_value)`` where `key` is the cache key, `new_value` is the value is set,
#: and `old_value` is the value it replaced (if the key didn't exist before, it's :const:`UNSET`).

T_ON_SET_CALLBACK = t.Optional[t.Callable[[t.Hashable, t.Any, t.Any], None]]

#: Callback that will be executed when a cache entry is removed. It is called with arguments
#: ``(key, value, cause)`` where `key` is the cache key, `value` is the cached value at the time of
#: deletion, and `cause` is the reason the key was removed (see :class:`RemovalCause` for enumerated
Expand All @@ -50,15 +57,13 @@ class RemovalCause(Enum):

Attributes:
DELETE: indicates that the cache entry was deleted by delete() or delete_many() explicitly.
SET: indicates that the cache entry was replaced with a new value by set() or set_many().
EXPIRED: indicates that the cache entry was removed because it expired.
FULL: indicates that the cache entry was removed because cache has been full (reached the
maximum size limit).
POPITEM: indicates that the cache entry was deleted by popitem().
"""

DELETE = auto()
SET = auto()
EXPIRED = auto()
FULL = auto()
POPITEM = auto()
Expand Down Expand Up @@ -94,6 +99,8 @@ class Cache:
cache key.
on_get: Callback which will be executed when a cache entry is retrieved.
See :class:`T_ON_GET_CALLBACK` for details.
on_set: Callback which will be executed when a cache entry is set.
See :class:`T_ON_SET_CALLBACK` for details.
on_delete: Callback which will be executed when a cache entry is removed.
See :class:`T_ON_DELETE_CALLBACK` for details.
stats: Cache statistics.
Expand All @@ -112,13 +119,15 @@ def __init__(
default: t.Any = None,
enable_stats: bool = False,
on_get: T_ON_GET_CALLBACK = None,
on_set: T_ON_SET_CALLBACK = None,
on_delete: T_ON_DELETE_CALLBACK = None,
):
self.maxsize = maxsize
self.ttl = ttl
self.timer = timer
self.default = default
self.on_get = on_get
self.on_set = on_set
self.on_delete = on_delete
self.stats = CacheStatsTracker(self, enable=enable_stats)

Expand Down Expand Up @@ -381,17 +390,23 @@ def _set(self, key: t.Hashable, value: t.Any, ttl: t.Optional[T_TTL] = None) ->
if ttl is None:
ttl = self.ttl

old_value = UNSET
if key not in self._cache:
self.evict()
else:
old_value = self._cache[key]

# Delete key before setting it so that it moves to the end of the OrderedDict key list.
# Needed for cache strategies that rely on the ordering of when keys were last inserted.
self._delete(key, RemovalCause.SET)
self._delete(key)
self._cache[key] = value

if ttl and ttl > 0:
self._expire_times[key] = self.timer() + ttl

if self.on_set:
self.on_set(key, value, old_value)

def set_many(self, items: t.Mapping, ttl: t.Optional[T_TTL] = None) -> None:
"""
Set multiple cache keys at once.
Expand Down Expand Up @@ -421,13 +436,13 @@ def delete(self, key: t.Hashable) -> int:
with self._lock:
return self._delete(key, RemovalCause.DELETE)

def _delete(self, key: t.Hashable, cause: RemovalCause) -> int:
def _delete(self, key: t.Hashable, cause: t.Optional[RemovalCause] = None) -> int:
count = 0

try:
value = self._cache[key]
del self._cache[key]
if self.on_delete:
if cause and self.on_delete:
self.on_delete(key, value, cause)
count = 1
if cause == RemovalCause.FULL:
Expand Down
2 changes: 1 addition & 1 deletion src/cacheout/lfu.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def add(self, key: t.Hashable, value: t.Any, ttl: t.Optional[T_TTL] = None) -> N

add.__doc__ = Cache.add.__doc__

def _delete(self, key: t.Hashable, cause: RemovalCause) -> int:
def _delete(self, key: t.Hashable, cause: t.Optional[RemovalCause] = None) -> int:
count = super()._delete(key, cause)

try:
Expand Down
23 changes: 18 additions & 5 deletions tests/test_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

import pytest

from cacheout import Cache, RemovalCause
from cacheout import UNSET, Cache, RemovalCause


parametrize = pytest.mark.parametrize
Expand Down Expand Up @@ -726,10 +726,6 @@ def on_delete(key, value, cause):
cache.delete("DELETE")
assert log == f"DELETE=1, RemovalCause={RemovalCause.DELETE.value}"

cache.set("SET", 1)
cache.set("SET", 2)
assert log == f"SET=1, RemovalCause={RemovalCause.SET.value}"

cache.clear()
cache.set("POPITEM", 1)
cache.popitem()
Expand Down Expand Up @@ -765,6 +761,23 @@ def on_get(key, value, existed):
assert log == "miss=None, existed=False"


def test_cache_on_set(cache: Cache):
    """Test that the on_set callback is called with the key, new value, and replaced value."""
    log = {}

    def on_set(key, new_value, old_value):
        # Capture the callback arguments so the assertions below can inspect them.
        nonlocal log
        log = {"key": key, "new_value": new_value, "old_value": old_value}

    cache.on_set = on_set

    # First set: the key didn't exist before, so old_value is the UNSET sentinel.
    cache.set("a", 1)
    assert log == {"key": "a", "new_value": 1, "old_value": UNSET}

    # Second set: the key existed, so old_value is the value that was replaced.
    cache.set("a", 2)
    assert log == {"key": "a", "new_value": 2, "old_value": 1}


def test_cache_stats__disabled_by_default(cache: Cache):
"""Test that cache stats are disabled by default."""
assert cache.stats.is_enabled() is False
Expand Down