Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 7 additions & 6 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -60,12 +60,13 @@ async def another_calculation(url):

### Decorator Parameters

| Parameter | Type | Default | Description |
| ---------------- | ----------------- | ------- | ---------------------------------------------------- |
| `ttl` | `int \| float` | `300` | Time to live for cached items in seconds |
| `never_die` | `bool` | `False` | If True, cache refreshes automatically in background |
| `cache_key_func` | `Callable` | `None` | Custom function to generate cache keys |
| `ignore_fields` | `tuple[str, ...]` | `()` | Function parameters to exclude from cache key |
| Parameter | Type | Default | Description |
| ---------------- | --------------- | ------- | -------------------------------------------------------------- |
| `ttl` | `int \| float` | `300` | Time to live for cached items in seconds |
| `never_die` | `bool` | `False` | If True, cache refreshes automatically in background |
| `cache_key_func` | `Callable` | `None` | Custom function to generate cache keys |
| `ignore_fields` | `Sequence[str]` | `()` | Function parameters to exclude from cache key |
| `no_self` | `bool` | `False` | If True, ignores the first parameter (usually `self` or `cls`) |

### Custom Cache Key Function

Expand Down
9 changes: 5 additions & 4 deletions cachify/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,13 @@

__all__ = [
"__version__",
"cache",
"rcache",
"redis_cache",
"setup_redis_config",
"clear_never_die_registry",
"cache",
"DEFAULT_KEY_PREFIX",
"get_redis_config",
"reset_redis_config",
"DEFAULT_KEY_PREFIX",
"setup_redis_config",
"redis_cache",
"CacheKwargs",
]
20 changes: 14 additions & 6 deletions cachify/cache.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import functools
import inspect
from typing import Any, Callable, cast
from typing import Any, Callable, Sequence, cast

from cachify.features.never_die import register_never_die_function
from cachify.types import CacheConfig, CacheKeyFunction, F, Number
Expand Down Expand Up @@ -73,7 +73,8 @@ def base_cache(
ttl: Number,
never_die: bool,
cache_key_func: CacheKeyFunction | None,
ignore_fields: tuple[str, ...],
ignore_fields: Sequence[str],
no_self: bool,
config: CacheConfig,
) -> Callable[[F], F]:
"""
Expand All @@ -83,33 +84,40 @@ def base_cache(
ttl: Time to live for cached items in seconds
never_die: If True, the cache will never expire and will be recalculated based on the ttl
cache_key_func: Custom cache key function, used for more complex cache scenarios
ignore_fields: Tuple of strings with the function params to ignore when creating the cache key
ignore_fields: Sequence of strings with the function params to ignore when creating the cache key
no_self: if True, the first parameter (typically 'self' for methods) will be ignored when creating the cache key
config: Cache configuration specifying storage, locks, and never_die registration

Features:
- Works for both sync and async functions
- Only allows one execution at a time per function+args
- Makes subsequent calls wait for the first call to complete
"""
if cache_key_func and ignore_fields:

if cache_key_func and (ignore_fields or no_self):
raise ValueError("Either cache_key_func or ignore_fields can be provided, but not both")

def decorator(function: F) -> F:
ignore = tuple(ignore_fields)

if no_self:
ignore += function.__code__.co_varnames[:1]

if inspect.iscoroutinefunction(function):
return _async_decorator(
function=function,
ttl=ttl,
never_die=never_die,
cache_key_func=cache_key_func,
ignore_fields=ignore_fields,
ignore_fields=ignore,
config=config,
)
return _sync_decorator(
function=function,
ttl=ttl,
never_die=never_die,
cache_key_func=cache_key_func,
ignore_fields=ignore_fields,
ignore_fields=ignore,
config=config,
)

Expand Down
6 changes: 3 additions & 3 deletions cachify/features/never_die.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,19 +140,19 @@ def _refresh_never_die_caches():

if entry.loop.is_closed():
logger.debug(
f"Loop is closed, skipping future creation",
"Loop is closed, skipping future creation",
extra={"function": entry.function.__qualname__},
exc_info=True,
)
continue

coroutine = _run_async_function_and_cache(entry)
try:
coroutine = _run_async_function_and_cache(entry)
future = asyncio.run_coroutine_threadsafe(coroutine, entry.loop)
except RuntimeError:
coroutine.close()
logger.debug(
f"Loop is closed, skipping future creation",
"Loop is closed, skipping future creation",
extra={"function": entry.function.__qualname__},
exc_info=True,
)
Expand Down
17 changes: 11 additions & 6 deletions cachify/memory_cache.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,22 @@
import asyncio
import threading
from typing import Callable
from collections import defaultdict
from typing import Callable, Sequence

from cachify.cache import base_cache
from cachify.storage.memory_storage import MemoryStorage
from cachify.types import CacheConfig, CacheKeyFunction, F, Number
from cachify.utils.locks import ASYNC_LOCKS, SYNC_LOCKS

_CACHE_CLEAR_THREAD: threading.Thread | None = None
_CACHE_CLEAR_LOCK: threading.Lock = threading.Lock()

_ASYNC_LOCKS: defaultdict[str, asyncio.Lock] = defaultdict(asyncio.Lock)
_SYNC_LOCKS: defaultdict[str, threading.Lock] = defaultdict(threading.Lock)

_MEMORY_CONFIG = CacheConfig(
storage=MemoryStorage,
sync_lock=lambda cache_key: SYNC_LOCKS[cache_key],
async_lock=lambda cache_key: ASYNC_LOCKS[cache_key],
sync_lock=_SYNC_LOCKS.__getitem__,
async_lock=_ASYNC_LOCKS.__getitem__,
)


Expand All @@ -30,8 +34,9 @@ def cache(
ttl: Number = 300,
never_die: bool = False,
cache_key_func: CacheKeyFunction | None = None,
ignore_fields: tuple[str, ...] = (),
ignore_fields: Sequence[str] = (),
no_self: bool = False,
) -> Callable[[F], F]:
"""In-memory cache decorator. See `base_cache` for full documentation."""
_start_cache_clear_thread()
return base_cache(ttl, never_die, cache_key_func, ignore_fields, _MEMORY_CONFIG)
return base_cache(ttl, never_die, cache_key_func, ignore_fields, no_self, _MEMORY_CONFIG)
7 changes: 4 additions & 3 deletions cachify/redis_cache.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import Callable
from typing import Callable, Sequence

from cachify.cache import base_cache
from cachify.redis.lock import RedisLockManager
Expand All @@ -16,12 +16,13 @@ def redis_cache(
ttl: Number = 300,
never_die: bool = False,
cache_key_func: CacheKeyFunction | None = None,
ignore_fields: tuple[str, ...] = (),
ignore_fields: Sequence[str] = (),
no_self: bool = False,
) -> Callable[[F], F]:
"""
Redis cache decorator. See `base_cache` for full documentation.

Requires setup_redis_config() to be called before use.
Uses Redis for distributed caching across multiple processes/machines.
"""
return base_cache(ttl, never_die, cache_key_func, ignore_fields, _REDIS_CONFIG)
return base_cache(ttl, never_die, cache_key_func, ignore_fields, no_self, _REDIS_CONFIG)
15 changes: 5 additions & 10 deletions cachify/utils/arguments.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,12 @@
import hashlib
import inspect
import pickle
from collections.abc import Callable, Generator
from inspect import Signature
from typing import Any

from cachify.types import CacheKeyFunction
from cachify.utils.errors import CacheKeyError
from cachify.utils.functions import get_function_id


def _cache_key_fingerprint(value: object) -> str:
payload = pickle.dumps(value, protocol=pickle.HIGHEST_PROTOCOL)
return hashlib.blake2b(payload, digest_size=16).hexdigest()
from cachify.utils.hash import object_hash


def _iter_arguments(
Expand Down Expand Up @@ -54,12 +49,12 @@ def create_cache_key(
if not cache_key_func:
function_signature = inspect.signature(function)
items = tuple(_iter_arguments(function_signature, args, kwargs, ignore_fields))
return f"{function_id}:{_cache_key_fingerprint(items)}"
return f"{function_id}:{object_hash(items)}"

cache_key = cache_key_func(args, kwargs)
try:
return f"{function_id}:{_cache_key_fingerprint(cache_key)}"
return f"{function_id}:{object_hash(cache_key)}"
except TypeError as exc:
raise ValueError(
raise CacheKeyError(
"Cache key function must return a hashable cache key - be careful with mutable types (list, dict, set) and non built-in types"
) from exc
44 changes: 0 additions & 44 deletions cachify/utils/decorator_factory.py

This file was deleted.

2 changes: 2 additions & 0 deletions cachify/utils/errors.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
class CacheKeyError(ValueError):
    """Raised when a cache key cannot be built for a function call.

    Subclasses ``ValueError`` so existing callers that caught ``ValueError``
    from cache-key generation keep working unchanged.
    """
18 changes: 18 additions & 0 deletions cachify/utils/hash.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import hashlib
import pickle
from typing import Any

from cachify.utils.errors import CacheKeyError


def object_hash(value: Any) -> str:
    """Return a stable 32-character hex fingerprint of *value*.

    The value is serialized with :mod:`pickle` (highest protocol) and the
    resulting bytes are hashed with BLAKE2b using a 16-byte digest, giving a
    compact, deterministic identifier suitable for cache keys.

    Args:
        value: Any pickleable object.

    Returns:
        A 32-character lowercase hexadecimal digest string.

    Raises:
        CacheKeyError: If *value* (or anything it contains) cannot be pickled.
    """
    try:
        serialized = pickle.dumps(value, protocol=pickle.HIGHEST_PROTOCOL)
    except Exception as exc:
        # Surface serialization failures as a cache-specific error so callers
        # can distinguish "bad cache key" from unrelated pickling problems.
        raise CacheKeyError(
            "Unable to serialize object for hashing - ensure all parts of the object are pickleable. "
            "Hint: create a custom __reduce__ method for the suspected object if necessary."
        ) from exc

    digest = hashlib.blake2b(serialized, digest_size=16)
    return digest.hexdigest()
6 changes: 0 additions & 6 deletions cachify/utils/locks.py

This file was deleted.

78 changes: 76 additions & 2 deletions pytest/tests/mutable_arguments/test_async_mutable_args.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
import pytest
import socket
from collections.abc import Callable

from cachify.storage.memory_storage import MemoryStorage
import pytest
from cachify.memory_cache import cache
from cachify.storage.memory_storage import MemoryStorage
from cachify.utils.errors import CacheKeyError

TTL = 0.1

Expand Down Expand Up @@ -467,3 +469,75 @@ async def cached_func(items: list) -> int:
# Should still hit cache with [1, 2]
assert result1 == result2
assert call_count == 1


class TestUnpicklableArguments:
"""Tests for caching behavior with unpicklable arguments."""

@pytest.fixture(autouse=True)
def clear_cache(self):
MemoryStorage.clear()

class ReducePickleableObject:
def __init__(self, value):
self.value = value
self.callback = lambda: None

def __reduce__(self):
return (self.__class__, (self.value,))

@pytest.mark.asyncio
async def test_function_argument_raises_type_error(self):
@cache(ttl=TTL)
async def cached_func(func: Callable) -> int:
return 42

with pytest.raises(CacheKeyError):
await cached_func(lambda: None)

@pytest.mark.asyncio
async def test_open_file_argument_raises_type_error(self):
@cache(ttl=TTL)
async def cached_func(sock: socket.socket) -> int:
return 42

with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
with pytest.raises(CacheKeyError):
await cached_func(s)

@pytest.mark.asyncio
async def test_custom_object_argument(self):
call_count = 0

@cache(ttl=TTL)
async def cached_func(obj: TestUnpicklableArguments.ReducePickleableObject) -> int:
nonlocal call_count
call_count += 1
return call_count

result1 = await cached_func(self.ReducePickleableObject(10))
result2 = await cached_func(self.ReducePickleableObject(10))

assert result1 == result2
assert call_count == 1

@pytest.mark.asyncio
async def test_custom_object_with_no_self(self):
call_count = 0

class UnpickleableObject:
def __init__(self):
self.callback = lambda: None

@cache(ttl=TTL, no_self=True)
async def cached_method(self, value: int) -> int:
nonlocal call_count
call_count += 1
return call_count

obj = UnpickleableObject()
result1 = await obj.cached_method(42)
result2 = await obj.cached_method(42)

assert result1 == result2
assert call_count == 1
Loading