Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions cachify/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ def _async_decorator(
@functools.wraps(function)
async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
skip_cache = kwargs.pop("skip_cache", False)
cache_key = create_cache_key(function, cache_key_func, ignore_fields, args, kwargs)
cache_key = create_cache_key(function, cache_key_func, ignore_fields, args, kwargs, config.process_isolated)

if cache_entry := await config.storage.aget(cache_key, skip_cache):
return cache_entry.result
Expand Down Expand Up @@ -49,7 +49,7 @@ def _sync_decorator(
@functools.wraps(function)
def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
skip_cache = kwargs.pop("skip_cache", False)
cache_key = create_cache_key(function, cache_key_func, ignore_fields, args, kwargs)
cache_key = create_cache_key(function, cache_key_func, ignore_fields, args, kwargs, config.process_isolated)

if cache_entry := config.storage.get(cache_key, skip_cache):
return cache_entry.result
Expand Down
1 change: 1 addition & 0 deletions cachify/features/never_die.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ def cache_key(self) -> str:
self.ignore_fields,
self.args,
self.kwargs,
self.config.process_isolated,
)

def __eq__(self, other: Any) -> bool:
Expand Down
1 change: 1 addition & 0 deletions cachify/memory_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
storage=MemoryStorage,
sync_lock=lambda cache_key: SYNC_LOCKS[cache_key],
async_lock=lambda cache_key: ASYNC_LOCKS[cache_key],
process_isolated=True,
)


Expand Down
1 change: 1 addition & 0 deletions cachify/redis_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
storage=RedisStorage,
sync_lock=RedisLockManager.sync_lock,
async_lock=RedisLockManager.async_lock,
process_isolated=False,
)


Expand Down
3 changes: 2 additions & 1 deletion cachify/types/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,11 +35,12 @@ def is_expired(self) -> bool:

@dataclass(frozen=True, slots=True)
class CacheConfig:
    """Configuration for cache, grouping storage, lock, never_die registration and process-isolated indicator."""

    # Backend that stores cache entries (e.g. MemoryStorage, RedisStorage).
    storage: "CacheStorage"
    # Factory returning a context-manager lock for a given cache key (sync path).
    sync_lock: Callable[[str], ContextManager]
    # Factory returning an async context-manager lock for a given cache key (async path).
    async_lock: Callable[[str], AsyncContextManager]
    # True when cached data never leaves this process (memory backend), which
    # permits best-effort id()-based key fingerprints; False for cross-process
    # backends such as Redis, where keys must be picklable.
    process_isolated: bool


class CacheEntryProtocol(Protocol):
Expand Down
67 changes: 62 additions & 5 deletions cachify/utils/arguments.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,75 @@
import hashlib
import inspect
import pickle
from collections.abc import Callable, Generator
from collections.abc import Callable, Generator, Mapping, Sequence, Set
from contextlib import suppress
from inspect import Signature
from typing import Any

from cachify.types import CacheKeyFunction
from cachify.utils.functions import get_function_id


def _cache_key_fingerprint(value: object) -> str:
payload = pickle.dumps(value, protocol=pickle.HIGHEST_PROTOCOL)
def _process_isolated_fingerprint(value: Any) -> str:
stack = [value]
seen = set()
result = hashlib.blake2b(digest_size=16)

while stack:
current = stack.pop()
current_id = id(current)

if current_id in seen:
continue

with suppress(TypeError, AttributeError, pickle.PicklingError):
result.update(pickle.dumps(current, protocol=pickle.HIGHEST_PROTOCOL))
continue

# Handle code objects to differentiate different lambda functions
with suppress(AttributeError, TypeError):
result.update(current.__code__.co_code)
continue

if isinstance(current, Sequence):
seen.add(current_id)
stack.extend(current)
continue

if isinstance(current, Set):
seen.add(current_id)
stack.extend(sorted(current))
continue

if isinstance(current, Mapping):
seen.add(current_id)
stack.extend(current.items())
continue

# Last resort: use the id of the object
result.update(current_id.to_bytes(8, "big", signed=True))

return result.hexdigest()


def _process_shared_fingerprint(value: Any) -> str:
try:
payload = pickle.dumps(value, protocol=pickle.HIGHEST_PROTOCOL)

except (pickle.PicklingError, TypeError, AttributeError) as exc:
raise ValueError(
"Process-shared cache key contains non-picklable items - Consider ignoring suspect fields"
) from exc

return hashlib.blake2b(payload, digest_size=16).hexdigest()


def _cache_key_fingerprint(value: Any, process_isolated: bool) -> str:
    """Dispatch to the fingerprint strategy matching the storage's isolation."""
    strategy = _process_isolated_fingerprint if process_isolated else _process_shared_fingerprint
    return strategy(value)


def _iter_arguments(
function_signature: Signature,
args: tuple,
Expand Down Expand Up @@ -48,17 +104,18 @@ def create_cache_key(
ignore_fields: tuple[str, ...],
args: tuple,
kwargs: dict,
process_isolated: bool,
) -> str:
function_id = get_function_id(function)

if not cache_key_func:
function_signature = inspect.signature(function)
items = tuple(_iter_arguments(function_signature, args, kwargs, ignore_fields))
return f"{function_id}:{_cache_key_fingerprint(items)}"
return f"{function_id}:{_cache_key_fingerprint(items, process_isolated)}"

cache_key = cache_key_func(args, kwargs)
try:
return f"{function_id}:{_cache_key_fingerprint(cache_key)}"
return f"{function_id}:{_cache_key_fingerprint(cache_key, process_isolated)}"
except TypeError as exc:
raise ValueError(
"Cache key function must return a hashable cache key - be careful with mutable types (list, dict, set) and non built-in types"
Expand Down
44 changes: 0 additions & 44 deletions cachify/utils/decorator_factory.py

This file was deleted.

129 changes: 129 additions & 0 deletions pytest/tests/redis/test_redis_unpickleable.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
import threading
import pytest
import redis

from cachify import redis_cache


class TestUnpickleableArgumentsRaiseError:
    """Verify that Redis-backed caching rejects unpickleable call arguments."""

    def test_lambda_argument_raises_value_error(self, setup_sync_redis: redis.Redis):
        """A bare lambda argument must surface as a ValueError."""

        @redis_cache(ttl=60)
        def cached_func(func) -> int:
            return 1

        doubler = lambda x: x * 2  # noqa: E731
        with pytest.raises(ValueError):
            cached_func(doubler)

    def test_lock_argument_raises_value_error(self, setup_sync_redis: redis.Redis):
        """A threading.Lock argument must surface as a ValueError."""

        @redis_cache(ttl=60)
        def cached_func(lock: threading.Lock) -> int:
            return 1

        unpicklable_lock = threading.Lock()
        with pytest.raises(ValueError):
            cached_func(unpicklable_lock)

    def test_nested_unpickleable_raises_value_error(self, setup_sync_redis: redis.Redis):
        """An unpickleable object one level inside a dict must raise ValueError."""

        @redis_cache(ttl=60)
        def cached_func(data: dict) -> int:
            return 1

        payload = {"nested": {"lambda": lambda: None}}
        with pytest.raises(ValueError):
            cached_func(payload)

    def test_deeply_nested_unpickleable_raises_value_error(self, setup_sync_redis: redis.Redis):
        """An unpickleable object buried several levels deep must raise ValueError."""

        @redis_cache(ttl=60)
        def cached_func(data: dict) -> int:
            return 1

        payload = {"level1": {"level2": {"level3": [lambda: None, 1, 2]}}}
        with pytest.raises(ValueError):
            cached_func(payload)

    def test_mixed_nested_structures_with_unpickleable_raises(self, setup_sync_redis: redis.Redis):
        """Unpickleable objects scattered across mixed containers must raise ValueError."""

        @redis_cache(ttl=60)
        def cached_func(data: dict) -> int:
            return 1

        mixed_data = {
            "list": [1, {"inner_dict": [threading.Lock(), 2, 3]}, 4],
            "tuple": (5, (6, {"deep": threading.Lock()})),
            "simple": "value",
        }
        with pytest.raises(ValueError):
            cached_func(mixed_data)


class TestCacheKeyFuncUnpickleableRaisesError:
    """Verify that a cache_key_func producing unpickleable keys raises clearly."""

    def test_cache_key_func_returning_lambda_raises(self, setup_sync_redis: redis.Redis):
        """A key function whose return value is itself a lambda must raise ValueError."""
        identity_key = lambda x: x  # noqa: E731

        @redis_cache(ttl=60, cache_key_func=lambda args, kwargs: identity_key)
        def cached_func(value: int) -> int:
            return value

        with pytest.raises(ValueError):
            cached_func(1)

    def test_cache_key_func_with_unpickleable_in_tuple_raises(self, setup_sync_redis: redis.Redis):
        """A key tuple that smuggles in a Lock must raise ValueError."""
        held_lock = threading.Lock()

        @redis_cache(ttl=60, cache_key_func=lambda args, kwargs: (args[0], held_lock))
        def cached_func(value: int) -> int:
            return value

        with pytest.raises(ValueError):
            cached_func(1)


class TestIgnoreFieldsWithUnpickleable:
    """Verify that ignore_fields lets callers cache around unpickleable arguments."""

    def test_ignore_unpickleable_field_allows_caching(self, setup_sync_redis: redis.Redis):
        """Ignoring a lambda-valued field should restore normal cache hits."""
        invocations = []

        @redis_cache(ttl=60, ignore_fields=("callback",))
        def cached_func(value: int, callback) -> int:
            invocations.append(value)
            return value * 2

        identity = lambda x: x  # noqa: E731
        first = cached_func(5, callback=identity)
        second = cached_func(5, callback=identity)

        assert first == second == 10
        assert len(invocations) == 1

    def test_ignore_lock_field_allows_caching(self, setup_sync_redis: redis.Redis):
        """Ignoring a Lock-valued field should restore normal cache hits."""
        invocations = []

        @redis_cache(ttl=60, ignore_fields=("lock",))
        def cached_func(value: int, lock: threading.Lock) -> int:
            invocations.append(value)
            return value * 2

        shared_lock = threading.Lock()
        first = cached_func(5, lock=shared_lock)
        second = cached_func(5, lock=shared_lock)

        assert first == second == 10
        assert len(invocations) == 1
Empty file.
Loading