1 change: 1 addition & 0 deletions .gitignore
@@ -136,4 +136,5 @@ dmypy.json
venv/
benchmarks.log
scalene_profile.json
/tests/ts_examples.py

16 changes: 15 additions & 1 deletion CHANGELOG.md
@@ -5,7 +5,21 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## 0.1.5 - 2025-12-15
## [0.1.6] - 2025-12-15

### Changed
- `JsonSerializer` now uses `orjson` for JSON serialization/deserialization, roughly 2-3x faster than the previous standard-library `json` implementation.
- `BGCache.register_loader` with `run_immediately=True` now checks if data exists in cache before executing the loader function, avoiding unnecessary function execution when data is already present in Redis/L2 cache.

### Added
- Comprehensive cache rehydration tests for all decorators (TTLCache, SWRCache, BGCache) verifying that existing Redis data is retrieved without re-executing functions.
- 7 new integration tests in `TestCacheRehydration` class covering cache hit and cache miss scenarios for all decorators.

### Performance
- Reduced unnecessary loader executions in BGCache when Redis already contains fresh data.
- Improved JSON serialization performance with orjson integration.

## [0.1.5] - 2025-12-15

### Added
- RedisCache now supports pluggable serializers with built-ins for `pickle` (default) and `json`, plus custom `dumps`/`loads` implementations.
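To make the `run_immediately` change above concrete, here is a minimal usage sketch. It assumes the package's public imports (`BGCache`, `HybridCache`, `InMemCache`, `RedisCache` under `advanced_caching`) and a locally reachable Redis instance; the key name and payload are illustrative only.

```python
import pickle

import redis

from advanced_caching import BGCache, HybridCache, InMemCache, RedisCache

client = redis.Redis()  # assumed local Redis instance, for illustration only

# Simulate data left behind by an earlier run or another process.
client.setex("users_list", 60, pickle.dumps({"users": ["Alice", "Bob"]}))

@BGCache.register_loader(
    key="users_list",
    interval_seconds=60,
    run_immediately=True,  # 0.1.6: skipped at registration when the key is already cached
    cache=lambda: HybridCache(
        l1_cache=InMemCache(),
        l2_cache=RedisCache(redis_client=client),
        l1_ttl=60,
    ),
)
def load_users():
    # Under 0.1.5 this ran once at registration even though Redis already held data;
    # under 0.1.6 it only runs when the initial cache lookup misses.
    return {"users": ["fresh"]}

print(load_users())  # -> {"users": ["Alice", "Bob"]}, served from the pre-seeded cache
BGCache.shutdown(wait=False)
```

The `TestCacheRehydration` class added below exercises exactly this path (and the corresponding cache-miss path) for all three decorators.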
5 changes: 3 additions & 2 deletions pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"

[project]
name = "advanced-caching"
version = "0.1.5"
version = "0.1.6"
description = "Production-ready composable caching with TTL, SWR, and background refresh patterns for Python."
readme = "README.md"
requires-python = ">=3.10"
@@ -28,7 +28,8 @@ classifiers = [
"Typing :: Typed",
]
dependencies = [
"apscheduler>=3.10"
"apscheduler>=3.10",
"orjson>=3.11.5",
]

[project.optional-dependencies]
2 changes: 1 addition & 1 deletion src/advanced_caching/__init__.py
@@ -4,7 +4,7 @@
Expose storage backends, decorators, and scheduler utilities under `advanced_caching`.
"""

__version__ = "0.1.5"
__version__ = "0.1.6"

from .storage import (
InMemCache,
6 changes: 4 additions & 2 deletions src/advanced_caching/decorators.py
@@ -615,9 +615,11 @@ def refresh_job():
# Get shared scheduler
scheduler = _SharedScheduler.get_scheduler()

# Run immediately if requested
# Run immediately if requested (but only if cache is empty)
if run_immediately:
refresh_job()
cache_obj = get_cache()
if cache_obj.get(cache_key) is None:
refresh_job()

# Schedule periodic refresh
scheduler.add_job(
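For reference, a self-contained sketch of the guard this hunk introduces, with a plain dict standing in for the real cache backend; `get_cache`, `cache_key`, and `refresh_job` only mirror the names in the hunk, and their bodies here are placeholders.

```python
# Toy stand-ins for the names used in the hunk above; purely illustrative.
_l2_store: dict[str, object] = {"users_list": ["already cached"]}  # pretend L2/Redis contents

class _ToyCache:
    def get(self, key):
        # Real cache objects return None on a miss, which is what the guard relies on.
        return _l2_store.get(key)

def get_cache():
    return _ToyCache()

cache_key = "users_list"
calls = 0

def refresh_job():
    global calls
    calls += 1
    _l2_store[cache_key] = ["freshly loaded"]

run_immediately = True

# The new behaviour: only execute the loader eagerly when nothing is cached yet.
if run_immediately:
    cache_obj = get_cache()
    if cache_obj.get(cache_key) is None:
        refresh_job()

assert calls == 0  # key was pre-populated, so the eager run was skipped
```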
7 changes: 4 additions & 3 deletions src/advanced_caching/storage.py
@@ -14,6 +14,7 @@
import time
from dataclasses import dataclass
from typing import Any, Protocol
import orjson

try:
import redis
@@ -45,18 +46,18 @@ def loads(data: bytes) -> Any:


class JsonSerializer:
"""JSON serializer for text-friendly payloads (wraps CacheEntry)."""
"""JSON serializer for text-friendly payloads (wraps CacheEntry). Uses orjson"""

__slots__ = ()
handles_entries = False

@staticmethod
def dumps(obj: Any) -> bytes:
return json.dumps(obj, separators=(",", ":")).encode("utf-8")
return orjson.dumps(obj)

@staticmethod
def loads(data: bytes) -> Any:
return json.loads(data.decode("utf-8"))
return orjson.loads(data)


_BUILTIN_SERIALIZERS: dict[str, Serializer] = {
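A quick round-trip sketch of the orjson-backed serializer above; the payload is arbitrary. `orjson.dumps` already returns compact `bytes` (so the old `separators=...`/`.encode()` handling is unnecessary), `orjson.loads` accepts `bytes` directly, and, unlike the standard-library `json` module, orjson rejects non-string dict keys unless `orjson.OPT_NON_STR_KEYS` is passed.

```python
import orjson

payload = {"result": "from_redis", "ids": [1, 2, 3]}

data = orjson.dumps(payload)          # compact bytes, equivalent to separators=(",", ":")
assert isinstance(data, bytes)
assert orjson.loads(data) == payload  # no .decode("utf-8") needed before parsing

# Behavioural difference to keep in mind when moving payloads from json to orjson:
# non-string dict keys require an explicit opt-in (stdlib json coerces them silently).
data = orjson.dumps({1: "a"}, option=orjson.OPT_NON_STR_KEYS)
assert orjson.loads(data) == {"1": "a"}
```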
235 changes: 235 additions & 0 deletions tests/test_integration_redis.py
@@ -670,6 +670,241 @@ def test_hybridcache_very_short_l2_ttl(self, redis_client):
assert cache.get("short_ttl") == "value"


class TestCacheRehydration:
"""Test that decorators can retrieve existing data from Redis without re-executing functions."""

def test_ttlcache_rehydrates_from_redis(self, redis_client):
"""Test TTLCache retrieves existing Redis data without executing function."""
# Pre-populate Redis
test_data = {"result": "from_redis"}
redis_client.setex("compute:42", 60, pickle.dumps(test_data))

call_count = 0

@TTLCache.cached(
"compute:{}",
ttl=60,
cache=lambda: HybridCache(
l1_cache=InMemCache(),
l2_cache=RedisCache(redis_client=redis_client),
l1_ttl=60,
),
)
def compute(x):
nonlocal call_count
call_count += 1
return {"result": f"computed_{x}"}

# First call should retrieve from Redis without executing function
result = compute(42)
assert result == test_data
assert call_count == 0, "Function should not execute when data exists in Redis"

# Second call should hit L1 cache
result = compute(42)
assert result == test_data
assert call_count == 0

def test_swrcache_rehydrates_from_redis(self, redis_client):
"""Test SWRCache retrieves existing Redis data without executing function."""
# Pre-populate Redis with CacheEntry
now = time.time()
entry = CacheEntry(
value={"result": "from_redis"}, fresh_until=now + 60, created_at=now
)
redis_cache = RedisCache(redis_client=redis_client)
redis_cache.set_entry("fetch:99", entry, ttl=60)

call_count = 0

@SWRCache.cached(
"fetch:{}",
ttl=60,
stale_ttl=30,
cache=lambda: HybridCache(
l1_cache=InMemCache(),
l2_cache=RedisCache(redis_client=redis_client),
l1_ttl=60,
),
)
def fetch(x):
nonlocal call_count
call_count += 1
return {"result": f"fetched_{x}"}

# First call should retrieve from Redis without executing function
result = fetch(99)
assert result == {"result": "from_redis"}
assert call_count == 0, "Function should not execute when data exists in Redis"

# Second call should hit L1 cache
result = fetch(99)
assert result == {"result": "from_redis"}
assert call_count == 0

def test_bgcache_rehydrates_from_redis(self, redis_client):
"""Test BGCache retrieves existing Redis data without executing function on init."""
# Pre-populate Redis
test_data = {"users": ["Alice", "Bob", "Charlie"]}
redis_client.setex("users_list_rehydrate", 60, pickle.dumps(test_data))

call_count = 0

@BGCache.register_loader(
key="users_list_rehydrate",
interval_seconds=60,
run_immediately=True,
cache=lambda: HybridCache(
l1_cache=InMemCache(),
l2_cache=RedisCache(redis_client=redis_client),
l1_ttl=60,
),
)
def load_users():
nonlocal call_count
call_count += 1
return {"users": ["New1", "New2"]}

# Function should not execute during init (data exists in Redis)
assert call_count == 0, "Function should not execute when data exists in Redis"

# First call should hit L1 cache
result = load_users()
assert result == test_data
assert call_count == 0

BGCache.shutdown(wait=False)

def test_ttlcache_executes_on_cache_miss(self, redis_client):
"""Test TTLCache executes function when Redis is empty."""
redis_client.flushdb()

call_count = 0

@TTLCache.cached(
"compute:{}",
ttl=60,
cache=lambda: HybridCache(
l1_cache=InMemCache(),
l2_cache=RedisCache(redis_client=redis_client),
l1_ttl=60,
),
)
def compute(x):
nonlocal call_count
call_count += 1
return {"result": f"computed_{x}"}

# First call should execute function (cache miss)
result = compute(42)
assert result == {"result": "computed_42"}
assert call_count == 1

# Second call should hit L1 cache
result = compute(42)
assert result == {"result": "computed_42"}
assert call_count == 1

def test_swrcache_executes_on_cache_miss(self, redis_client):
"""Test SWRCache executes function when Redis is empty."""
redis_client.flushdb()

call_count = 0

@SWRCache.cached(
"fetch:{}",
ttl=60,
stale_ttl=30,
cache=lambda: HybridCache(
l1_cache=InMemCache(),
l2_cache=RedisCache(redis_client=redis_client),
l1_ttl=60,
),
)
def fetch(x):
nonlocal call_count
call_count += 1
return {"result": f"fetched_{x}"}

# First call should execute function (cache miss)
result = fetch(99)
assert result == {"result": "fetched_99"}
assert call_count == 1

# Second call should hit L1 cache
result = fetch(99)
assert result == {"result": "fetched_99"}
assert call_count == 1

def test_bgcache_executes_on_cache_miss(self, redis_client):
"""Test BGCache executes function on init when Redis is empty."""
redis_client.flushdb()

call_count = 0

@BGCache.register_loader(
key="empty_test_bgcache",
interval_seconds=60,
run_immediately=True,
cache=lambda: HybridCache(
l1_cache=InMemCache(),
l2_cache=RedisCache(redis_client=redis_client),
l1_ttl=60,
),
)
def load_data():
nonlocal call_count
call_count += 1
return {"data": "fresh_load"}

# Function should execute during init (cache miss)
assert call_count == 1

# First call should hit L1 cache
result = load_data()
assert result == {"data": "fresh_load"}
assert call_count == 1

BGCache.shutdown(wait=False)

def test_ttlcache_different_args_separate_entries(self, redis_client):
"""Test TTLCache creates separate cache entries for different arguments."""
# Pre-populate Redis with data for arg=10
test_data = {"result": "from_redis_10"}
redis_client.setex("compute:10", 60, pickle.dumps(test_data))

call_count = 0

@TTLCache.cached(
"compute:{}",
ttl=60,
cache=lambda: HybridCache(
l1_cache=InMemCache(),
l2_cache=RedisCache(redis_client=redis_client),
l1_ttl=60,
),
)
def compute(x):
nonlocal call_count
call_count += 1
return {"result": f"computed_{x}"}

# Call with arg=10 should get from Redis
result = compute(10)
assert result == test_data
assert call_count == 0

# Call with arg=20 should execute function (no Redis data)
result = compute(20)
assert result == {"result": "computed_20"}
assert call_count == 1

# Call with arg=10 again should get from L1
result = compute(10)
assert result == test_data
assert call_count == 1


class TestRedisPerformance:
"""Performance tests with Redis backend."""
