
Commit 81411e3

Merge pull request #3 from agkloop/opt-loader
Opt loader
2 parents b63c3bf + 16344a8 commit 81411e3

8 files changed (+347, -10 lines)


.gitignore

Lines changed: 1 addition & 0 deletions

@@ -136,4 +136,5 @@ dmypy.json
 venv/
 benchmarks.log
 scalene_profile.json
+/tests/ts_examples.py

CHANGELOG.md

Lines changed: 15 additions & 1 deletion

@@ -5,7 +5,21 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
-## 0.1.5 - 2025-12-15
+## [0.1.6] - 2025-12-15
+
+### Changed
+- `JsonSerializer` now uses `orjson` for significantly faster JSON serialization/deserialization (~2-3x faster)
+- `BGCache.register_loader` with `run_immediately=True` now checks if data exists in cache before executing the loader function, avoiding unnecessary function execution when data is already present in Redis/L2 cache.
+
+### Added
+- Comprehensive cache rehydration tests for all decorators (TTLCache, SWRCache, BGCache) verifying that existing Redis data is retrieved without re-executing functions.
+- 7 new integration tests in `TestCacheRehydration` class covering cache hit and cache miss scenarios for all decorators.
+
+### Performance
+- Reduced unnecessary loader executions in BGCache when Redis already contains fresh data.
+- Improved JSON serialization performance with orjson integration.
+
+## [0.1.5] - 2025-12-15
 
 ### Added
 - RedisCache now supports pluggable serializers with built-ins for `pickle` (default) and `json`, plus custom `dumps`/`loads` implementations.
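From the caller's side, the rehydration behaviour called out above looks like the following minimal sketch, condensed from the new `TestCacheRehydration` integration tests added in this commit. The local `redis.Redis()` connection and the pre-seeded `compute:42` key are illustrative, and the top-level imports assume the package re-exports these names (as `src/advanced_caching/__init__.py` below suggests).

```python
import pickle

import redis

from advanced_caching import HybridCache, InMemCache, RedisCache, TTLCache

# Illustrative: a Redis instance that already holds a value for "compute:42",
# e.g. written by a previous process using the default pickle serializer.
redis_client = redis.Redis()
redis_client.setex("compute:42", 60, pickle.dumps({"result": "from_redis"}))

@TTLCache.cached(
    "compute:{}",
    ttl=60,
    cache=lambda: HybridCache(
        l1_cache=InMemCache(),
        l2_cache=RedisCache(redis_client=redis_client),
        l1_ttl=60,
    ),
)
def compute(x):
    # Never runs here: the decorator finds "compute:42" in Redis and reuses it.
    return {"result": f"computed_{x}"}

assert compute(42) == {"result": "from_redis"}  # served from Redis, not recomputed
assert compute(42) == {"result": "from_redis"}  # second call is an L1 hit
```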

pyproject.toml

Lines changed: 3 additions & 2 deletions

@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
 
 [project]
 name = "advanced-caching"
-version = "0.1.5"
+version = "0.1.6"
 description = "Production-ready composable caching with TTL, SWR, and background refresh patterns for Python."
 readme = "README.md"
 requires-python = ">=3.10"
@@ -28,7 +28,8 @@ classifiers = [
     "Typing :: Typed",
 ]
 dependencies = [
-    "apscheduler>=3.10"
+    "apscheduler>=3.10",
+    "orjson>=3.11.5",
 ]
 
 [project.optional-dependencies]
[project.optional-dependencies]

src/advanced_caching/__init__.py

Lines changed: 1 addition & 1 deletion

@@ -4,7 +4,7 @@
 Expose storage backends, decorators, and scheduler utilities under `advanced_caching`.
 """
 
-__version__ = "0.1.5"
+__version__ = "0.1.6"
 
 from .storage import (
     InMemCache,

src/advanced_caching/decorators.py

Lines changed: 4 additions & 2 deletions

@@ -615,9 +615,11 @@ def refresh_job():
     # Get shared scheduler
     scheduler = _SharedScheduler.get_scheduler()
 
-    # Run immediately if requested
+    # Run immediately if requested (but only if cache is empty)
     if run_immediately:
-        refresh_job()
+        cache_obj = get_cache()
+        if cache_obj.get(cache_key) is None:
+            refresh_job()
 
     # Schedule periodic refresh
     scheduler.add_job(
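The observable effect of this guard is sketched below, mirroring `test_bgcache_rehydrates_from_redis` added later in this commit; the Redis connection, key name, and payload are illustrative. Note that the check is `cache_obj.get(cache_key) is None`, so a loader whose cached value is literally `None` still re-runs at registration.

```python
import pickle

import redis

from advanced_caching import BGCache, HybridCache, InMemCache, RedisCache

redis_client = redis.Redis()  # illustrative local Redis
# A previous run already populated the loader's key in Redis.
redis_client.setex("users_list", 60, pickle.dumps({"users": ["Alice", "Bob"]}))

@BGCache.register_loader(
    key="users_list",
    interval_seconds=60,
    run_immediately=True,  # now a no-op at registration because the key already exists
    cache=lambda: HybridCache(
        l1_cache=InMemCache(),
        l2_cache=RedisCache(redis_client=redis_client),
        l1_ttl=60,
    ),
)
def load_users():
    # Previously this ran unconditionally when run_immediately=True; now it only
    # runs at registration on a cache miss (scheduled refreshes are unchanged).
    return {"users": ["fresh", "data"]}

assert load_users() == {"users": ["Alice", "Bob"]}  # existing Redis value is reused
BGCache.shutdown(wait=False)
```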

src/advanced_caching/storage.py

Lines changed: 4 additions & 3 deletions

@@ -14,6 +14,7 @@
 import time
 from dataclasses import dataclass
 from typing import Any, Protocol
+import orjson
 
 try:
     import redis
@@ -45,18 +46,18 @@ def loads(data: bytes) -> Any:
 
 
 class JsonSerializer:
-    """JSON serializer for text-friendly payloads (wraps CacheEntry)."""
+    """JSON serializer for text-friendly payloads (wraps CacheEntry). Uses orjson"""
 
     __slots__ = ()
     handles_entries = False
 
     @staticmethod
     def dumps(obj: Any) -> bytes:
-        return json.dumps(obj, separators=(",", ":")).encode("utf-8")
+        return orjson.dumps(obj)
 
     @staticmethod
     def loads(data: bytes) -> Any:
-        return json.loads(data.decode("utf-8"))
+        return orjson.loads(data)
 
 
 _BUILTIN_SERIALIZERS: dict[str, Serializer] = {
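A quick round-trip with the orjson-backed serializer, for illustration (importing `JsonSerializer` from `advanced_caching.storage` is assumed from the file path above). The swap is drop-in for typical JSON payloads, though orjson is stricter in places, for example it rejects integer dict keys by default where the stdlib `json` would coerce them to strings.

```python
from advanced_caching.storage import JsonSerializer  # module path per the diff above

payload = {"result": "from_redis", "ids": [1, 2, 3]}

raw = JsonSerializer.dumps(payload)          # now orjson.dumps(...) -> compact bytes
assert isinstance(raw, bytes)                # no manual .encode("utf-8") step anymore
assert JsonSerializer.loads(raw) == payload  # now orjson.loads(...), accepts bytes directly
```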

tests/test_integration_redis.py

Lines changed: 235 additions & 0 deletions

@@ -670,6 +670,241 @@ def test_hybridcache_very_short_l2_ttl(self, redis_client):
         assert cache.get("short_ttl") == "value"
 
 
+class TestCacheRehydration:
+    """Test that decorators can retrieve existing data from Redis without re-executing functions."""
+
+    def test_ttlcache_rehydrates_from_redis(self, redis_client):
+        """Test TTLCache retrieves existing Redis data without executing function."""
+        # Pre-populate Redis
+        test_data = {"result": "from_redis"}
+        redis_client.setex("compute:42", 60, pickle.dumps(test_data))
+
+        call_count = 0
+
+        @TTLCache.cached(
+            "compute:{}",
+            ttl=60,
+            cache=lambda: HybridCache(
+                l1_cache=InMemCache(),
+                l2_cache=RedisCache(redis_client=redis_client),
+                l1_ttl=60,
+            ),
+        )
+        def compute(x):
+            nonlocal call_count
+            call_count += 1
+            return {"result": f"computed_{x}"}
+
+        # First call should retrieve from Redis without executing function
+        result = compute(42)
+        assert result == test_data
+        assert call_count == 0, "Function should not execute when data exists in Redis"
+
+        # Second call should hit L1 cache
+        result = compute(42)
+        assert result == test_data
+        assert call_count == 0
+
+    def test_swrcache_rehydrates_from_redis(self, redis_client):
+        """Test SWRCache retrieves existing Redis data without executing function."""
+        # Pre-populate Redis with CacheEntry
+        now = time.time()
+        entry = CacheEntry(
+            value={"result": "from_redis"}, fresh_until=now + 60, created_at=now
+        )
+        redis_cache = RedisCache(redis_client=redis_client)
+        redis_cache.set_entry("fetch:99", entry, ttl=60)
+
+        call_count = 0
+
+        @SWRCache.cached(
+            "fetch:{}",
+            ttl=60,
+            stale_ttl=30,
+            cache=lambda: HybridCache(
+                l1_cache=InMemCache(),
+                l2_cache=RedisCache(redis_client=redis_client),
+                l1_ttl=60,
+            ),
+        )
+        def fetch(x):
+            nonlocal call_count
+            call_count += 1
+            return {"result": f"fetched_{x}"}
+
+        # First call should retrieve from Redis without executing function
+        result = fetch(99)
+        assert result == {"result": "from_redis"}
+        assert call_count == 0, "Function should not execute when data exists in Redis"
+
+        # Second call should hit L1 cache
+        result = fetch(99)
+        assert result == {"result": "from_redis"}
+        assert call_count == 0
+
+    def test_bgcache_rehydrates_from_redis(self, redis_client):
+        """Test BGCache retrieves existing Redis data without executing function on init."""
+        # Pre-populate Redis
+        test_data = {"users": ["Alice", "Bob", "Charlie"]}
+        redis_client.setex("users_list_rehydrate", 60, pickle.dumps(test_data))
+
+        call_count = 0
+
+        @BGCache.register_loader(
+            key="users_list_rehydrate",
+            interval_seconds=60,
+            run_immediately=True,
+            cache=lambda: HybridCache(
+                l1_cache=InMemCache(),
+                l2_cache=RedisCache(redis_client=redis_client),
+                l1_ttl=60,
+            ),
+        )
+        def load_users():
+            nonlocal call_count
+            call_count += 1
+            return {"users": ["New1", "New2"]}
+
+        # Function should not execute during init (data exists in Redis)
+        assert call_count == 0, "Function should not execute when data exists in Redis"
+
+        # First call should hit L1 cache
+        result = load_users()
+        assert result == test_data
+        assert call_count == 0
+
+        BGCache.shutdown(wait=False)
+
+    def test_ttlcache_executes_on_cache_miss(self, redis_client):
+        """Test TTLCache executes function when Redis is empty."""
+        redis_client.flushdb()
+
+        call_count = 0
+
+        @TTLCache.cached(
+            "compute:{}",
+            ttl=60,
+            cache=lambda: HybridCache(
+                l1_cache=InMemCache(),
+                l2_cache=RedisCache(redis_client=redis_client),
+                l1_ttl=60,
+            ),
+        )
+        def compute(x):
+            nonlocal call_count
+            call_count += 1
+            return {"result": f"computed_{x}"}
+
+        # First call should execute function (cache miss)
+        result = compute(42)
+        assert result == {"result": "computed_42"}
+        assert call_count == 1
+
+        # Second call should hit L1 cache
+        result = compute(42)
+        assert result == {"result": "computed_42"}
+        assert call_count == 1
+
+    def test_swrcache_executes_on_cache_miss(self, redis_client):
+        """Test SWRCache executes function when Redis is empty."""
+        redis_client.flushdb()
+
+        call_count = 0
+
+        @SWRCache.cached(
+            "fetch:{}",
+            ttl=60,
+            stale_ttl=30,
+            cache=lambda: HybridCache(
+                l1_cache=InMemCache(),
+                l2_cache=RedisCache(redis_client=redis_client),
+                l1_ttl=60,
+            ),
+        )
+        def fetch(x):
+            nonlocal call_count
+            call_count += 1
+            return {"result": f"fetched_{x}"}
+
+        # First call should execute function (cache miss)
+        result = fetch(99)
+        assert result == {"result": "fetched_99"}
+        assert call_count == 1
+
+        # Second call should hit L1 cache
+        result = fetch(99)
+        assert result == {"result": "fetched_99"}
+        assert call_count == 1
+
+    def test_bgcache_executes_on_cache_miss(self, redis_client):
+        """Test BGCache executes function on init when Redis is empty."""
+        redis_client.flushdb()
+
+        call_count = 0
+
+        @BGCache.register_loader(
+            key="empty_test_bgcache",
+            interval_seconds=60,
+            run_immediately=True,
+            cache=lambda: HybridCache(
+                l1_cache=InMemCache(),
+                l2_cache=RedisCache(redis_client=redis_client),
+                l1_ttl=60,
+            ),
+        )
+        def load_data():
+            nonlocal call_count
+            call_count += 1
+            return {"data": "fresh_load"}
+
+        # Function should execute during init (cache miss)
+        assert call_count == 1
+
+        # First call should hit L1 cache
+        result = load_data()
+        assert result == {"data": "fresh_load"}
+        assert call_count == 1
+
+        BGCache.shutdown(wait=False)
+
+    def test_ttlcache_different_args_separate_entries(self, redis_client):
+        """Test TTLCache creates separate cache entries for different arguments."""
+        # Pre-populate Redis with data for arg=10
+        test_data = {"result": "from_redis_10"}
+        redis_client.setex("compute:10", 60, pickle.dumps(test_data))
+
+        call_count = 0
+
+        @TTLCache.cached(
+            "compute:{}",
+            ttl=60,
+            cache=lambda: HybridCache(
+                l1_cache=InMemCache(),
+                l2_cache=RedisCache(redis_client=redis_client),
+                l1_ttl=60,
+            ),
+        )
+        def compute(x):
+            nonlocal call_count
+            call_count += 1
+            return {"result": f"computed_{x}"}
+
+        # Call with arg=10 should get from Redis
+        result = compute(10)
+        assert result == test_data
+        assert call_count == 0
+
+        # Call with arg=20 should execute function (no Redis data)
+        result = compute(20)
+        assert result == {"result": "computed_20"}
+        assert call_count == 1
+
+        # Call with arg=10 again should get from L1
+        result = compute(10)
+        assert result == test_data
+        assert call_count == 1
+
+
 class TestRedisPerformance:
     """Performance tests with Redis backend."""
 

0 commit comments
