Skip to content

Commit 6e9c9e2

Browse files
committed
disable caching when ttl is 0 (no caching on zero TTL)
1 parent 47a9edf commit 6e9c9e2

File tree

3 files changed

+118
-40
lines changed

3 files changed

+118
-40
lines changed

README.md

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -88,16 +88,6 @@ user = await get_user_async(42)
8888
```
8989

9090
## Benchmarks
91-
92-
**Performance Comparison** (10ms baseline operation):
93-
94-
| Strategy | Time | Speedup |
95-
|----------|------|---------|
96-
| No Cache | 12.51 ms | 1x |
97-
| TTLCache | 0.0010 ms | 12,000x |
98-
| SWRCache | 0.0014 ms | 9,100x |
99-
| BGCache | 0.0003 ms | 37,000x |
100-
10191
Full benchmarks available in `tests/benchmark.py`.
10292

10393
## API Reference

src/advanced_caching/decorators.py

Lines changed: 58 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -20,10 +20,11 @@
2020

2121
from .storage import InMemCache, CacheEntry, CacheStorage
2222

23-
logger = logging.getLogger(__name__)
24-
2523
T = TypeVar("T")
2624

25+
# Minimal logger used only for error reporting (no debug/info on hot paths)
26+
logger = logging.getLogger(__name__)
27+
2728

2829
# ============================================================================
2930
# TTLCache - Simple TTL-based caching decorator
@@ -81,6 +82,9 @@ def calculate(x):
8182

8283
def decorator(func: Callable[..., T]) -> Callable[..., T]:
8384
def wrapper(*args, **kwargs) -> T:
85+
# If ttl is 0 or negative, disable caching and call through
86+
if ttl <= 0:
87+
return func(*args, **kwargs)
8488
# Generate cache key
8589
if callable(key):
8690
cache_key = key(*args, **kwargs)
@@ -178,6 +182,9 @@ def get_user(user_id: int):
178182

179183
def decorator(func: Callable[..., T]) -> Callable[..., T]:
180184
def wrapper(*args, **kwargs) -> T:
185+
# If ttl is 0 or negative, disable caching and SWR behavior
186+
if ttl <= 0:
187+
return func(*args, **kwargs)
181188
# Generate cache key
182189
if callable(key):
183190
# Key function is responsible for handling args/defaults
@@ -212,7 +219,6 @@ def wrapper(*args, **kwargs) -> T:
212219

213220
if entry is None:
214221
# Cache miss - fetch now
215-
logger.debug(f"Cache MISS: {cache_key}")
216222
result = func(*args, **kwargs)
217223
now = time.time()
218224
cache_entry = CacheEntry(
@@ -221,16 +227,12 @@ def wrapper(*args, **kwargs) -> T:
221227
function_cache.set_entry(cache_key, cache_entry)
222228
return result
223229

224-
# Check if fresh (within TTL)
225230
if entry.is_fresh():
226-
logger.debug(f"Cache HIT (fresh): {cache_key}")
227231
return entry.value
228232

229-
# Stale - check if still within stale period
230233
age = entry.age()
231234
if age > (ttl + stale_ttl):
232235
# Too stale, fetch now
233-
logger.debug(f"Cache HIT (too stale): {cache_key}, age={age:.1f}s")
234236
result = func(*args, **kwargs)
235237
now = time.time()
236238
cache_entry = CacheEntry(
@@ -240,10 +242,6 @@ def wrapper(*args, **kwargs) -> T:
240242
return result
241243

242244
# Stale but within grace period - return stale and refresh in background
243-
logger.debug(
244-
f"Cache HIT (stale): {cache_key}, refreshing in background"
245-
)
246-
247245
# Try to acquire refresh lock
248246
lock_key = f"{cache_key}:refresh_lock"
249247
if enable_lock:
@@ -262,9 +260,11 @@ def refresh_job():
262260
value=new_value, fresh_until=now + ttl, created_at=now
263261
)
264262
function_cache.set_entry(cache_key, cache_entry)
265-
logger.debug(f"Background refresh complete: {cache_key}")
266-
except Exception as e:
267-
logger.error(f"Background refresh failed for {cache_key}: {e}")
263+
except Exception:
264+
# Log background refresh failures but never raise
265+
logger.exception(
266+
"SWR background refresh failed for key %r", cache_key
267+
)
268268

269269
thread = threading.Thread(target=refresh_job, daemon=True)
270270
thread.start()
@@ -315,7 +315,6 @@ def start(cls) -> None:
315315
if not cls._started:
316316
cls.get_scheduler().start()
317317
cls._started = True
318-
logger.info("Shared BackgroundScheduler started")
319318

320319
@classmethod
321320
def shutdown(cls, wait: bool = True) -> None:
@@ -325,7 +324,6 @@ def shutdown(cls, wait: bool = True) -> None:
325324
cls._scheduler.shutdown(wait=wait)
326325
cls._started = False
327326
cls._scheduler = None
328-
logger.info("Shared BackgroundScheduler stopped")
329327

330328

331329
# ============================================================================
@@ -390,8 +388,13 @@ def register_loader(
390388
Decorated function that returns cached data (sync or async).
391389
"""
392390
cache_key = key
393-
if ttl is None:
391+
# If interval_seconds <= 0 or ttl == 0, disable background scheduling and caching.
392+
if interval_seconds <= 0:
393+
interval_seconds = 0
394+
if ttl is None and interval_seconds > 0:
394395
ttl = interval_seconds * 2
396+
if ttl is None:
397+
ttl = 0
395398

396399
# Create a dedicated cache instance for this loader
397400
loader_cache = cache if cache is not None else InMemCache()
@@ -400,16 +403,38 @@ def decorator(loader_func: Callable[[], T]) -> Callable[[], T]:
400403
# Detect if function is async
401404
is_async = asyncio.iscoroutinefunction(loader_func)
402405

406+
# If no scheduling/caching is desired, just wrap the function and call through
407+
if interval_seconds <= 0 or ttl <= 0:
408+
if is_async:
409+
410+
async def async_wrapper() -> T:
411+
return await loader_func()
412+
413+
async_wrapper.__wrapped__ = loader_func # type: ignore
414+
async_wrapper.__name__ = loader_func.__name__ # type: ignore
415+
async_wrapper.__doc__ = loader_func.__doc__ # type: ignore
416+
async_wrapper._cache = loader_cache # type: ignore
417+
async_wrapper._cache_key = cache_key # type: ignore
418+
419+
return async_wrapper # type: ignore
420+
else:
421+
422+
def sync_wrapper() -> T:
423+
return loader_func()
424+
425+
sync_wrapper.__wrapped__ = loader_func # type: ignore
426+
sync_wrapper.__name__ = loader_func.__name__ # type: ignore
427+
sync_wrapper.__doc__ = loader_func.__doc__ # type: ignore
428+
sync_wrapper._cache = loader_cache # type: ignore
429+
sync_wrapper._cache_key = cache_key # type: ignore
430+
431+
return sync_wrapper # type: ignore
432+
403433
# Create wrapper that loads and caches
404434
def refresh_job():
405435
"""Job that runs periodically to refresh the cache."""
406436
try:
407-
logger.debug(f"Refreshing cache key: {cache_key}")
408-
start = time.time()
409-
410-
# Call function (async or sync)
411437
if is_async:
412-
# Run async function in new event loop
413438
loop = asyncio.new_event_loop()
414439
asyncio.set_event_loop(loop)
415440
try:
@@ -419,19 +444,22 @@ def refresh_job():
419444
else:
420445
data = loader_func()
421446

422-
duration = time.time() - start
423-
424447
loader_cache.set(cache_key, data, ttl)
425-
logger.info(
426-
f"Refreshed {cache_key} successfully in {duration:.3f}s"
427-
)
428448
except Exception as e:
429-
logger.error(f"Failed to refresh {cache_key}: {e}", exc_info=True)
449+
# User-provided error handler gets first chance
430450
if on_error:
431451
try:
432452
on_error(e)
433-
except Exception as err:
434-
logger.error(f"Error handler failed: {err}")
453+
except Exception:
454+
# Avoid user handler breaking the scheduler
455+
logger.exception(
456+
"BGCache error handler failed for key %r", cache_key
457+
)
458+
else:
459+
# Log uncaught loader errors for visibility
460+
logger.exception(
461+
"BGCache refresh job failed for key %r", cache_key
462+
)
435463

436464
# Get shared scheduler
437465
scheduler = _SharedScheduler.get_scheduler()

tests/test_correctness.py

Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -606,5 +606,65 @@ def f(*, x: int) -> int:
606606
assert calls["n"] == 1
607607

608608

609+
class TestNoCachingWhenZero:
610+
"""Ensure ttl/interval_seconds == 0 disables caching/background behavior."""
611+
612+
def test_ttlcache_ttl_zero_disables_caching(self):
613+
calls = {"n": 0}
614+
615+
@TTLCache.cached("user:{}", ttl=0)
616+
def get_user(user_id: int) -> int:
617+
calls["n"] += 1
618+
return calls["n"]
619+
620+
# Each call should invoke the function (no caching)
621+
assert get_user(1) == 1
622+
assert get_user(1) == 2
623+
assert get_user(1) == 3
624+
assert calls["n"] == 3
625+
626+
def test_swrcache_ttl_zero_disables_caching(self):
627+
calls = {"n": 0}
628+
629+
@SWRCache.cached("data:{}", ttl=0, stale_ttl=10)
630+
def get_data(key: str) -> int:
631+
calls["n"] += 1
632+
return calls["n"]
633+
634+
# Each call should invoke the function (no SWR behavior)
635+
assert get_data("k") == 1
636+
assert get_data("k") == 2
637+
assert get_data("k") == 3
638+
assert calls["n"] == 3
639+
640+
def test_bgcache_interval_zero_disables_background_and_cache(self):
641+
calls = {"n": 0}
642+
643+
@BGCache.register_loader(key="no_bg", interval_seconds=0, ttl=None)
644+
def load_data() -> int:
645+
calls["n"] += 1
646+
return calls["n"]
647+
648+
# No background scheduler, no caching: each call increments
649+
assert load_data() == 1
650+
assert load_data() == 2
651+
assert load_data() == 3
652+
assert calls["n"] == 3
653+
654+
def test_bgcache_ttl_zero_disables_background_and_cache(self):
655+
calls = {"n": 0}
656+
657+
@BGCache.register_loader(key="no_bg_ttl", interval_seconds=10, ttl=0)
658+
def load_data() -> int:
659+
calls["n"] += 1
660+
return calls["n"]
661+
662+
# Because ttl == 0, wrapper should bypass cache and scheduler
663+
assert load_data() == 1
664+
assert load_data() == 2
665+
assert load_data() == 3
666+
assert calls["n"] == 3
667+
668+
609669
if __name__ == "__main__":
610670
pytest.main([__file__, "-v"])

0 commit comments

Comments (0)