Commit 3647f20

1 parent e73858c commit 3647f20

10 files changed: 250 additions, 179 deletions

cache/__init__.py

Lines changed: 8 additions & 0 deletions
@@ -1,2 +1,10 @@
+"""
+Async caching library providing LRU and TTL caching decorators.
+"""
+
 from .async_lru import AsyncLRU
 from .async_ttl import AsyncTTL
+from .types import T, AsyncFunc
+
+__all__ = ['AsyncLRU', 'AsyncTTL']
+__version__ = '1.1.1'
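
The package now declares its public API and version at the top level. A minimal import sketch, assuming the package is installed under the name `cache`:

from cache import AsyncLRU, AsyncTTL, __version__

print(__version__)  # '1.1.1'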

cache/async_lru.py

Lines changed: 24 additions & 28 deletions
@@ -1,35 +1,31 @@
+from functools import wraps
+from typing import Optional, Coroutine
+
 from .key import KEY
 from .lru import LRU
+from .types import T, AsyncFunc, Callable, Any


 class AsyncLRU:
-    def __init__(self, maxsize=128):
-        """
-        :param maxsize: Use maxsize as None for unlimited size cache
-        """
-        self.lru = LRU(maxsize=maxsize)
-
-    def cache_clear(self):
-        """
-        Clears the LRU cache.
-
-        This method empties the cache, removing all stored
-        entries and effectively resetting the cache.
-
-        :return: None
-        """
-        self.lru.clear()
-
-    def __call__(self, func):
-        async def wrapper(*args, use_cache=True, **kwargs):
-            key = KEY(args, kwargs)
-            if key in self.lru and use_cache:
-                return self.lru[key]
-            else:
-                self.lru[key] = await func(*args, **kwargs)
-                return self.lru[key]
-
-        wrapper.__name__ += func.__name__
-        wrapper.__dict__['cache_clear'] = self.cache_clear
+    """Async Least Recently Used (LRU) cache decorator."""
+
+    def __init__(self, maxsize: Optional[int] = 128) -> None:
+        self.lru: LRU = LRU(maxsize=maxsize)
+
+    def __call__(self, func: AsyncFunc) -> Callable[..., Coroutine[Any, Any, T]]:
+        @wraps(func)
+        async def wrapper(*args: Any, use_cache: bool = True, **kwargs: Any) -> T:
+            if not use_cache:
+                return await func(*args, **kwargs)
+
+            key: KEY = KEY(args, kwargs)
+
+            if await self.lru.contains(key):
+                return await self.lru.get(key)
+
+            result: T = await func(*args, **kwargs)
+            await self.lru.set(key, result)
+            return result

+        wrapper.cache_clear = self.lru.clear # type: ignore
         return wrapper
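
The decorator now short-circuits on `use_cache=False`, awaits the coroutine-based `LRU` methods, and exposes `LRU.clear` (itself a coroutine) as `cache_clear`. A minimal usage sketch of that API; `fetch` and its sleep are hypothetical stand-ins, and the package is assumed to be importable as `cache`:

import asyncio

from cache import AsyncLRU


@AsyncLRU(maxsize=128)
async def fetch(x: int) -> int:
    await asyncio.sleep(0.1)  # stand-in for real async work
    return x * 2


async def main() -> None:
    print(await fetch(2))                   # computed, then stored in the cache
    print(await fetch(2))                   # served from the cache
    print(await fetch(2, use_cache=False))  # bypasses the cache entirely
    await fetch.cache_clear()               # cache_clear is now a coroutine (LRU.clear)


asyncio.run(main())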

cache/async_ttl.py

Lines changed: 50 additions & 57 deletions
@@ -1,76 +1,69 @@
-import datetime
+import time
+from functools import wraps
+from typing import Dict, Optional, Tuple, Coroutine

-from .key import KEY
 from .lru import LRU
+from .types import T, AsyncFunc, Callable, Any


-class AsyncTTL:
-    class _TTL(LRU):
-        def __init__(self, time_to_live, maxsize):
-            super().__init__(maxsize=maxsize)
-
-            self.time_to_live = (
-                datetime.timedelta(seconds=time_to_live) if time_to_live else None
-            )
+class TTL(LRU):
+    """Time-To-Live (TTL) cache implementation extending LRU cache."""

-            self.maxsize = maxsize
+    def __init__(self, maxsize: Optional[int] = 128, time_to_live: int = 0) -> None:
+        super().__init__(maxsize=maxsize)
+        self.time_to_live: int = time_to_live
+        self.timestamps: Dict[Any, float] = {}

-        def __contains__(self, key):
-            if key not in self.keys():
+    async def contains(self, key: Any) -> bool:
+        async with self._lock:
+            exists = await super().contains(key)
+            if not exists:
                 return False
-            else:
-                key_expiration = super().__getitem__(key)[1]
-                if key_expiration and key_expiration < datetime.datetime.now():
-                    del self[key]
+            if self.time_to_live:
+                timestamp: float = self.timestamps.get(key, 0)
+                if time.time() - timestamp > self.time_to_live:
+                    del self.cache[key]
+                    del self.timestamps[key]
                     return False
-                else:
-                    return True
+            return True

-        def __getitem__(self, key):
-            value = super().__getitem__(key)[0]
-            return value
+    async def set(self, key: Any, value: Any) -> None:
+        async with self._lock:
+            await super().set(key, value)
+            if self.time_to_live:
+                self.timestamps[key] = time.time()

-        def __setitem__(self, key, value):
-            ttl_value = (
-                (datetime.datetime.now() + self.time_to_live)
-                if self.time_to_live
-                else None
-            )
-            super().__setitem__(key, (value, ttl_value))
+    async def clear(self) -> None:
+        async with self._lock:
+            await super().clear()
+            self.timestamps.clear()

-    def __init__(self, time_to_live=60, maxsize=1024, skip_args: int = 0):
-        """

-        :param time_to_live: Use time_to_live as None for non expiring cache
-        :param maxsize: Use maxsize as None for unlimited size cache
-        :param skip_args: Use `1` to skip first arg of func in determining cache key
-        """
-        self.ttl = self._TTL(time_to_live=time_to_live, maxsize=maxsize)
-        self.skip_args = skip_args
-
-    def cache_clear(self):
-        """
-        Clears the TTL cache.
+class AsyncTTL:
+    """Async Time-To-Live (TTL) cache decorator."""

-        This method empties the cache, removing all stored
-        entries and effectively resetting the cache.
+    def __init__(self,
+                 time_to_live: int = 0,
+                 maxsize: Optional[int] = 128,
+                 skip_args: int = 0) -> None:
+        self.ttl: TTL = TTL(maxsize=maxsize, time_to_live=time_to_live)
+        self.skip_args: int = skip_args

-        :return: None
-        """
-        self.ttl.clear()
+    def __call__(self, func: AsyncFunc) -> Callable[..., Coroutine[Any, Any, T]]:
+        @wraps(func)
+        async def wrapper(*args: Any, use_cache: bool = True, **kwargs: Any) -> T:
+            if not use_cache:
+                return await func(*args, **kwargs)

-    def __call__(self, func):
-        async def wrapper(*args, use_cache=True, **kwargs):
-            key = KEY(args[self.skip_args:], kwargs)
-            if key in self.ttl and use_cache:
-                val = self.ttl[key]
-            else:
-                self.ttl[key] = await func(*args, **kwargs)
-                val = self.ttl[key]
+            key: Tuple[Any, ...] = (*args[self.skip_args:], *sorted(kwargs.items()))

-            return val
+            if await self.ttl.contains(key):
+                return await self.ttl.get(key)

-        wrapper.__name__ += func.__name__
-        wrapper.__dict__['cache_clear'] = self.cache_clear
+            result: T = await func(*args, **kwargs)
+            await self.ttl.set(key, result)
+            return result

+        # Add cache_clear method to the wrapper
+        wrapper.cache_clear = self.ttl.clear # type: ignore
         return wrapper
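
The rewritten `TTL.contains` replaces the old `datetime`-based expiration with a per-key wall-clock timestamp, and `AsyncTTL` now builds its key as a plain tuple `(*args[skip_args:], *sorted(kwargs.items()))` instead of a `KEY` instance. An entry is treated as expired once `time.time() - timestamp` exceeds `time_to_live`, and a `time_to_live` of 0 disables expiry. A standalone sketch of that expiry check (the names below are illustrative, not part of the library):

import time

time_to_live = 2                      # seconds; 0 would mean "never expire"
timestamps = {"answer": time.time()}  # per-key insertion timestamps


def is_fresh(key: str) -> bool:
    # Mirrors the expiry test in TTL.contains above.
    if not time_to_live:
        return True
    return time.time() - timestamps.get(key, 0) <= time_to_live


print(is_fresh("answer"))  # True immediately after insertion
time.sleep(2.1)
print(is_fresh("answer"))  # False once the TTL has elapsed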

cache/key.py

Lines changed: 21 additions & 14 deletions
@@ -1,24 +1,31 @@
-from typing import Any
+from typing import Any, Dict, Tuple
+
+from .types import CacheKey


 class KEY:
-    def __init__(self, *args, **kwargs):
-        self.args = args
-        self.kwargs = kwargs
-        kwargs.pop("use_cache", None)
+    """
+    A hashable key class for cache implementations that handles complex arguments.
+    Supports primitive types, tuples, dictionaries, and objects with __dict__.
+    """
+
+    def __init__(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> None:
+        self.args: Tuple[Any, ...] = args
+        self.kwargs: Dict[str, Any] = {k: v for k, v in kwargs.items() if k != "use_cache"}

-    def __eq__(self, obj):
-        return hash(self) == hash(obj)
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, KEY):
+            return NotImplemented
+        return hash(self) == hash(other)

-    def __hash__(self):
-        def _hash(param: Any):
+    def __hash__(self) -> int:
+        def _hash(param: Any) -> CacheKey:
             if isinstance(param, tuple):
-                return tuple(map(_hash, param))
+                return tuple(_hash(item) for item in param)
             if isinstance(param, dict):
-                return tuple(map(_hash, param.items()))
+                return tuple(sorted((_hash(k), _hash(v)) for k, v in param.items()))
             elif hasattr(param, "__dict__"):
-                return str(vars(param))
-            else:
-                return str(param)
+                return str(sorted(vars(param).items()))
+            return str(param)

         return hash(_hash(self.args) + _hash(self.kwargs))
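
Dictionaries (and object `__dict__`s) are now reduced to sorted tuples before hashing, so keyword order no longer affects the key, and `use_cache` is stripped in `__init__` rather than popped from the caller's dict. A small sketch of the resulting behaviour, assuming the module is importable as `cache.key`:

from cache.key import KEY

k1 = KEY((1, "a"), {"x": 1, "y": 2})
k2 = KEY((1, "a"), {"y": 2, "x": 1})

assert k1 == k2              # kwargs are sorted before hashing
assert hash(k1) == hash(k2)

# "use_cache" never influences the key.
assert KEY((), {"use_cache": False}) == KEY((), {})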

cache/lru.py

Lines changed: 29 additions & 14 deletions
@@ -1,19 +1,34 @@
+from asyncio import Lock
 from collections import OrderedDict
-from copy import deepcopy
+from typing import Any, Optional


-class LRU(OrderedDict):
-    def __init__(self, maxsize, *args, **kwargs):
-        self.maxsize = maxsize
-        super().__init__(*args, **kwargs)
+class LRU:
+    """Thread-safe LRU cache implementation."""

-    def __getitem__(self, key):
-        value = deepcopy(super().__getitem__(key))
-        self.move_to_end(key)
-        return value
+    def __init__(self, maxsize: Optional[int] = 128) -> None:
+        self.maxsize: Optional[int] = maxsize
+        self.cache: OrderedDict = OrderedDict()
+        self._lock: Lock = Lock()

-    def __setitem__(self, key, value):
-        super().__setitem__(key, deepcopy(value))
-        if self.maxsize and len(self) > self.maxsize:
-            oldest = next(iter(self))
-            del self[oldest]
+    async def contains(self, key: Any) -> bool:
+        async with self._lock:
+            return key in self.cache
+
+    async def get(self, key: Any) -> Any:
+        async with self._lock:
+            value: Any = self.cache.pop(key)
+            self.cache[key] = value
+            return value
+
+    async def set(self, key: Any, value: Any) -> None:
+        async with self._lock:
+            if key in self.cache:
+                self.cache.pop(key)
+            elif self.maxsize and len(self.cache) >= self.maxsize:
+                self.cache.popitem(last=False)
+            self.cache[key] = value
+
+    async def clear(self) -> None:
+        async with self._lock:
+            self.cache.clear()
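
The `OrderedDict` subclass with `deepcopy` semantics is replaced by a composition-based class guarded by an `asyncio.Lock`, exposing coroutine methods instead of `__getitem__`/`__setitem__` (values are no longer deep-copied, so cached objects are shared with callers). A minimal usage sketch of the new interface, assuming the module is importable as `cache.lru`:

import asyncio

from cache.lru import LRU


async def main() -> None:
    lru = LRU(maxsize=2)
    await lru.set("a", 1)
    await lru.set("b", 2)
    await lru.get("a")              # touching "a" makes it most recently used
    await lru.set("c", 3)           # evicts "b", the least recently used entry
    print(await lru.contains("b"))  # False
    print(await lru.get("a"), await lru.get("c"))  # 1 3
    await lru.clear()


asyncio.run(main())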

cache/types.py

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+from typing import TypeVar, Callable, Coroutine, Any, Dict, Tuple, Union
+
+T = TypeVar('T') # Generic return type
+CacheKey = Union[Tuple[Any, ...], str]
+AsyncFunc = Callable[..., Coroutine[Any, Any, T]]
+CacheDict = Dict[Any, Any]

pyproject.toml

Lines changed: 12 additions & 0 deletions
@@ -59,15 +59,27 @@ addopts = """
     --cov-report=term-missing
     --cov-report=xml
     --cov-report=html
+    --timeout=20
     -v
 """
 testpaths = ["tests"]
 python_files = ["*_test.py"]
 asyncio_mode = "auto"
+asyncio_default_fixture_loop_scope = "function"

 [tool.coverage.run]
 branch = true
 source = ["cache"]
+exclude_lines = [
+    "pragma: no cover",
+    "def __repr__",
+    "if __name__ == .__main__.:",
+    "raise NotImplementedError",
+    "if TYPE_CHECKING:",
+]
+exclude_list = [
+    "tests",
+]

 [tool.coverage.report]
 exclude_lines = [

requirements-dev.txt

Lines changed: 2 additions & 1 deletion
@@ -3,4 +3,5 @@ pyright==1.1.399

 pytest==8.3.5
 pytest-asyncio==0.26.0
-pytest-cov==6.1.1
+pytest-cov==6.1.1
+pytest-timeout==2.3.1
