Skip to content

Commit 9a00458

Browse files
committed
added Cache model and backends
1 parent 7bfb304 commit 9a00458

File tree

12 files changed

+967
-0
lines changed

12 files changed

+967
-0
lines changed

ellar/cache/__init__.py

Whitespace-only changes.

ellar/cache/backends/__init__.py

Whitespace-only changes.

ellar/cache/backends/aio_cache.py

Lines changed: 104 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,104 @@
1+
import asyncio
2+
import pickle
3+
import typing as t
4+
from abc import ABC
5+
6+
try:
7+
from aiomcache import Client
8+
except ImportError as e: # pragma: no cover
9+
raise RuntimeError(
10+
"To use `AioMemCacheBackend`, you have to install 'aiomcache' package e.g. `pip install aiomcache`"
11+
) from e
12+
13+
14+
from ..interface import IBaseCacheBackendAsync
15+
from ..make_key_decorator import make_key_decorator
16+
from ..model import BaseCacheBackend
17+
18+
19+
class AioMemCacheBackendSync(IBaseCacheBackendAsync, ABC):
    """Synchronous facade over the asynchronous memcached operations.

    Each blocking method drives the corresponding ``*_async`` coroutine to
    completion on an event loop, so callers running outside an event loop
    can still use the backend.
    """

    def _async_executor(self, func: t.Awaitable) -> t.Any:
        """Run *func* (an awaitable) to completion and return its result.

        ``asyncio.get_event_loop()`` is deprecated since Python 3.10 and
        raises ``RuntimeError`` on 3.12+ when the current thread has no
        running loop, so fall back to creating and installing a fresh loop.
        NOTE(review): this still cannot be called from inside a running
        event loop — use the ``*_async`` methods there instead.
        """
        try:
            loop = asyncio.get_event_loop()
        except RuntimeError:
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
        return loop.run_until_complete(func)

    def get(self, key: str, version: str = None) -> t.Any:
        """Blocking wrapper around :meth:`get_async`."""
        return self._async_executor(self.get_async(key, version=version))

    def delete(self, key: str, version: str = None) -> bool:
        """Blocking wrapper around :meth:`delete_async`; True if the key was removed."""
        res = self._async_executor(self.delete_async(key, version=version))
        return bool(res)

    def set(
        self, key: str, value: t.Any, timeout: int = None, version: str = None
    ) -> bool:
        """Blocking wrapper around :meth:`set_async`; True if the value was stored."""
        res = self._async_executor(
            self.set_async(key, value, version=version, timeout=timeout)
        )
        return bool(res)

    def touch(self, key: str, timeout: int = None, version: str = None) -> bool:
        """Blocking wrapper around :meth:`touch_async`; True if the expiry was updated."""
        res = self._async_executor(
            self.touch_async(key, version=version, timeout=timeout)
        )
        return bool(res)
43+
44+
45+
class AioMemCacheBackend(AioMemCacheBackendSync, BaseCacheBackend):
    """Memcached cache backend built on the ``aiomcache`` client.

    Values are pickled before being sent to the server and unpickled on the
    way back; both hooks can be overridden via *serializer*/*deserializer*.
    """

    # Protocol used when pickling values for storage.
    pickle_protocol = pickle.HIGHEST_PROTOCOL

    def __init__(
        self,
        host: str,
        port: int = 11211,
        pool_size: int = 2,
        pool_minsize: int = None,
        serializer: t.Callable = pickle.dumps,
        deserializer: t.Callable = pickle.loads,
        **kwargs: t.Any
    ) -> None:
        """Record connection options; the client itself is created lazily."""
        super().__init__(**kwargs)
        self._client: Client = None  # type: ignore[assignment]
        self._client_options = {
            "host": host,
            "port": port,
            "pool_size": pool_size,
            "pool_minsize": pool_minsize,
        }
        self._serializer = serializer
        self._deserializer = deserializer

    def get_backend_timeout(self, timeout: int = None) -> int:
        """Coerce the computed expiry to an ``int`` as memcached requires."""
        return int(super().get_backend_timeout(timeout))

    @property
    def _cache_client(self) -> Client:
        """Lazily construct and memoize the ``aiomcache`` client."""
        if self._client is None:
            self._client = Client(**self._client_options)
        return self._client

    @make_key_decorator
    async def get_async(self, key: str, version: str = None) -> t.Optional[t.Any]:
        """Fetch and deserialize the value stored under *key*, or ``None``."""
        raw = await self._cache_client.get(key=key.encode("utf-8"))
        if raw:
            return self._deserializer(raw)
        return None  # pragma: no cover

    @make_key_decorator
    async def set_async(
        self, key: str, value: t.Any, timeout: int = None, version: str = None
    ) -> bool:
        """Serialize *value* and store it under *key* with the given expiry."""
        payload = self._serializer(value, self.pickle_protocol)
        return await self._cache_client.set(
            key.encode("utf-8"),
            payload,
            exptime=self.get_backend_timeout(timeout),
        )

    @make_key_decorator
    async def delete_async(self, key: str, version: str = None) -> bool:
        """Remove *key* from the server; True if it existed."""
        return await self._cache_client.delete(key=key.encode("utf-8"))

    @make_key_decorator
    async def touch_async(
        self, key: str, timeout: int = None, version: str = None
    ) -> bool:
        """Refresh the expiry of *key* without rewriting its value."""
        return await self._cache_client.touch(
            key=key.encode("utf-8"), exptime=self.get_backend_timeout(timeout)
        )

ellar/cache/backends/base.py

Lines changed: 110 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,110 @@
1+
import typing as t
2+
from abc import ABC
3+
4+
from starlette.concurrency import run_in_threadpool
5+
6+
from ..make_key_decorator import make_key_decorator, make_key_decorator_and_validate
7+
from ..model import BaseCacheBackend
8+
9+
10+
class BasePylibMemcachedCacheSync(BaseCacheBackend, ABC):
    """Synchronous cache operations shared by pylibmc/pymemcache backends.

    Subclasses provide ``_cache_client``, a memcached client exposing the
    ``get``/``set``/``delete``/``touch``/``disconnect_all``/``flush_all``
    protocol.
    """

    # Concrete memcached client supplied by subclasses.
    _cache_client: t.Any

    @make_key_decorator
    def get(self, key: str, version: str = None) -> t.Any:
        # BUGFIX: this was declared ``async def`` in an otherwise synchronous
        # class; the async wrapper dispatches it via ``run_in_threadpool``,
        # which expects a plain callable — an ``async def`` here would hand
        # callers an un-awaited coroutine instead of the cached value.
        return self._cache_client.get(key)

    @make_key_decorator_and_validate
    def set(
        self, key: str, value: t.Any, timeout: int = None, version: str = None
    ) -> bool:
        """Store *value* under *key*; True on success."""
        result = self._cache_client.set(
            key, value, int(self.get_backend_timeout(timeout))
        )
        if not result:
            # Make sure the key doesn't keep its old value in case of failure
            # to set (memcached's 1MB limit).
            self._cache_client.delete(key)
            return False
        return bool(result)

    @make_key_decorator
    def delete(self, key: str, version: str = None) -> bool:
        """Remove *key*; True if the server deleted it."""
        result = self._cache_client.delete(key)
        return bool(result)

    @make_key_decorator
    def touch(self, key: str, timeout: int = None, version: str = None) -> bool:
        """Refresh the expiry of *key* without changing its value."""
        result = self._cache_client.touch(key, self.get_backend_timeout(timeout))
        return bool(result)

    def close(self, **kwargs: t.Any) -> None:
        # Many clients don't clean up connections properly.
        self._cache_client.disconnect_all()

    def clear(self) -> None:
        """Flush every entry on the memcached server."""
        self._cache_client.flush_all()
47+
48+
49+
class BasePylibMemcachedCache(BasePylibMemcachedCacheSync):
    """Async-capable memcached backend that wraps a synchronous client.

    The async methods run their synchronous counterparts in a thread pool
    so they never block the event loop.
    """

    def __init__(
        self,
        server: t.List[str],
        library_client_type: t.Type,
        options: t.Dict = None,
        **kwargs: t.Any
    ):
        """Store server addresses and the client class; the client is built lazily.

        :param server: list of memcached server addresses.
        :param library_client_type: client class (e.g. ``pylibmc.Client``).
        :param options: extra keyword arguments forwarded to the client.
        """
        super().__init__(**kwargs)
        self._servers = server

        self._cache_client_class: t.Type = library_client_type
        self._cache_client_init: t.Any = None
        self._options = options or {}

    @property
    def client_servers(self) -> t.List[str]:
        """Server addresses in the form the client library expects."""
        return self._servers

    @property
    def _cache_client(self) -> t.Any:
        """
        Implement transparent thread-safe access to a memcached client.
        """
        if self._cache_client_init is None:
            self._cache_client_init = self._cache_client_class(
                self.client_servers, **self._options
            )
        return self._cache_client_init

    async def executor(self, func: t.Callable, *args: t.Any, **kwargs: t.Any) -> t.Any:
        """Run the blocking *func* in a worker thread and await its result."""
        return await run_in_threadpool(func, *args, **kwargs)

    async def get_async(self, key: str, version: str = None) -> t.Any:
        # BUGFIX: forward *version* — it was previously dropped, so versioned
        # keys resolved differently between the sync and async paths.
        return await self.executor(self.get, key, version)

    async def set_async(
        self, key: str, value: t.Any, timeout: int = None, version: str = None
    ) -> bool:
        # BUGFIX: forward *version* (previously dropped).
        result = await self.executor(self.set, key, value, timeout, version)
        return bool(result)

    async def delete_async(self, key: str, version: str = None) -> bool:
        result = await self.executor(self.delete, key, version)
        return bool(result)

    async def touch_async(
        self, key: str, timeout: int = None, version: str = None
    ) -> bool:
        # BUGFIX: forward *version* (previously dropped).
        result = await self.executor(self.touch, key, timeout, version)
        return bool(result)

    async def close_async(self, **kwargs: t.Any) -> None:
        # Many clients don't clean up connections properly.
        await self.executor(self._cache_client.disconnect_all)

    async def clear_async(self) -> None:
        """Flush every entry on the memcached server without blocking the loop."""
        await self.executor(self._cache_client.flush_all)

    def validate_key(self, key: str) -> None:
        """Run the base validation, then emit memcached-specific key warnings."""
        super().validate_key(key)
        self._memcache_key_warnings(key)

ellar/cache/backends/pylib_cache.py

Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
"""
2+
PyLibMCCacheBackend inspired by Django PyLibMCCache
3+
"""
4+
import typing as t
5+
6+
try:
7+
from pylibmc import Client
8+
except ImportError as e: # pragma: no cover
9+
raise RuntimeError(
10+
"To use `PyLibMCCacheBackend`, you have to install 'pylibmc' package e.g. `pip install pylibmc`"
11+
) from e
12+
13+
from .base import BasePylibMemcachedCache
14+
15+
16+
class PyLibMCCacheBackend(BasePylibMemcachedCache):
    """An implementation of a cache binding using pylibmc"""

    def __init__(self, server: t.List[str], options: t.Dict = None, **kwargs: t.Any):
        """Bind the memcached *server* list to a ``pylibmc.Client``."""
        super().__init__(server, library_client_type=Client, options=options, **kwargs)

    @property
    def client_servers(self) -> t.List[str]:
        # pylibmc addresses unix sockets by bare path, so strip any
        # "unix:" prefix from the configured addresses.
        return [address.replace("unix:", "") for address in self._servers]

    async def close_async(self, **kwargs: t.Any) -> None:
        # libmemcached manages its own connections. Don't call disconnect_all()
        # as it resets the failover state and creates unnecessary reconnects.
        return None

    def close(self, **kwargs: t.Any) -> None:
        """No-op: libmemcached manages its own connection lifecycle."""
        return None

ellar/cache/backends/pymem_cache.py

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
"""
2+
PyMemcacheCacheBackend inspired by Django PyMemcacheCache
3+
"""
4+
import typing as t
5+
6+
try:
7+
from pymemcache import HashClient
8+
from pymemcache.serde import pickle_serde
9+
except ImportError as e: # pragma: no cover
10+
raise RuntimeError(
11+
"To use `PyMemcacheCacheBackend`, you have to install 'pymemcache' package e.g. `pip install pymemcache`"
12+
) from e
13+
from .base import BasePylibMemcachedCache
14+
15+
16+
class PyMemcacheCacheBackend(BasePylibMemcachedCache):
    """An implementation of a cache binding using pymemcache."""

    def __init__(self, server: t.List[str], options: t.Dict = None, **kwargs: t.Any):
        """Bind *server* to a ``pymemcache.HashClient`` with sane defaults.

        Defaults mirror Django's PyMemcacheCache; any caller-supplied
        *options* override them.
        """
        merged_options: t.Dict = dict(
            allow_unicode_keys=True,
            default_noreply=False,
            serde=pickle_serde,
        )
        merged_options.update(options or {})

        super().__init__(
            server, library_client_type=HashClient, options=merged_options, **kwargs
        )

0 commit comments

Comments
 (0)