# === kolas_cache/__init__.py ===
from .factory import create_cache
from .backends.base import Cache
from .support import CacheURL
from .manager import CacheManager

__all__ = ['create_cache', 'CacheURL', 'CacheManager', 'Cache']


# === kolas_cache/backends/__init__.py ===
# (intentionally empty)


# === kolas_cache/backends/base.py ===
import asyncio
from abc import ABC, abstractmethod
from typing import Any


class Cache(ABC):  # pragma: no cover
    DEFAULT_TIMEOUT: int = 60 * 5

    def __init__(self, prefix: str = None):
        self.prefix = prefix
        self.lock = asyncio.Lock()

    async def has(self, key: str) -> bool:
        return await self.get(key) is not None

    @abstractmethod
    async def set(self, key: str, value: Any, seconds: int = DEFAULT_TIMEOUT) -> None:
        raise NotImplementedError()

    @abstractmethod
    async def get(self, key: str, default: Any = None) -> Any:
        raise NotImplementedError()

    @abstractmethod
    async def flush(self) -> None:
        raise NotImplementedError()

    @abstractmethod
    async def forget(self, key: str) -> None:
        raise NotImplementedError()

    async def increment(self, key: str, delta: int = 1, default: int = 0) -> int:
        value = default
        if await self.has(key):
            value = await self.get(key)
        value = value + delta
        await self.set(key, value)
        return value

    async def decrement(self, key: str, delta: int = 1, default: int = 0) -> int:
        value = default
        if await self.has(key):
            value = await self.get(key)
        value = value - delta
        await self.set(key, value)
        return value

    def make_key(self, key: str) -> str:
        if self.prefix is None:
            return key
        return f'{self.prefix}_{key}'


# === kolas_cache/backends/dummy.py ===
from typing import Any

from kolas_cache import Cache
from kolas_cache.support import CacheURL


class DummyCache(Cache):  # pragma: no cover
    """A no-op backend: stores nothing and forgets everything immediately."""

    async def has(self, key: str) -> bool:
        return False

    async def set(self, key: str, value: Any, seconds: int = Cache.DEFAULT_TIMEOUT) -> None:
        pass

    async def get(self, key: str, default: Any = None) -> Any:
        # a missing key yields the caller-supplied default, not a bare None
        return default

    async def flush(self) -> None:
        pass

    async def forget(self, key: str) -> None:
        pass

    async def increment(self, key: str, delta: int = 1, default: int = 0) -> int:
        # mirror the base-class semantics for a key that is never present
        return default + delta

    async def decrement(self, key: str, delta: int = 1, default: int = 0) -> int:
        return default - delta


def dummy_cache_factory(config: CacheURL) -> DummyCache:
    """
    Dummy cache factory.

    Example DSN: `dummy://?prefix=prj_`
    """
    return DummyCache(prefix=config.prefix)
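# === examples/basic_api.py (illustrative sketch, not part of the package) ===
# A minimal tour of the Cache API above, run against DummyCache because it
# needs no external service. The filename and the asyncio.run entry point
# are assumptions made for this example only.
import asyncio

from kolas_cache.backends.dummy import DummyCache


async def main():
    cache = DummyCache(prefix='prj')
    # DummyCache never stores anything, so `has` is always False
    assert await cache.has('counter') is False
    # increment() falls back to `default` for a missing key: 0 + 2 == 2
    print(await cache.increment('counter', delta=2))
    # keys are namespaced through the prefix
    print(cache.make_key('counter'))  # 'prj_counter'


asyncio.run(main())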
# === kolas_cache/backends/file.py ===
import asyncio
import glob
import os
import pickle
import time
import zlib
from typing import Any

import aiofiles

from kolas_cache import Cache
from kolas_cache.support import CacheURL
from kolas_cache.utils import run_in_threadpool


class FileSystemCache(Cache):
    suffix = '.cache'

    def __init__(self, path: str, prefix: str = None):
        super().__init__(prefix)
        self.path = path

    async def set(self, key: str, value: Any, seconds: int = Cache.DEFAULT_TIMEOUT) -> None:
        key = self.make_key(key)
        async with self.lock:
            async with aiofiles.open(self.make_filename(key), 'wb') as f:
                row = {
                    'contents': value,
                    'time': time.time(),
                    'expiry': seconds,
                }
                contents = zlib.compress(
                    pickle.dumps(row, pickle.HIGHEST_PROTOCOL)
                )
                await f.write(contents)

    async def get(self, key: str, default: Any = None) -> Any:
        key = self.make_key(key)
        async with self.lock:
            path = self.make_filename(key)
            if not await run_in_threadpool(os.path.exists, path):
                return default
            async with aiofiles.open(path, 'rb') as f:
                contents = await f.read()
                row = pickle.loads(zlib.decompress(contents))
                max_time = row['time'] + row['expiry']
                if max_time > time.time():  # expires in the future
                    return row['contents']
            return default

    async def flush(self) -> None:
        async with self.lock:
            files = await run_in_threadpool(
                glob.glob, f'{self.path}/*{self.suffix}'
            )
            await asyncio.gather(*[
                run_in_threadpool(os.remove, f) for f in files
            ])

    async def forget(self, key: str) -> None:
        key = self.make_key(key)
        async with self.lock:
            path = self.make_filename(key)
            await run_in_threadpool(os.remove, path)

    def make_filename(self, key: str) -> str:
        return os.path.abspath(
            os.path.join(self.path, f'{key}{self.suffix}')
        )


def filesystem_cache_factory(config: CacheURL) -> FileSystemCache:
    """
    Filesystem cache factory.

    Example DSN: `file:///tmp/caches/?prefix=prj_`
    Note the triple slash here!
    """
    return FileSystemCache(config.path, config.prefix)


# === kolas_cache/backends/memcached.py ===
import pickle
import zlib
from typing import Any

from kolas_cache import Cache
from kolas_cache.support import CacheURL

try:
    import aiomcache
except ImportError:  # pragma: no cover
    aiomcache = None


class MemcachedCache(Cache):
    def __init__(self, client: aiomcache.Client, prefix: str = None):
        super().__init__(prefix)
        self.client = client

    async def set(self, key: str, value: Any, seconds: int = Cache.DEFAULT_TIMEOUT) -> None:
        key = self.make_key(key)
        async with self.lock:
            contents = zlib.compress(
                pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
            )
            await self.client.set(key, contents, seconds)

    async def get(self, key: str, default: Any = None) -> Any:
        key = self.make_key(key)
        async with self.lock:
            contents = await self.client.get(key)
            if contents is not None:
                return pickle.loads(zlib.decompress(contents))
            return default

    async def flush(self) -> None:
        async with self.lock:
            await self.client.flush_all()

    async def forget(self, key: str) -> None:
        key = self.make_key(key)
        async with self.lock:
            await self.client.delete(key)

    def make_key(self, key: str) -> bytes:
        # memcached requires byte keys
        return super().make_key(key).encode('utf-8')


def memcached_cache_factory(config: CacheURL) -> MemcachedCache:
    """
    Memcached cache factory.

    Example DSN: `memcached://localhost:11211/0?prefix=prj_&minsize=1&maxsize=10`
    """
    minsize = int(config.query.get('minsize', 1))
    maxsize = int(config.query.get('maxsize', 10))
    client = aiomcache.Client(
        host=config.hostname or 'localhost',
        port=int(config.port or 11211),
        pool_minsize=minsize,
        pool_size=maxsize,
    )
    return MemcachedCache(client, config.prefix)
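# === examples/file_backend.py (illustrative sketch, not part of the package) ===
# A short walk through the filesystem backend. It assumes `aiofiles` is
# installed and that the target directory exists -- the backend itself does
# not create it, so the example does. The /tmp/caches path is arbitrary.
import asyncio
import os

from kolas_cache.backends.file import FileSystemCache


async def main():
    os.makedirs('/tmp/caches', exist_ok=True)
    cache = FileSystemCache('/tmp/caches', prefix='prj')
    await cache.set('greeting', {'hello': 'world'}, seconds=60)
    print(await cache.get('greeting'))          # {'hello': 'world'}
    await cache.forget('greeting')
    print(await cache.get('greeting', 'gone'))  # 'gone'


asyncio.run(main())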
# === kolas_cache/backends/memory.py ===
from datetime import datetime, timedelta
from typing import Any

from kolas_cache import Cache
from kolas_cache.support import CacheURL


class InMemoryCache(Cache):
    def __init__(self, prefix: str = None):
        super().__init__(prefix)
        self._storage = {}

    async def set(self, key: str, value: Any, seconds: int = Cache.DEFAULT_TIMEOUT) -> None:
        key = self.make_key(key)
        async with self.lock:
            self._storage[key] = (value, datetime.now() + timedelta(seconds=seconds))

    async def get(self, key: str, default: Any = None) -> Any:
        key = self.make_key(key)
        async with self.lock:
            if key not in self._storage:
                return default
            value, expires = self._storage[key]
            if datetime.now() > expires:
                # expired entries behave like missing keys
                return default
            return value

    async def flush(self) -> None:
        async with self.lock:
            self._storage = {}

    async def forget(self, key: str) -> None:
        key = self.make_key(key)
        async with self.lock:
            if key in self._storage:
                del self._storage[key]


def inmemory_cache_factory(config: CacheURL) -> InMemoryCache:
    """
    In-memory cache factory.

    Example DSN: `memory://?prefix=prj_`
    """
    return InMemoryCache(prefix=config.prefix)


# === kolas_cache/backends/redis.py ===
import pickle
import zlib
from typing import Any

import aioredis

from kolas_cache import Cache
from kolas_cache.support import CacheURL


class RedisCache(Cache):
    def __init__(self, client: aioredis.Redis, prefix: str = None):
        super().__init__(prefix)
        self.client = client

    async def set(self, key: str, value: Any, seconds: int = Cache.DEFAULT_TIMEOUT) -> None:
        key = self.make_key(key)
        async with self.lock:
            contents = zlib.compress(
                pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
            )
            # SETEX takes the expiry in seconds, not milliseconds
            await self.client.setex(key, seconds, contents)

    async def get(self, key: str, default: Any = None) -> Any:
        key = self.make_key(key)
        async with self.lock:
            contents = await self.client.get(key)
            if contents is not None:
                return pickle.loads(zlib.decompress(contents))
            return default

    async def flush(self) -> None:
        async with self.lock:
            await self.client.flushdb()

    async def forget(self, key: str) -> None:
        key = self.make_key(key)
        async with self.lock:
            await self.client.delete(key)


def redis_cache_factory(config: CacheURL) -> RedisCache:
    """
    Redis cache factory.

    Example DSN: `redis://user:password@localhost:6379/0?prefix=prj_&minsize=1&maxsize=10`
    The DSN defaults to `redis://localhost:6379/0`.
    """
    minsize = int(config.query.get('minsize', 1))
    maxsize = int(config.query.get('maxsize', 10))
    password = config.password or None
    hostname = config.hostname or 'localhost'
    port = int(config.port or 6379)
    database = int(config.path[1:] or 0)
    # aioredis expects a (host, port) tuple; a bare "host:port" string
    # would be treated as a unix socket path
    address = (hostname, port)
    pool = aioredis.ConnectionsPool(
        address, database, password, minsize=minsize, maxsize=maxsize
    )
    return RedisCache(aioredis.Redis(pool), config.prefix)
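# === examples/ttl.py (illustrative sketch, not part of the package) ===
# Makes the TTL behaviour concrete using InMemoryCache, which needs no
# external service. The one-second TTL and the sleep duration are arbitrary
# choices for the demonstration.
import asyncio

from kolas_cache.backends.memory import InMemoryCache


async def main():
    cache = InMemoryCache()
    await cache.set('token', 'abc123', seconds=1)
    print(await cache.get('token'))             # 'abc123'
    await asyncio.sleep(1.1)
    # the entry has expired, so the caller-supplied default comes back
    print(await cache.get('token', 'expired'))  # 'expired'


asyncio.run(main())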
# === kolas_cache/factory.py ===
import importlib
from typing import Callable, Dict, Union

from kolas_cache.backends.base import Cache
from kolas_cache.support import CacheURL

CacheFactory = Union[str, Callable[[CacheURL], Cache]]


class CacheFactories:
    known_backends: Dict[str, CacheFactory] = {
        'dummy': 'kolas_cache.backends.dummy.dummy_cache_factory',
        'memory': 'kolas_cache.backends.memory.inmemory_cache_factory',
        'file': 'kolas_cache.backends.file.filesystem_cache_factory',
        'redis': 'kolas_cache.backends.redis.redis_cache_factory',
        'memcached': 'kolas_cache.backends.memcached.memcached_cache_factory',
    }

    @classmethod
    def register(cls, protocol: str, factory: CacheFactory):
        cls.known_backends[protocol] = factory

    @classmethod
    def get_factory(cls, backend: str) -> Callable[[CacheURL], Cache]:
        if backend not in cls.known_backends:
            raise ValueError(f'{backend} is not a valid cache backend.')
        factory = cls.known_backends[backend]
        if isinstance(factory, str):
            # dotted-path strings are resolved lazily, so optional backends
            # (and their third-party dependencies) are only imported on use
            module_name, _, factory_fn = factory.rpartition('.')
            factory = getattr(
                importlib.import_module(module_name), factory_fn
            )
        return factory


def create_cache(dsn: str) -> Cache:
    config = CacheURL(dsn)
    factory = CacheFactories.get_factory(config.backend)
    return factory(config)


# === kolas_cache/manager.py ===
from typing import Dict

from kolas_cache import Cache


class CacheManager:
    def __init__(self, caches: Dict[str, Cache] = None):
        self._caches = caches or {}

    def add(self, cache: Cache, name: str = 'default'):
        self._caches[name] = cache

    def get(self, name: str) -> Cache:
        return self._caches[name]

    def has(self, name: str) -> bool:
        return name in self._caches

    def __setitem__(self, name: str, cache: Cache):
        self.add(cache, name)

    def __getitem__(self, name: str) -> Cache:
        return self._caches[name]

    def __contains__(self, name: str) -> bool:
        return self.has(name)


# === kolas_cache/support.py ===
import urllib.parse


class CacheURL:
    def __init__(self, dsn: str):
        self.url = dsn
        self._url_parts: urllib.parse.SplitResult = urllib.parse.urlsplit(dsn)

    @property
    def backend(self):
        return self._url_parts.scheme

    @property
    def hostname(self):
        return self._url_parts.hostname

    @property
    def port(self):
        return self._url_parts.port

    @property
    def username(self):
        return self._url_parts.username

    @property
    def password(self):
        return self._url_parts.password

    @property
    def path(self):
        return self._url_parts.path

    @property
    def query(self):
        return dict(urllib.parse.parse_qsl(self._url_parts.query))

    @property
    def prefix(self):
        return self.query.get('prefix')
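# === examples/register_backend.py (illustrative sketch, not part of the package) ===
# Because CacheFactories.register accepts either a factory callable or a
# dotted-path string, third-party backends can plug into the same DSN
# mechanism. The 'null' scheme and null_cache_factory below are hypothetical
# names invented for this sketch; DummyCache stands in as the implementation.
from kolas_cache import Cache, CacheURL, create_cache
from kolas_cache.backends.dummy import DummyCache
from kolas_cache.factory import CacheFactories


def null_cache_factory(config: CacheURL) -> Cache:
    # reuse DummyCache as a stand-in implementation
    return DummyCache(prefix=config.prefix)


CacheFactories.register('null', null_cache_factory)
cache = create_cache('null://?prefix=prj_')
print(type(cache).__name__)  # DummyCache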
# === kolas_cache/utils.py ===
import asyncio
import functools
import typing

try:
    import contextvars  # not available on Python < 3.7
except ImportError:  # pragma: no cover
    contextvars = None


async def run_in_threadpool(
    func: typing.Callable, *args: typing.Any, **kwargs: typing.Any
) -> typing.Any:
    loop = asyncio.get_event_loop()
    if contextvars is not None:  # pragma: no cover
        # Ensure we run in the same context
        child = functools.partial(func, *args, **kwargs)
        context = contextvars.copy_context()
        func = context.run
        args = (child,)
    elif kwargs:  # pragma: no cover
        # loop.run_in_executor doesn't accept 'kwargs', so bind them in here
        func = functools.partial(func, **kwargs)
    return await loop.run_in_executor(None, func, *args)
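# === examples/end_to_end.py (illustrative sketch, not part of the package) ===
# Puts the pieces together: create_cache builds a backend from a DSN and
# CacheManager gives it a name. The memory:// DSN keeps the example
# self-contained; the keys and values are arbitrary.
import asyncio

from kolas_cache import CacheManager, create_cache


async def main():
    manager = CacheManager()
    manager['default'] = create_cache('memory://?prefix=prj_')

    cache = manager['default']
    await cache.set('user:1', {'name': 'Ada'}, seconds=30)
    print(await cache.get('user:1'))        # {'name': 'Ada'}
    print(await cache.increment('visits'))  # 1
    await cache.flush()
    print('default' in manager)             # True


asyncio.run(main())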