PKƾNa&lllhappyly/__init__.py"""Conveniently separate your business logic from messaging stuff.""" # flake8: noqa F401 import logging __version__ = '0.8.0' from .listening import Executor, BaseListener from .schemas import Schema from .caching import Cacher from .serialization import Serializer, Deserializer from .handling import Handler, DUMMY_HANDLER from .exceptions import StopPipeline def _welcome(): import sys sys.stdout.write(f'Using happyly v{__version__}.\n') def _setup_warnings(): import warnings for warning_type in PendingDeprecationWarning, DeprecationWarning: warnings.filterwarnings( 'always', category=warning_type, module=r'^{0}\.'.format(__name__) ) def _setup_logging(): logging.getLogger(__name__).setLevel(logging.INFO) _welcome() _setup_warnings() _setup_logging() del _welcome del _setup_warnings del _setup_logging PKґNh happyly/exceptions.pyfrom attr import attrs @attrs(auto_attribs=True, auto_exc=True) # type: ignore class StopPipeline(Exception): """ This exception should be raised to stop a pipeline. After raising it, :meth:`Executor.on_stopped` will be called. """ reason: str = '' @attrs(auto_exc=True) # type: ignore class FetchedNoResult(Exception): """ Exception thrown by :meth:`Executor.run_for_result` when it is unable to fetch a result """ pass PKOUN!happyly/_deprecations/__init__.pyPKOUNRrTThappyly/_deprecations/utils.pyimport warnings def will_be_removed( deprecated_name: str, use_instead, removing_in_version: str, stacklevel=2 ): warnings.warn( f"Please use {use_instead.__name__} instead, " f"{deprecated_name} will be removed in happyly v{removing_in_version}.", DeprecationWarning, stacklevel=stacklevel, ) PKOUNv))happyly/caching/__init__.pyfrom .cacher import Cacher # noqa: F401 PKOUN6yyhappyly/caching/cacher.pyfrom abc import ABC, abstractmethod from typing import Any _no_default_impl = NotImplementedError('No default implementation for class Cacher') class Cacher(ABC): """ Abstract base class which defines interface of any caching component to be used via :class:`.CacheByRequestIdMixin` or similar mixin. """ @abstractmethod def add(self, data: Any, key: str): """ Add the provided data to cache and store it by the provided key. """ raise _no_default_impl @abstractmethod def remove(self, key: str): """ Remove data from cache which is stored by the provided key. """ raise _no_default_impl @abstractmethod def get(self, key: str): """ Returns data which is stored in cache by the provided key. """ raise _no_default_impl PKґN6nhappyly/caching/mixins.pyimport json from typing import Any, Mapping, Optional from happyly.caching.cacher import Cacher class CacheByRequestIdMixin: """ Mixin which adds caching functionality to Listener. Utilizes notions of listener's topic and request id of message -- otherwise will not work. To be used via multiple inheritance. For example, given some component `SomeListener` you can define its caching equivalent by defining `SomeCachedListener` which inherits from both `SomeListener` and :class:`.CacheByRequestIdMixin`. """ def __init__(self, cacher: Cacher): self.cacher = cacher def on_received(self, message: Any): super().on_received(message) try: req_id = self._get_req_id(message) except Exception: pass else: data = json.dumps( {'topic': self.from_topic, 'data': json.loads(message.data)} ) self.cacher.add(data, key=req_id) def _get_req_id(self, message: Any) -> str: assert self.deserializer is not None attribtues = self.deserializer.deserialize(message) return attribtues[self.deserializer.request_id_field] def _rm(self, parsed_message: Mapping[str, Any]): assert self.deserializer is not None self.cacher.remove(parsed_message[self.deserializer.request_id_field]) def on_published( self, original_message: Any, parsed_message: Optional[Mapping[str, Any]], result ): super().on_published(original_message, parsed_message, result) if parsed_message is not None: self._rm(parsed_message) def on_deserialization_failed(self, message: Any, error: Exception): super().on_deserialization_failed(message, error) try: req_id = self._get_req_id(message) except Exception: pass else: self.cacher.remove(key=req_id) PKґNE!happyly/google_pubsub/__init__.py# flake8: noqa F401 from .high_level import ( GoogleSimpleSender, GoogleSimpleReceiver, GoogleReceiveAndReplyComponent, GoogleSimpleReceiveAndReply, GoogleCachedReceiveAndReply, GoogleCachedReceiver, GoogleLateAckReceiver, GoogleLateAckReceiveAndReply, GoogleBaseReceiver, GoogleBaseReceiveAndReply, ) from .redis_cacher import RedisCacher from .deserializers import JSONDeserializerWithRequestIdRequired from .publishers import GooglePubSubPublisher from .subscribers import GooglePubSubSubscriber PKOUNdj&happyly/google_pubsub/deserializers.pyfrom typing import Mapping, Any import json from attr import attrs import marshmallow from happyly.serialization import Deserializer @attrs(auto_attribs=True, frozen=True) class JSONDeserializerWithRequestIdRequired(Deserializer): """ Deserializer for Google Pub/Sub messages which expects a message of certain schema to be written in `message.data` as JSON encoded into binary data with utf-8. Schema used with this serializer must define some field which is used as request id (you can specify which one in constructor). If `JSONDeserializerWithRequestIdRequired` fails to deserialize some message, you can use `build_error_result` to fetch request id and provide error message. """ schema: marshmallow.Schema request_id_field: str = 'request_id' status_field: str = 'status' error_field: str = 'error' _status_error: str = 'ERROR' def deserialize(self, message: Any) -> Mapping[str, Any]: """ Loads message attributes from `message.data`, expects it to be a JSON which corresponds `self.schema` encoded with utf-8. """ data = message.data.decode('utf-8') deserialized, _ = self.schema.loads(data) return deserialized def build_error_result(self, message: Any, error: Exception) -> Mapping[str, Any]: """ Provides a fallback result when `deserialize` fails. Returns a dict with attributes: * * * Field names can be specified in constructor. If request id cannot be fetched, it is set to an empty string. """ attributes = json.loads(message.data) try: return { self.request_id_field: attributes[self.request_id_field], self.status_field: self._status_error, self.error_field: repr(error), } except KeyError as e: return { self.request_id_field: '', self.status_field: self._status_error, self.error_field: f'{repr(e)}: ' f'Message contains no {self.request_id_field}', } PKґN2m<#happyly/google_pubsub/publishers.pyfrom typing import Any from google.cloud import pubsub_v1 from happyly.pubsub import BasePublisher class GooglePubSubPublisher(BasePublisher): def publish(self, serialized_message: Any): future = self._publisher_client.publish( f'projects/{self.project}/topics/{self.to_topic}', serialized_message ) try: future.result() return except Exception as e: raise e def __init__(self, project: str, to_topic: str): super().__init__() self.project = project self.to_topic = to_topic self._publisher_client = pubsub_v1.PublisherClient() PKOUNץrr%happyly/google_pubsub/redis_cacher.pyimport logging from happyly.caching.cacher import Cacher _LOGGER = logging.getLogger(__name__) class RedisCacher(Cacher): def __init__(self, host: str, port: int, prefix: str = ''): try: import redis except ImportError as e: raise ImportError('Please install redis>=3.0 to use this feature.') from e self.prefix = prefix self.client = redis.StrictRedis(host=host, port=port) _LOGGER.info( f'Cache was successfully initialized with Redis client ({host}:{port})' ) if self.prefix != '': _LOGGER.info(f'Using prefix {self.prefix}') def add(self, data: str, key: str): self.client.hset(self.prefix, key, data) _LOGGER.info(f'Cached message with id {key}') def remove(self, key: str): self.client.hdel(self.prefix, key) _LOGGER.info(f'Message with id {key} was removed from cache') def get(self, key: str): self.client.hget(self.prefix, key) def get_all(self): keys = self.client.hkeys(self.prefix) return [self.client.hget(self.prefix, k) for k in keys] PKOUNVX$happyly/google_pubsub/subscribers.pyimport logging from typing import Callable, Any from attr import attrs, attrib from google.cloud import pubsub_v1 from happyly.pubsub import SubscriberWithAck _LOGGER = logging.getLogger(__name__) @attrs(auto_attribs=True) class GooglePubSubSubscriber(SubscriberWithAck): project: str subscription_name: str _subscription_client: pubsub_v1.SubscriberClient = attrib(init=False) _subscription_path: str = attrib(init=False) def __attrs_post_init__(self): s = pubsub_v1.SubscriberClient() self._subscription_path = s.subscription_path( self.project, self.subscription_name ) self._subscription_client = s def subscribe(self, callback: Callable[[Any], Any]): _LOGGER.info(f'Starting to listen to {self.subscription_name}') return self._subscription_client.subscribe(self._subscription_path, callback) def ack(self, message): message.ack() PKOUN,happyly/google_pubsub/high_level/__init__.py# flake8: noqa F401 from .simple import ( GoogleSimpleSender, GoogleSimpleReceiver, GoogleSimpleReceiveAndReply, GoogleReceiveAndReplyComponent, ) from .with_cache import GoogleCachedReceiveAndReply, GoogleCachedReceiver from .late_ack import GoogleLateAckReceiver, GoogleLateAckReceiveAndReply from .early_ack import GoogleEarlyAckReceiver, GoogleEarlyAckReceiveAndReply from .base import GoogleBaseReceiver, GoogleBaseReceiveAndReply PKґN3  (happyly/google_pubsub/high_level/base.pyimport logging from typing import Optional, Union, Any, Mapping import marshmallow from happyly.logs.request_id import RequestIdLogger from happyly.serialization import DUMMY_SERDE from happyly.serialization.json import BinaryJSONSerializerForSchema from ..subscribers import GooglePubSubSubscriber from ..deserializers import JSONDeserializerWithRequestIdRequired from ..publishers import GooglePubSubPublisher from happyly import Handler, Serializer from happyly.listening.listener import ListenerWithAck _LOGGER = logging.getLogger(__name__) def _format_message(message): return f'data: {message.data}, attributes: {message.attributes}' class _BaseGoogleListenerWithRequestIdLogger( ListenerWithAck[ JSONDeserializerWithRequestIdRequired, Union[None, GooglePubSubPublisher], Serializer, ] ): """ Introduces advanced logging based on topic and request id. """ def __init__( self, subscriber: GooglePubSubSubscriber, handler: Handler, deserializer: JSONDeserializerWithRequestIdRequired, serializer: BinaryJSONSerializerForSchema = None, publisher: Optional[GooglePubSubPublisher] = None, from_topic: str = '', ): self.from_topic = from_topic super().__init__( subscriber=subscriber, publisher=publisher, handler=handler, deserializer=deserializer, serializer=serializer if serializer is not None else DUMMY_SERDE, ) def on_received(self, original_message: Any): logger = RequestIdLogger(_LOGGER, self.from_topic) logger.info(f"Received message: {_format_message(original_message)}") def on_deserialized( self, original_message: Any, deserialized_message: Mapping[str, Any] ): assert self.deserializer is not None request_id = deserialized_message[self.deserializer.request_id_field] logger = RequestIdLogger(_LOGGER, self.from_topic, request_id) logger.debug( f"Message successfully deserialized into attributes: {deserialized_message}" ) def on_deserialization_failed(self, original_message: Any, error: Exception): logger = RequestIdLogger(_LOGGER, self.from_topic) logger.exception( f"Was not able to deserialize the following message: " f"{_format_message(original_message)}" ) def on_handled( self, original_message: Any, deserialized_message: Mapping[str, Any], result ): assert self.deserializer is not None request_id = deserialized_message[self.deserializer.request_id_field] logger = RequestIdLogger(_LOGGER, self.from_topic, request_id) logger.info(f"Message handled, result {result}") def on_handling_failed( self, original_message: Any, deserialized_message: Mapping[str, Any], error: Exception, ): assert self.deserializer is not None request_id = deserialized_message[self.deserializer.request_id_field] logger = RequestIdLogger(_LOGGER, self.from_topic, request_id) logger.info(f'Failed to handle message, error {error}') def on_published( self, original_message: Any, deserialized_message: Optional[Mapping[str, Any]], result, serialized_message, ): assert self.deserializer is not None request_id = '' if deserialized_message is not None: request_id = deserialized_message[self.deserializer.request_id_field] logger = RequestIdLogger(_LOGGER, self.from_topic, request_id) logger.info(f"Published serialized result: {serialized_message}") def on_publishing_failed( self, original_message: Any, deserialized_message: Optional[Mapping[str, Any]], result, serialized_message, error: Exception, ): assert self.deserializer is not None request_id = '' if deserialized_message is not None: request_id = deserialized_message[self.deserializer.request_id_field] logger = RequestIdLogger(_LOGGER, self.from_topic, request_id) logger.exception(f"Failed to publish result: {serialized_message}") def on_acknowledged(self, message: Any): assert self.deserializer is not None try: msg: Mapping = self.deserializer.deserialize(message) req_id = msg[self.deserializer.request_id_field] except Exception: req_id = '' logger = RequestIdLogger(_LOGGER, self.from_topic, req_id) logger.info('Message acknowledged.') def on_finished(self, original_message: Any, error: Optional[Exception]): assert self.deserializer is not None try: msg: Mapping = self.deserializer.deserialize(original_message) req_id = msg[self.deserializer.request_id_field] except Exception: req_id = '' logger = RequestIdLogger(_LOGGER, self.from_topic, req_id) logger.info('Pipeline execution finished.') def on_stopped(self, original_message: Any, reason: str = ''): assert self.deserializer is not None try: msg: Mapping = self.deserializer.deserialize(original_message) req_id = msg[self.deserializer.request_id_field] except Exception: req_id = '' logger = RequestIdLogger(_LOGGER, self.from_topic, req_id) s = "." if reason == "" else f" due to the reason: {reason}." logger.info(f'Stopped pipeline{s}') class GoogleBaseReceiver(_BaseGoogleListenerWithRequestIdLogger): def __init__( self, input_schema: marshmallow.Schema, from_subscription: str, project: str, handler: Handler, from_topic: str = '', ): subscriber = GooglePubSubSubscriber( project=project, subscription_name=from_subscription ) deserializer = JSONDeserializerWithRequestIdRequired(schema=input_schema) super().__init__( subscriber=subscriber, handler=handler, deserializer=deserializer, from_topic=from_topic, ) class GoogleBaseReceiveAndReply(_BaseGoogleListenerWithRequestIdLogger): def __init__( self, handler: Handler, input_schema: marshmallow.Schema, from_subscription: str, output_schema: marshmallow.Schema, to_topic: str, project: str, from_topic: str = '', ): subscriber = GooglePubSubSubscriber( project=project, subscription_name=from_subscription ) deserializer = JSONDeserializerWithRequestIdRequired(schema=input_schema) serializer = BinaryJSONSerializerForSchema(schema=output_schema) publisher = GooglePubSubPublisher(project=project, to_topic=to_topic) super().__init__( handler=handler, deserializer=deserializer, subscriber=subscriber, serializer=serializer, publisher=publisher, from_topic=from_topic, ) PK7N -happyly/google_pubsub/high_level/early_ack.pyfrom typing import Optional, Any from .base import GoogleBaseReceiver, GoogleBaseReceiveAndReply class GoogleEarlyAckReceiver(GoogleBaseReceiver): def _fetch_deserialized_and_result(self, message: Optional[Any]): self.ack(message) super()._fetch_deserialized_and_result(message) class GoogleEarlyAckReceiveAndReply(GoogleBaseReceiveAndReply): def _fetch_deserialized_and_result(self, message: Optional[Any]): self.ack(message) super()._fetch_deserialized_and_result(message) PKOUN2R..,happyly/google_pubsub/high_level/late_ack.pyfrom typing import Optional, Any from ..high_level.base import GoogleBaseReceiver, GoogleBaseReceiveAndReply class GoogleLateAckReceiver(GoogleBaseReceiver): def on_finished(self, original_message: Any, error: Optional[Exception]): self.ack(original_message) super().on_finished(original_message, error) class GoogleLateAckReceiveAndReply(GoogleBaseReceiveAndReply): def on_finished(self, original_message: Any, error: Optional[Exception]): self.ack(original_message) super().on_finished(original_message, error) PKґNJ6zz*happyly/google_pubsub/high_level/simple.pyfrom typing import Union, Optional import marshmallow from happyly._deprecations.utils import will_be_removed from .early_ack import GoogleEarlyAckReceiver, GoogleEarlyAckReceiveAndReply from happyly.handling.dummy_handler import DUMMY_HANDLER from ..deserializers import JSONDeserializerWithRequestIdRequired from ..publishers import GooglePubSubPublisher from happyly.serialization.json import BinaryJSONSerializerForSchema from happyly.handling import Handler from happyly.listening.executor import Executor class GoogleSimpleSender( Executor[ Union[None, JSONDeserializerWithRequestIdRequired], GooglePubSubPublisher, BinaryJSONSerializerForSchema, ] ): def __init__( self, output_schema: marshmallow.Schema, to_topic: str, project: str, handler: Handler = DUMMY_HANDLER, input_schema: Optional[marshmallow.Schema] = None, ): if input_schema is None: deserializer = None else: deserializer = JSONDeserializerWithRequestIdRequired(schema=input_schema) publisher = GooglePubSubPublisher(project=project, to_topic=to_topic) serializer = BinaryJSONSerializerForSchema(schema=output_schema) super().__init__( publisher=publisher, handler=handler, deserializer=deserializer, serializer=serializer, ) class GoogleSimpleReceiver(GoogleEarlyAckReceiver): def __init__(self, *args, **kwargs): will_be_removed('GoogleSimpleReceiver', GoogleEarlyAckReceiver, '0.8.0') super().__init__(*args, **kwargs) class GoogleSimpleReceiveAndReply(GoogleEarlyAckReceiveAndReply): def __init__(self, *args, **kwargs): will_be_removed( 'GoogleSimpleReceiveAndReply', GoogleEarlyAckReceiveAndReply, '0.8.0' ) super().__init__(*args, **kwargs) class GoogleReceiveAndReplyComponent(GoogleEarlyAckReceiveAndReply): def __init__(self, *args, **kwargs): will_be_removed( 'GoogleReceiveAndReplyComponent', GoogleEarlyAckReceiveAndReply, '0.8.0' ) super().__init__(*args, **kwargs) PKґNM/$$.happyly/google_pubsub/high_level/with_cache.pyimport marshmallow from .early_ack import GoogleEarlyAckReceiveAndReply, GoogleEarlyAckReceiver from happyly.caching.cacher import Cacher from happyly.caching.mixins import CacheByRequestIdMixin from happyly.handling import Handler class GoogleCachedReceiveAndReply(CacheByRequestIdMixin, GoogleEarlyAckReceiveAndReply): def __init__( self, handler: Handler, input_schema: marshmallow.Schema, from_subscription: str, from_topic: str, output_schema: marshmallow.Schema, to_topic: str, project: str, cacher: Cacher, ): GoogleEarlyAckReceiveAndReply.__init__( self, handler=handler, input_schema=input_schema, from_subscription=from_subscription, output_schema=output_schema, to_topic=to_topic, project=project, from_topic=from_topic, ) CacheByRequestIdMixin.__init__(self, cacher) class GoogleCachedReceiver(CacheByRequestIdMixin, GoogleEarlyAckReceiver): def __init__( self, handler: Handler, input_schema: marshmallow.Schema, from_subscription: str, from_topic: str, project: str, cacher: Cacher, ): GoogleEarlyAckReceiver.__init__( self, handler=handler, input_schema=input_schema, from_subscription=from_subscription, project=project, from_topic=from_topic, ) CacheByRequestIdMixin.__init__(self, cacher) PKґN.bbhappyly/handling/__init__.pyfrom .handler import Handler # noqa: F401 from .dummy_handler import DUMMY_HANDLER # noqa: F401 PKOUNBK!happyly/handling/dummy_handler.pyfrom typing import Mapping, Any from happyly.handling.handler import Handler class _DummyHandler(Handler): def handle(self, message: Mapping[str, Any]): return message def on_handling_failed(self, message: Mapping[str, Any], error: Exception): raise error DUMMY_HANDLER: _DummyHandler = _DummyHandler() """ Handler which just returns provided message attributes (kind of an "identity function") """ PKґN)happyly/handling/handler.pyfrom abc import ABC, abstractmethod from typing import Mapping, Any, Optional _no_base_impl = NotImplementedError('No default implementation in base Handler class') class Handler(ABC): """ A class containing logic to handle a parsed message. """ @abstractmethod def handle(self, message: Mapping[str, Any]) -> Optional[Mapping[str, Any]]: """ Applies logic using a provided message, optionally gives back one or more results. Each result consists of message attributes which can be serialized and sent. When fails, calls :meth:`on_handling_failed` :param message: A parsed message as a dictionary of attributes :return: None if no result is extracted from handling, a dictionary of attributes for single result """ raise _no_base_impl @abstractmethod def on_handling_failed( self, message: Mapping[str, Any], error: Exception ) -> Optional[Mapping[str, Any]]: """ Applies fallback logic using a provided message when :meth:`handle` fails, optionally gives back one or more results. Enforces users of :class:`Handler` class to provide explicit strategy for errors. If you want to propagate error further to the underlying Executor/Handler, just re-raise an `error` here:: def on_handling_failed(self, message, error): raise error :param message: A parsed message as a dictionary of attributes :param error: Error raised by :meth:`handle` :return: None if no result is extracted from handling, a dictionary of attributes for single result """ raise _no_base_impl def __call__(self, message: Mapping[str, Any]) -> Optional[Mapping[str, Any]]: try: return self.handle(message) except Exception as e: return self.on_handling_failed(message, e) PK7NAP!happyly/listening/__init__.py# flake8: noqa F401 from .listener import BaseListener, EarlyAckListener, LateAckListener, ListenerWithAck from .executor import Executor PKґNgJ>>happyly/listening/executor.pyimport logging from collections import namedtuple from types import FunctionType from typing import Mapping, Any, Optional, TypeVar, Generic, Tuple, Union, Callable from happyly.exceptions import StopPipeline, FetchedNoResult from happyly.handling.dummy_handler import DUMMY_HANDLER from happyly.handling import Handler from happyly.serialization.deserializer import Deserializer from happyly.serialization.serializer import Serializer from happyly.pubsub import BasePublisher from happyly.serialization import DUMMY_SERDE _LOGGER = logging.getLogger(__name__) D = TypeVar("D", bound=Deserializer) P = TypeVar("P", bound=BasePublisher) SE = TypeVar("SE", bound=Serializer) _Result = Optional[Mapping[str, Any]] ResultAndDeserialized = namedtuple('ResultAndDeserialized', 'result deserialized') HandlerClsOrFn = Union[Handler, Callable[[Mapping[str, Any]], _Result]] def _deser_converter(deserializer: Union[Deserializer, Callable]): if isinstance(deserializer, FunctionType): return Deserializer.from_function(deserializer) elif isinstance(deserializer, Deserializer): return deserializer else: raise TypeError def _publ_converter(publisher: Union[BasePublisher, Callable]): if isinstance(publisher, FunctionType): return BasePublisher.from_function(publisher) elif isinstance(publisher, BasePublisher): return publisher else: raise TypeError def _ser_converter(serializer: Union[Serializer, Callable]): if isinstance(serializer, FunctionType): return Serializer.from_function(serializer) elif isinstance(serializer, Serializer): return serializer else: raise TypeError class Executor(Generic[D, P, SE]): """ Component which is able to run handler as a part of more complex pipeline. Implements managing of stages inside the pipeline (deserialization, handling, serialization, publishing) and introduces callbacks between the stages which can be easily overridden. Executor does not implement stages themselves, it takes internal implementation of stages from corresponding components: :class:`Handler`, :class:`Deserializer`, :class:`Publisher`. It means that :class:`Executor` is universal and can work with any serialization/messaging technology depending on concrete components provided to executor's constructor. """ handler: HandlerClsOrFn """ Provides implementation of handling stage to Executor. """ deserializer: D # Why type:ignore? Because DUMMY_SERDE is a subclass of Deserializer # but not necessarily subclass of whatever D will be in runtime. """ Provides implementation of deserialization stage to Executor. If not present, no deserialization is performed. """ publisher: Optional[P] """ Provides implementation of serialization and publishing stages to Executor. If not present, no publishing is performed. """ serializer: SE def __init__( self, handler: HandlerClsOrFn = DUMMY_HANDLER, deserializer: Optional[Union[D, Callable]] = None, publisher: Optional[Union[P, Callable]] = None, serializer: Optional[Union[SE, Callable]] = None, ): self.handler = handler # type: ignore if deserializer is None: self.deserializer = DUMMY_SERDE # type: ignore else: self.deserializer = _deser_converter(deserializer) if publisher is None: self.publisher = None else: self.publisher = _publ_converter(publisher) if serializer is None: self.serializer = DUMMY_SERDE # type: ignore else: self.serializer = _ser_converter(serializer) def on_received(self, original_message: Any): """ Callback which is called as soon as pipeline is run. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization """ _LOGGER.info(f"Received message: {original_message}") def on_deserialized( self, original_message: Any, deserialized_message: Mapping[str, Any] ): """ Callback which is called right after message was deserialized successfully. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization :param deserialized_message: Message attributes after deserialization """ _LOGGER.info( 'Message successfully deserialized into attributes: ' f'{deserialized_message}' ) def on_deserialization_failed(self, original_message: Any, error: Exception): """ Callback which is called right after deserialization failure. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization :param error: exception object which was raised """ _LOGGER.exception('') _LOGGER.error( f"Was not able to deserialize the following message: {original_message}" ) def on_handled( self, original_message: Any, deserialized_message: Mapping[str, Any], result: Optional[Mapping[str, Any]], ): """ Callback which is called right after message was handled (successfully or not, but without raising an exception). Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization :param deserialized_message: Message attributes after deserialization :param result: Result fetched from handler """ _LOGGER.info(f"Message handled, result: {result}.") def on_handling_failed( self, original_message: Any, deserialized_message: Mapping[str, Any], error: Exception, ): """ Callback which is called if handler's `on_handling_failed` raises an exception. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization :param deserialized_message: Message attributes after deserialization :param error: exception object which was raised """ _LOGGER.exception('') _LOGGER.error(f'Handler raised an exception.') def on_serialized( self, original_message: Any, deserialized_message: Optional[Mapping[str, Any]], result: _Result, serialized_message: Any, ): _LOGGER.debug('Serialized message.') def on_serialization_failed( self, original: Any, deserialized: Optional[Mapping[str, Any]], result: _Result, error: Exception, ): _LOGGER.exception('') _LOGGER.error('Was not able to deserialize message.') def on_published( self, original_message: Any, deserialized_message: Optional[Mapping[str, Any]], result: _Result, serialized_message: Any, ): """ Callback which is called right after message was published successfully. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization :param deserialized_message: Message attributes after deserialization :param result: Result fetched from handler """ _LOGGER.info(f"Published result: {result}") def on_publishing_failed( self, original_message: Any, deserialized_message: Optional[Mapping[str, Any]], result: _Result, serialized_message: Any, error: Exception, ): """ Callback which is called when publisher fails to publish. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization :param deserialized_message: Message attributes after deserialization :param result: Result fetched from handler :param error: exception object which was raised """ _LOGGER.exception('') _LOGGER.error(f"Failed to publish result: {result}") def on_finished(self, original_message: Any, error: Optional[Exception]): """ Callback which is called when pipeline finishes its execution. Is guaranteed to be called unless pipeline is stopped via StopPipeline. :param original_message: Message as it has been received, without any deserialization :param error: exception object which was raised or None """ _LOGGER.info('Pipeline execution finished.') def on_stopped(self, original_message: Any, reason: str = ''): """ Callback which is called when pipeline is stopped via :exc:`.StopPipeline` :param original_message: Message as it has been received, without any deserialization :param reason: message describing why the pipeline stopped """ s = "." if reason == "" else f" due to the reason: {reason}." _LOGGER.info(f'Stopped pipeline{s}') def _try_publish( self, original: Any, parsed: Optional[Mapping[str, Any]], result: _Result, serialized: Any, ): assert self.publisher is not None try: self.publisher.publish(serialized) except Exception as e: self.on_publishing_failed( original_message=original, deserialized_message=parsed, result=result, serialized_message=serialized, error=e, ) raise e from e else: self.on_published( original_message=original, deserialized_message=parsed, result=result, serialized_message=serialized, ) def _fetch_deserialized_and_result( self, message: Optional[Any] ) -> ResultAndDeserialized: try: deserialized = self._deserialize(message) except StopPipeline as e: raise e from e except Exception as e: retval = ResultAndDeserialized( result=self._build_error_result(message, e), deserialized=None ) return retval retval = ResultAndDeserialized( result=self._handle(message, deserialized), deserialized=deserialized ) return retval def _deserialize(self, message: Optional[Any]): try: deserialized = self.deserializer.deserialize(message) except Exception as e: self.on_deserialization_failed(original_message=message, error=e) raise e from e else: self.on_deserialized( original_message=message, deserialized_message=deserialized ) return deserialized def _build_error_result(self, message: Any, error: Exception): try: error_result = self.deserializer.build_error_result(message, error) except Exception as new_e: _LOGGER.exception('') _LOGGER.error("Deserialization failed and error result cannot be built.") raise new_e from new_e return error_result def _handle(self, message: Optional[Any], deserialized: Mapping[str, Any]): try: result = self.handler(deserialized) # type: ignore except Exception as e: self.on_handling_failed( original_message=message, deserialized_message=deserialized, error=e ) raise e from e self.on_handled( original_message=message, deserialized_message=deserialized, result=result ) return result def _serialize( self, original_message: Optional[Any], parsed_message: Optional[Mapping[str, Any]], result: Mapping[str, Any], ) -> Any: try: serialized = self.serializer.serialize(result) except Exception as e: self.on_serialization_failed( original=original_message, deserialized=parsed_message, result=result, error=e, ) else: self.on_serialized( original_message=original_message, deserialized_message=parsed_message, result=result, serialized_message=serialized, ) return serialized def _run_core( self, message: Optional[Any] = None ) -> Tuple[Optional[Mapping[str, Any]], _Result, Optional[Any]]: self.on_received(message) result, deserialized = self._fetch_deserialized_and_result(message) if result is not None: serialized = self._serialize(message, deserialized, result) else: serialized = None return deserialized, result, serialized def run(self, message: Optional[Any] = None): """ Method that starts execution of pipeline stages. To stop the pipeline raise StopPipeline inside any callback. :param message: Message as is, without deserialization. Or message attributes if the executor was instantiated with neither a deserializer nor a handler (useful to quickly publish message attributes by hand) """ try: deserialized, result, serialized = self._run_core(message) if self.publisher is not None and serialized is not None: assert result is not None # something is serialized, so there must be a result self._try_publish(message, deserialized, result, serialized) except StopPipeline as e: self.on_stopped(original_message=message, reason=e.reason) except Exception as e: self.on_finished(original_message=message, error=e) else: self.on_finished(original_message=message, error=None) def run_for_result(self, message: Optional[Any] = None): try: _, _, serialized = self._run_core(message) except StopPipeline as e: self.on_stopped(original_message=message, reason=e.reason) raise FetchedNoResult from e except Exception as e: self.on_finished(original_message=message, error=e) raise FetchedNoResult from e else: self.on_finished(original_message=message, error=None) return serialized if __name__ == '__main__': class StoppingExecutor(Executor): def on_deserialized( self, original_message: Any, deserialized_message: Mapping[str, Any] ): super().on_deserialized(original_message, deserialized_message) raise StopPipeline("the sky is very high") logging.basicConfig(level=logging.DEBUG) StoppingExecutor(lambda m: {'2': 42}).run() # type: ignore print(Executor(lambda m: {"spam": "eggs"}).run_for_result()) PKґNAJJhappyly/listening/listener.py""" :class:`~happyly.listening.listener.BaseListener` and its subclasses. Listener is a form of Executor which is able to run pipeline by an event coming from a subscription. """ import logging from typing import Any, TypeVar, Optional, Generic from happyly.serialization.serializer import Serializer from happyly.serialization.dummy import DUMMY_SERDE from happyly.handling import Handler from happyly.pubsub import BasePublisher from happyly.pubsub.subscriber import BaseSubscriber, SubscriberWithAck from happyly.serialization import Deserializer from .executor import Executor _LOGGER = logging.getLogger(__name__) D = TypeVar("D", bound=Deserializer) P = TypeVar("P", bound=BasePublisher) S = TypeVar("S", bound=BaseSubscriber) SE = TypeVar("SE", bound=Serializer) class BaseListener(Executor[D, P, SE], Generic[D, P, SE, S]): """ Listener is a form of Executor which is able to run pipeline by an event coming from a subscription. Listener itself doesn't know how to subscribe, it subscribes via a provided subscriber. As any executor, implements managing of stages inside the pipeline (deserialization, handling, serialization, publishing) and contains callbacks between the stages which can be easily overridden. As any executor, listener does not implement stages themselves, it takes internal implementation of stages from corresponding components: handler, deserializer, publisher. It means that listener is universal and can work with any serialization/messaging technology depending on concrete components provided to listener's constructor. """ subscriber: S """ Provides implementation of how to subscribe. """ def __init__( # type: ignore self, subscriber: S, handler: Handler, deserializer: D, serializer: SE = DUMMY_SERDE, # type: ignore publisher: Optional[P] = None, ): super().__init__( handler=handler, deserializer=deserializer, publisher=publisher, serializer=serializer, ) self.subscriber = subscriber def start_listening(self): return self.subscriber.subscribe(callback=self.run) class ListenerWithAck(BaseListener[D, P, SE, SubscriberWithAck], Generic[D, P, SE]): """ Acknowledge-aware listener. Defines :meth:`ListenerWithAck.ack` method. Subclass :class:`ListenerWithAck` and specify when to ack by overriding the corresponding callbacks. """ def __init__( # type: ignore self, subscriber: SubscriberWithAck, handler: Handler, deserializer: D, serializer: SE = DUMMY_SERDE, publisher: Optional[P] = None, ): super().__init__( handler=handler, deserializer=deserializer, serializer=serializer, publisher=publisher, subscriber=subscriber, ) def on_acknowledged(self, message: Any): """ Callback which is called write after message was acknowledged. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param message: Message as it has been received, without any deserialization """ _LOGGER.info('Message acknowledged') def ack(self, message: Any): """ Acknowledge the message using implementation from subscriber, then log success. :param message: Message as it has been received, without any deserialization """ self.subscriber.ack(message) self.on_acknowledged(message) class EarlyAckListener(ListenerWithAck[D, P, SE], Generic[D, P, SE]): """ Acknowledge-aware :class:`BaseListener`, which performs :meth:`.ack` right after :meth:`.on_received` callback is finished. """ def _fetch_deserialized_and_result(self, message: Optional[Any]): self.ack(message) super()._fetch_deserialized_and_result(message) class LateAckListener(ListenerWithAck[D, P, SE], Generic[D, P, SE]): """ Acknowledge-aware listener, which performs :meth:`.ack` at the very end of pipeline. """ def on_finished(self, original_message: Any, error: Optional[Exception]): self.ack(original_message) super().on_finished(original_message, error) PKOUNhappyly/logs/__init__.pyPKOUN%%happyly/logs/base.pyfrom abc import ABC, abstractmethod _not_impl = NotImplementedError('No default implementation in base logger class') class BaseLogger(ABC): @abstractmethod def info(self, message: str): raise _not_impl @abstractmethod def debug(self, message: str): raise _not_impl @abstractmethod def warning(self, message: str): raise _not_impl @abstractmethod def exception(self, message: str): raise _not_impl @abstractmethod def error(self, message: str): raise _not_impl PKOUNhappyly/logs/mixins.pyPKOUN-happyly/logs/request_id.pyfrom logging import Logger from attr import attrs from .base import BaseLogger @attrs(auto_attribs=True) class RequestIdLogger(BaseLogger): logger: Logger topic: str = '' request_id: str = '' def _fmt(self, message): return f' {self.topic:>35} | {self.request_id:>40} |> {message}' def info(self, message: str): self.logger.info(self._fmt(message)) def debug(self, message: str): self.logger.debug(self._fmt(message)) def warning(self, message: str): self.logger.warning(self._fmt(message)) def exception(self, message: str): self.logger.exception(self._fmt(message)) def error(self, message: str): self.logger.error(self._fmt(message)) PKґN6G{{happyly/pubsub/__init__.pyfrom .publisher import BasePublisher # noqa: F401 from .subscriber import SubscriberWithAck, BaseSubscriber # noqa: F401 PKґNO}D99happyly/pubsub/publisher.pyfrom abc import ABC, abstractmethod from typing import Any, Callable class BasePublisher(ABC): @abstractmethod def publish(self, serialized_message: Any): raise NotImplementedError("No default implementation in base publisher class") @classmethod def from_function(cls, func: Callable[[Any], None]): def publish(self, serialized_message: Any): func(serialized_message) constructed_type = type( '__GeneratedPublisher', (BasePublisher,), {'publish': publish} ) return constructed_type() PK7NMSShappyly/pubsub/subscriber.pyfrom abc import ABC, abstractmethod from typing import Callable, Any class BaseSubscriber(ABC): @abstractmethod def subscribe(self, callback: Callable[[Any], Any]): raise NotImplementedError class SubscriberWithAck(BaseSubscriber, ABC): @abstractmethod def ack(self, message): raise NotImplementedError PKOUN_))happyly/schemas/__init__.pyfrom .schema import Schema # noqa: F401 PKOUNhappyly/schemas/schema.pyimport marshmallow class Schema(marshmallow.Schema): """ :doc:`Marshmallow ` schema, which raises errors on mismatch (extra fields provided also raise exception). Subclass it just like any marshmallow :class:`~marshmallow.Schema` to describe schema. Instantiation with no arguments is a good strict default, but you can pass any arguments valid for :class:`marshmallow.Schema` """ def __init__(self, *args, **kwargs): super().__init__(strict=True, *args, **kwargs) @marshmallow.validates_schema(pass_original=True) def check_unknown_fields(self, data, original_data): unknown = set(original_data) - set(self.fields) if unknown: raise marshmallow.ValidationError('Unknown field', unknown) PKґN6Mt!happyly/serialization/__init__.pyfrom .deserializer import Deserializer # noqa: F401 from .serializer import Serializer # noqa: F401 from .dummy import DUMMY_DESERIALIZER, DUMMY_SERDE, DummyValidator # noqa: F401 PKґNNw%happyly/serialization/deserializer.pyfrom abc import ABC, abstractmethod from typing import Mapping, Any, Callable import marshmallow from attr import attrs _not_impl = NotImplementedError('No default implementation in base Deserializer class') class Deserializer(ABC): @abstractmethod def deserialize(self, message: Any) -> Mapping[str, Any]: raise _not_impl def build_error_result(self, message: Any, error: Exception) -> Mapping[str, Any]: raise error from error @classmethod def from_function(cls, func: Callable[[Any], Mapping[str, Any]]): def deserialize(self, message: Any) -> Mapping[str, Any]: return func(message) constructed_type = type( '__GeneratedDeserializer', (Deserializer,), {'deserialize': deserialize} ) return constructed_type() @attrs(auto_attribs=True, frozen=True) class DeserializerWithSchema(Deserializer, ABC): schema: marshmallow.Schema PKґN^whappyly/serialization/dummy.pyimport warnings from typing import Any, Mapping import marshmallow from attr import attrs from happyly.serialization import Serializer from .deserializer import Deserializer class DummySerde(Deserializer, Serializer): def _identity_transform(self, message): if self is DUMMY_DESERIALIZER: warnings.warn( "Please use DUMMY_SERDE instead, " "DUMMY_DESERIALIZER will be removed in happyly v0.9.0.", DeprecationWarning, stacklevel=2, ) if isinstance(message, Mapping): return message elif message is None: return {} else: raise ValueError( 'Dummy deserializer requires message attributes ' 'in form of dict-like structure as input' ) def serialize(self, message_attributes: Mapping[str, Any]) -> Any: return self._identity_transform(message_attributes) def deserialize(self, message) -> Mapping[str, Any]: return self._identity_transform(message) DUMMY_DESERIALIZER: DummySerde = DummySerde() DUMMY_SERDE: DummySerde = DummySerde() """ Serializer/deserializer which transforms message attributes to themselves """ @attrs(auto_attribs=True, frozen=True) class DummyValidator(Deserializer, Serializer): """ Serializer/deserializer which transforms message attributes to themselves along with validating against message schema. """ schema: marshmallow.Schema """ Schema which will be used to validate the provided message """ def _validate(self, message): errors = self.schema.validate(message) if errors != {}: raise marshmallow.ValidationError(str(errors)) def deserialize(self, message: Mapping[str, Any]) -> Mapping[str, Any]: self._validate(message) return message def serialize(self, message_attributes: Mapping[str, Any]) -> Mapping[str, Any]: self._validate(message_attributes) return message_attributes PKґNXhappyly/serialization/flask.pyfrom typing import Mapping, Any from attr import attrs from happyly.serialization.serializer import SerializerWithSchema from happyly.serialization import DummyValidator @attrs(auto_attribs=True) class JsonifyForSchema(SerializerWithSchema): def serialize(self, message_attributes: Mapping[str, Any]) -> Any: DummyValidator(schema=self.schema).serialize(message_attributes) # raises error is msg doesn't match schema import flask return flask.jsonify(message_attributes) PKґN-56happyly/serialization/json.pyimport json from typing import Any, Mapping from attr import attrs from happyly import Serializer, Deserializer from .deserializer import DeserializerWithSchema from .serializer import SerializerWithSchema class JSONSchemalessSerde(Serializer, Deserializer): """ Simple JSON serializer/deserializer which doesn't validate for any schema """ def serialize(self, message_attributes: Mapping[str, Any]) -> str: return json.dumps(message_attributes) def deserialize(self, message: str) -> Mapping[str, Any]: return json.loads(message) @attrs(auto_attribs=True) class JSONSerializerForSchema(SerializerWithSchema): def serialize(self, message_attributes: Mapping[str, Any]) -> Any: data, _ = self.schema.dumps(message_attributes) return data @attrs(auto_attribs=True) class JSONDeserializerForSchema(DeserializerWithSchema): def deserialize(self, message: Any) -> Mapping[str, Any]: deserialized, _ = self.schema.loads(message) return deserialized @attrs(auto_attribs=True) class BinaryJSONSerializerForSchema(SerializerWithSchema): def serialize(self, message_attributes: Mapping[str, Any]) -> Any: data, _ = self.schema.dumps(message_attributes) return data.encode('utf-8') @attrs(auto_attribs=True) class BinaryJSONDeserialierForSchema(DeserializerWithSchema): def deserialize(self, message: Any) -> Mapping[str, Any]: data = message.data.decode('utf-8') deserialized, _ = self.schema.loads(data) return deserialized PKґNZ.#happyly/serialization/serializer.pyfrom abc import ABC, abstractmethod from typing import Mapping, Any, Callable import marshmallow from attr import attrs _no_default = NotImplementedError('No default implementation in base Serializer class') class Serializer(ABC): """ Abstract base class for Serializer. Provides :meth:`serialize` method which should be implemented by subclasses. """ @abstractmethod def serialize(self, message_attributes: Mapping[str, Any]) -> Any: raise _no_default @classmethod def from_function(cls, func: Callable[[Mapping[str, Any]], Any]): def serialize(self, message: Any) -> Mapping[str, Any]: return func(message) constructed_type = type( '__GeneratedSerializer', (Serializer,), {'serialize': serialize} ) return constructed_type() @attrs(auto_attribs=True, frozen=True) class SerializerWithSchema(Serializer, ABC): schema: marshmallow.Schema PKOUNN''happyly-0.8.0.dist-info/LICENSEMIT License Copyright (c) 2019 Equeum Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. PK!HPOhappyly-0.8.0.dist-info/WHEEL HM K-*ϳR03rOK-J,/RH,szd&Y)r$[)T&UrPK!Hib  happyly-0.8.0.dist-info/METADATAXms۸_3wHډJq}d II@܏KR,"ox2Nj,D,Xd]/ez-v9JKT-J,McE8])Da:kJQYNu57IR+k7&2S?j}:S11Vd&W#qz*^'07U'dd.8iر8)=7wBUJ],sg C{ͤԚ؂N[«%yVx ,kս]nUpZHD+VUaK81C7H͆9YU.Ng$;}hC/U~S+V݆`Pi_gyƚ;e .g,w}.Nښ9v0nؽPfjeYQ.d5mAEPƋuji;>:;Ԙi¬0MMh?z"zP)uRY<zP&mɼ=CdYtRV#C¿7{l%jq*_w;?j0૬yz tLL6v8#Lv;v(`8i_78,}?'0h7rQpF)chAhGJF4&dSl&2YTAv h꿕r;Mf 3gUj`ٖt07(`Frr' `෍x5^#. CVdΣW ԵjNNԺViHLǤ2s~`Z4E5p9SVAsfB#WYg7 Fٯ%xU^6*wWɫ8=n3ׯ]($-^Vx N*j!K8q9WHY*Т4(k, . UPqM=b7.d NB\$bh  $;3V=$=!T(5H5o|"RX*ưQk"w F\[/SHi$DӨ$R8 L{C^B-D*TiSq2'R>ɑ,Da$. ]تڀ.#NܮA]q[#YTv:(_v6x}Б"-6g,.Rph7`"^$X]~'TQc-a]o{8 f4-4 Xunp U rB툸Vza->"mBszgd@4;lS$N{c0T|6C\JMFR~6 _nLXCTgR'&C&ӕL{2G(Q2Clhzd*(72 +Lh)6gMJz9+)OzZryx5Ы6vq1e˔y̆2rF=F[.Z,i[8P O.#? Jg -mv9p3 VM,0\Mo`'P>4M7Y`wRYUV/lEt"= vH`0fQk a 3l=kZA2tIE= .JvH%{qZg"&LaD\ Pt WHsY܀%9.l/7&8JUN;1"x[?8p@ᰗ:p'\4ly׬i51O ۍ^&Seb2uaÁ|0b#xĄaZ3@.¾өuwךt .]z -PbIHJ8x.Jl(C0#]k2 0=rftg!kx˥ٞb*3aʤLu%%4|laБ 9o3x SeJSK=tdP]<0&W$1hx*Z+lp;Zhoz 0i^wQ<:G˵a<b~Ϯ렲N,kOu\.* SEPK!Hrbohappyly-0.8.0.dist-info/RECORDɲHE-( y H a1 yF[ٙɫdf Yq=f)?=Vpo͂H_{ӹ;CʃiX>4ƻah?_&Vo>'ۘwsam/z^N$ůb(n8ʒQo2Fښƻȥ!|<걑49;X'[W"P-I&J]R[p0f2JJsE#6tOHuzf D͙:GOi=p}"u݊ OmmEə-Ӑð%|85'Vki XNR#XEL:)b~ GX}J3擌3J߫1\#8~qΛ* $ E*B\-;FuG6 fv=Q+@%RJHܳ<<ߠ8trl:CK}̋a%AX5=a;_>`1 9SaIXԉRdnl-wf"fFط0I"⫺ŲQ,Grt!)7q%RgO!B?>^QrOE! L|d%23]x~pEǶXUMgJj%SB# 21 RC9_^5bLBLʭA*F˦Xk%p324Ix%wE_Ch 0 GӮ`dze) Y] X^O+MMs H UTFx*rG"qIzILvCҙJA8FP2:2X,1\\<>0M\3J79T 42B3r["ׂ*u}rh[yI-ݥ}כeyWoloʄEqmD=Q>*g 2#_@GCN5(X %hU=dyJ%yj_Z`_`{:lLY˯w]EAo~J. D/KT :?﴿spxﶒ&bY&PvmZ(u24~`Vhy(epĜjE T fP5B\rD-6JJBѳ^3-}_߉& 5k|~lՀK+KQqyмr#r}E9'(>q=P5QjON[FiYkZuwg𣌅ad% `Muy#ՆI{!OsmkUܪhOx dE|NRՑ`!B|_#JTkmLQ&Vї<.,ETrDcށ׫ d6Esz!V6D}PuUۇc8{[,f EEdaGH >c{ YGJ7{xp;6%^NClZ"PugRGHY_Wqi[dKtصetPڈ#I_I¹{lIg lEm"o67 X?O2/X dw@Aok `(Z(~REZ9NEwdYt |$ M ?gnWe* }URBN- OPRmÖ.JwY з`Cφ PKƾNa&lllhappyly/__init__.pyPKґNh happyly/exceptions.pyPKOUN!happyly/_deprecations/__init__.pyPKOUNRrTThappyly/_deprecations/utils.pyPKOUNv))thappyly/caching/__init__.pyPKOUN6yyhappyly/caching/cacher.pyPKґN6n happyly/caching/mixins.pyPKґNE!Yhappyly/google_pubsub/__init__.pyPKOUNdj&happyly/google_pubsub/deserializers.pyPKґN2m<#happyly/google_pubsub/publishers.pyPKOUNץrr%|!happyly/google_pubsub/redis_cacher.pyPKOUNVX$1&happyly/google_pubsub/subscribers.pyPKOUN, *happyly/google_pubsub/high_level/__init__.pyPKґN3  (1,happyly/google_pubsub/high_level/base.pyPK7N -Hhappyly/google_pubsub/high_level/early_ack.pyPKOUN2R..,Jhappyly/google_pubsub/high_level/late_ack.pyPKґNJ6zz*KMhappyly/google_pubsub/high_level/simple.pyPKґNM/$$. Vhappyly/google_pubsub/high_level/with_cache.pyPKґN.bb}\happyly/handling/__init__.pyPKOUNBK!]happyly/handling/dummy_handler.pyPKґN)_happyly/handling/handler.pyPK7NAP!fhappyly/listening/__init__.pyPKґNgJ>>ghappyly/listening/executor.pyPKґNAJJlhappyly/listening/listener.pyPKOUNhappyly/logs/__init__.pyPKOUN%%'happyly/logs/base.pyPKOUN~happyly/logs/mixins.pyPKOUN-happyly/logs/request_id.pyPKґN6G{{Žhappyly/pubsub/__init__.pyPKґNO}D99xhappyly/pubsub/publisher.pyPK7NMSShappyly/pubsub/subscriber.pyPKOUN_))whappyly/schemas/__init__.pyPKOUNhappyly/schemas/schema.pyPKґN6Mt!)happyly/serialization/__init__.pyPKґNNw%happyly/serialization/deserializer.pyPKґN^whappyly/serialization/dummy.pyPKґNXHhappyly/serialization/flask.pyPKґN-56happyly/serialization/json.pyPKґNZ.#happyly/serialization/serializer.pyPKOUNN''happyly-0.8.0.dist-info/LICENSEPK!HPO2happyly-0.8.0.dist-info/WHEELPK!Hib  happyly-0.8.0.dist-info/METADATAPK!Hrbohappyly-0.8.0.dist-info/RECORDPK++ S