PK{NI/Uhappyly/__init__.py"""Python library for Pub/Sub message handling.""" # flake8: noqa F401 __version__ = '0.5.0' from .listening import Executor, Listener, BaseListener from .schemas import Schema from .caching import Cacher from .serialization import Serializer, Deserializer from .handling import Handler, DUMMY_HANDLER def _welcome(): import sys sys.stdout.write(f'Using happyly v{__version__}.\n') def _setup_warnings(): import warnings for warning_type in PendingDeprecationWarning, DeprecationWarning: warnings.filterwarnings( 'always', category=warning_type, module=r'^{0}\.'.format(__name__) ) _welcome() _setup_warnings() del _welcome del _setup_warnings PKxyNv))happyly/caching/__init__.pyfrom .cacher import Cacher # noqa: F401 PKxyNooohappyly/caching/cacher.pyfrom abc import ABC, abstractmethod from typing import Any _no_default_impl = NotImplementedError('No default implementation for class Cacher') class Cacher(ABC): """ Abstract base class which defines interface of any caching component to be used via CacheByRequestIdMixin or similar mixin. """ @abstractmethod def add(self, data: Any, key: str): """ Add the provided data to cache and store it by the provided key. """ raise _no_default_impl @abstractmethod def remove(self, key: str): """ Remove data from cache which is stored by the provided key. """ raise _no_default_impl @abstractmethod def get(self, key: str): """ Returns data which is stored in cache by the provided key. """ raise _no_default_impl PKxyNW^happyly/caching/mixins.pyimport json from typing import Any, Mapping, Optional from happyly.caching.cacher import Cacher from happyly.handling import HandlingResult class CacheByRequestIdMixin: """ Mixin which adds caching functionality to Listener. Utilizes notions of listener's topic and request id of message - otherwise will not work. To be used via multiple inheritance. For example, given some component SomeListener you can define its caching equivalent by defining SomeCachedListener which inherits from both SomeListener and CacheByRequestIdMixin. """ def __init__(self, cacher: Cacher): self.cacher = cacher def on_received(self, message: Any): super().on_received(message) try: req_id = self._get_req_id(message) except Exception: pass else: data = json.dumps( {'topic': self.from_topic, 'data': json.loads(message.data)} ) self.cacher.add(data, key=req_id) def _get_req_id(self, message: Any) -> str: assert self.deserializer is not None attribtues = self.deserializer.deserialize(message) return attribtues[self.deserializer.request_id_field] def _rm(self, parsed_message: Mapping[str, Any]): assert self.deserializer is not None self.cacher.remove(parsed_message[self.deserializer.request_id_field]) def on_published( self, original_message: Any, parsed_message: Optional[Mapping[str, Any]], result: HandlingResult, ): super().on_published(original_message, parsed_message, result) if parsed_message is not None: self._rm(parsed_message) def on_deserialization_failed(self, message: Any, error: Exception): super().on_deserialization_failed(message, error) try: req_id = self._get_req_id(message) except Exception: pass else: self.cacher.remove(key=req_id) PKx{NIUKK!happyly/google_pubsub/__init__.py# flake8: noqa F401 from .high_level import ( GoogleSimpleSender, GoogleSimpleReceiver, GoogleReceiveAndReplyComponent, GoogleSimpleReceiveAndReply, GoogleCachedReceiveAndReply, GoogleCachedReceiver, GoogleLateAckReceiver, GoogleLateAckReceiveAndReply, GoogleBaseReceiver, GoogleBaseReceiveAndReply, ) from .redis_cacher import RedisCacher from .deserializers import JSONDeserializerWithRequestIdRequired from .serializers import BinaryJSONSerializer from .publishers import GooglePubSubPublisher from .subscribers import GooglePubSubSubscriber PKyNά&happyly/google_pubsub/deserializers.pyfrom typing import Mapping, Any import json from attr import attrs import marshmallow from happyly.serialization import Deserializer @attrs(auto_attribs=True, frozen=True) class JSONDeserializerWithRequestIdRequired(Deserializer): """ Deserializer for Google Pub/Sub messages which expects a message of certain schema to be written in `message.data` as JSON encoded into binary data with utf-8. Schema used with this serializer must define some field which is used as request id (you can specify which one in constructor). If `JSONDeserializerWithRequestIdRequired` fails to deserialize some message, it tries to fetch request id and provide error message. """ schema: marshmallow.Schema request_id_field: str = 'request_id' status_field: str = 'status' error_field: str = 'error' _status_error: str = 'ERROR' def deserialize(self, message: Any) -> Mapping[str, Any]: """ Loads message attributes from `message.data`, expects it to be a JSON which corresponds `self.schema` encoded with utf-8. """ data = message.data.decode('utf-8') deserialized, _ = self.schema.loads(data) return deserialized def build_error_result(self, message: Any, error: Exception) -> Mapping[str, Any]: """ Provides a fallback result when `deserialize` fails. Returns a dict with attributes: * * * Field names can be specified in constructor. If request id cannot be fetched, it is set to an empty string. """ attributes = json.loads(message.data) try: return { self.request_id_field: attributes[self.request_id_field], self.status_field: self._status_error, self.error_field: repr(error), } except KeyError as e: return { self.request_id_field: '', self.status_field: self._status_error, self.error_field: f'{repr(e)}: ' f'Message contains no {self.request_id_field}', } PKcqN#happyly/google_pubsub/publishers.pyfrom typing import Any from google.cloud import pubsub_v1 from happyly.pubsub import Publisher class GooglePubSubPublisher(Publisher): def __init__(self, project, *args, **kwargs): super().__init__(*args, **kwargs) self.project = project self._publisher_client = pubsub_v1.PublisherClient() def __attrs_post_init__(self): self._publisher_client = pubsub_v1.PublisherClient() def publish_message(self, serialized_message: Any, to: str): future = self._publisher_client.publish( f'projects/{self.project}/topics/{to}', serialized_message ) try: future.result() return except Exception as e: raise e PK΄{Nץrr%happyly/google_pubsub/redis_cacher.pyimport logging from happyly.caching.cacher import Cacher _LOGGER = logging.getLogger(__name__) class RedisCacher(Cacher): def __init__(self, host: str, port: int, prefix: str = ''): try: import redis except ImportError as e: raise ImportError('Please install redis>=3.0 to use this feature.') from e self.prefix = prefix self.client = redis.StrictRedis(host=host, port=port) _LOGGER.info( f'Cache was successfully initialized with Redis client ({host}:{port})' ) if self.prefix != '': _LOGGER.info(f'Using prefix {self.prefix}') def add(self, data: str, key: str): self.client.hset(self.prefix, key, data) _LOGGER.info(f'Cached message with id {key}') def remove(self, key: str): self.client.hdel(self.prefix, key) _LOGGER.info(f'Message with id {key} was removed from cache') def get(self, key: str): self.client.hget(self.prefix, key) def get_all(self): keys = self.client.hkeys(self.prefix) return [self.client.hget(self.prefix, k) for k in keys] PKaN%v$happyly/google_pubsub/serializers.pyfrom typing import Mapping, Any import marshmallow from attr import attrs from happyly.serialization.serializer import Serializer @attrs(auto_attribs=True, frozen=True) class BinaryJSONSerializer(Serializer): schema: marshmallow.Schema def serialize(self, message_attributes: Mapping[str, Any]) -> Any: data, _ = self.schema.dumps(message_attributes) return data.encode('utf-8') PK([{NVX$happyly/google_pubsub/subscribers.pyimport logging from typing import Callable, Any from attr import attrs, attrib from google.cloud import pubsub_v1 from happyly.pubsub import SubscriberWithAck _LOGGER = logging.getLogger(__name__) @attrs(auto_attribs=True) class GooglePubSubSubscriber(SubscriberWithAck): project: str subscription_name: str _subscription_client: pubsub_v1.SubscriberClient = attrib(init=False) _subscription_path: str = attrib(init=False) def __attrs_post_init__(self): s = pubsub_v1.SubscriberClient() self._subscription_path = s.subscription_path( self.project, self.subscription_name ) self._subscription_client = s def subscribe(self, callback: Callable[[Any], Any]): _LOGGER.info(f'Starting to listen to {self.subscription_name}') return self._subscription_client.subscribe(self._subscription_path, callback) def ack(self, message): message.ack() PK ZeroToManyParsedMessages: """ Applies logic using a provided message, optionally gives back one or more results. Each result consists of message attributes which can be serialized and sent. When fails, calls `on_handling_failed` :param message: A parsed message as a dictionary of attributes :return: None if no result is extracted from handling, a dictionary of attributes for single result or a list of dictionaries if handling provides multiple results """ raise _no_base_impl @abstractmethod def on_handling_failed( self, message: Mapping[str, Any], error: Exception ) -> ZeroToManyParsedMessages: """ Applies fallback logic using a provided message when `handle` fails, optionally gives back one or more results. Enforces users of `Handler` class to provide explicit strategy for errors. If you want to propagate error further to the underlying Executor/Handler, just raise an exception here. :param message: A parsed message as a dictionary of attributes :param error: Error raised by `handle` :return: None if no result is extracted from handling, a dictionary of attributes for single result or a list of dictionaries if handling provides multiple results """ raise _no_base_impl def __call__(self, message: Mapping[str, Any]) -> HandlingResult: try: result_data = self.handle(message) return HandlingResult.ok(result_data) except Exception as e: result_data = self.on_handling_failed(message, e) return HandlingResult.err(result_data) PKcqN#happyly/handling/handling_result.pyfrom enum import Enum from attr import attrs from .types import ZeroToManyParsedMessages class HandlingResultStatus(Enum): OK = 'OK' ERR = 'ERR' @attrs(auto_attribs=True, frozen=True) class HandlingResult: status: HandlingResultStatus data: ZeroToManyParsedMessages @classmethod def ok(cls, data): return HandlingResult(status=HandlingResultStatus.OK, data=data) @classmethod def err(cls, data): return HandlingResult(status=HandlingResultStatus.ERR, data=data) PK~aNfhappyly/handling/types.pyfrom typing import Mapping, Any, Union, List _ParsedMessage = Mapping[str, Any] ZeroToManyParsedMessages = Union[_ParsedMessage, List[_ParsedMessage], None] PKxyNFjhappyly/listening/__init__.py# flake8: noqa F401 from .listener import Listener, BaseListener, EarlyAckListener, LateAckListener from .executor import Executor PKns{NU&&happyly/listening/executor.pyimport logging from typing import Mapping, Any, Optional, TypeVar, Generic from attr import attrs from happyly.handling.dummy_handler import DUMMY_HANDLER from happyly.handling import Handler, HandlingResult from happyly.serialization.deserializer import Deserializer from happyly.pubsub import Publisher from happyly.serialization import DUMMY_DESERIALIZER _LOGGER = logging.getLogger(__name__) D = TypeVar("D", bound=Deserializer) P = TypeVar("P", bound=Publisher) @attrs(auto_attribs=True) class Executor(Generic[D, P]): """ Component which is able to run handler as a part of more complex pipeline. Implements managing of stages inside the pipeline (deserialization, handling, serialization, publishing) and introduces callbacks between the stages which can be easily overridden. Executor does not implement stages themselves, it takes internal implementation of stages from corresponding components: handler, deserializer, publisher. It means that executor is universal and can work with any serialization/messaging technology depending on concrete components provided to executor's constructor. """ handler: Handler = DUMMY_HANDLER """ Provides implementation of handling stage to Executor. """ deserializer: Optional[D] = None """ Provides implementation of deserialization stage to Executor. If not present, no deserialization is performed. """ publisher: Optional[P] = None """ Provides implementation of serialization and publishing stages to Executor. If not present, no publishing is performed. """ def __attrs_post_init__(self): if self.deserializer is None: self.deserializer = DUMMY_DESERIALIZER def on_received(self, message: Any): """ Callback which is called as soon as pipeline is run. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param message: Message as it has been received, without any deserialization """ _LOGGER.info(f"Received message: {message}") def on_deserialized(self, original_message: Any, parsed_message: Mapping[str, Any]): """ Callback which is called right after message was deserialized successfully. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization :param parsed_message: Message attributes after deserialization """ _LOGGER.info( f"Message successfully deserialized into attributes: {parsed_message}" ) def on_deserialization_failed(self, message: Any, error: Exception): """ Callback which is called right after deserialization failure. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param message: Message as it has been received, without any deserialization :param error: exception object which was raised """ _LOGGER.exception( f"Was not able to deserialize the following message: {message}" ) def on_handled( self, original_message: Any, parsed_message: Mapping[str, Any], result: HandlingResult, ): """ Callback which is called right after message was handled (successfully or not, but without raising an exception). Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization :param parsed_message: Message attributes after deserialization :param result: Result fetched from handler (also shows if handling was successful) """ _LOGGER.info(f"Message handled, status {result.status}") def on_handling_failed( self, original_message: Any, parsed_message: Mapping[str, Any], error: Exception ): """ Callback which is called if handler's `on_handling_failed` raises an exception. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization :param parsed_message: Message attributes after deserialization :param error: exception object which was raised """ _LOGGER.exception(f'Handler raised an exception.') def on_published( self, original_message: Any, parsed_message: Optional[Mapping[str, Any]], result: HandlingResult, ): """ Callback which is called right after message was published successfully. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization :param parsed_message: Message attributes after deserialization :param result: Result fetched from handler (also shows if handling was successful) """ _LOGGER.info(f"Published result: {result}") def on_publishing_failed( self, original_message: Any, parsed_message: Optional[Mapping[str, Any]], result: HandlingResult, error: Exception, ): """ Callback which is called when publisher fails to publish. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization :param parsed_message: Message attributes after deserialization :param result: Result fetched from handler (also shows if handling was successful) :param error: exception object which was raised """ _LOGGER.exception(f"Failed to publish result: {result}") def on_finished(self, original_message: Any, error: Optional[Exception]): """ Callback which is called when pipeline finishes its execution. Is guaranteed to be called, whether pipeline succeeds or not. :param original_message: Message as it has been received, without any deserialization :param error: exception object which was raised or None """ _LOGGER.info('Pipeline execution finished.') def _when_parsing_succeeded(self, original: Any, parsed: Mapping[str, Any]): try: result = self.handler(parsed) self.on_handled( original_message=original, parsed_message=parsed, result=result ) except Exception as e: self.on_handling_failed(original, parsed, e) self.on_finished(original, e) return if self.publisher is not None: self._try_publish(original, parsed, result) else: self.on_finished(original_message=original, error=None) def _when_parsing_failed(self, message: Any, error: Exception): assert self.deserializer is not None if self.publisher is None: self.on_finished(original_message=message, error=None) return try: result = self.deserializer.build_error_result(message, error) handling_result = HandlingResult.err(result) except Exception as new_err: _LOGGER.exception( "Deserialization failed and error result cannot be built." ) self.on_finished(message, new_err) else: self._try_publish(original=message, parsed=None, result=handling_result) def _try_publish( self, original: Any, parsed: Optional[Mapping[str, Any]], result: HandlingResult ): assert self.publisher is not None try: self.publisher.publish_result(result) self.on_published( original_message=original, parsed_message=parsed, result=result ) self.on_finished(original, error=None) except Exception as e: self.on_publishing_failed( original_message=original, parsed_message=parsed, result=result, error=e ) self.on_finished(original, error=e) def _after_on_received(self, message: Optional[Any]): assert self.deserializer is not None try: parsed = self.deserializer.deserialize(message) except Exception as e: self.on_deserialization_failed(message, error=e) self._when_parsing_failed(message, error=e) else: self.on_deserialized(message, parsed) self._when_parsing_succeeded(original=message, parsed=parsed) def run(self, message: Optional[Any] = None): """ Method that starts execution of pipeline stages. :param message: Message as is, without deserialization. Or message attributes if the executor was instantiated with neither a deserializer nor a handler (useful to quickly publish message attributes by hand) """ self.on_received(message) self._after_on_received(message) if __name__ == '__main__': logging.basicConfig(level=logging.INFO) Executor(lambda m: HandlingResult.ok(42)).run() # type: ignore PK"t{NQ@happyly/listening/listener.pyimport logging import warnings from typing import Any, TypeVar, Optional, Generic from happyly.handling import Handler from happyly.handling.dummy_handler import DUMMY_HANDLER from happyly.pubsub import Publisher from happyly.pubsub.subscriber import BaseSubscriber, SubscriberWithAck from happyly.serialization import Deserializer from .executor import Executor _LOGGER = logging.getLogger(__name__) D = TypeVar("D", bound=Deserializer) P = TypeVar("P", bound=Publisher) S = TypeVar("S", bound=BaseSubscriber) class BaseListener(Executor[D, P], Generic[D, P, S]): """ Listener is a form of Executor which is able to run pipeline by an event coming from a subscription. Listener itself doesn't know how to subscribe, it subscribes via a provided subscriber. As any executor, implements managing of stages inside the pipeline (deserialization, handling, serialization, publishing) and contains callbacks between the stages which can be easily overridden. As any executor, listener does not implement stages themselves, it takes internal implementation of stages from corresponding components: handler, deserializer, publisher. It means that listener is universal and can work with any serialization/messaging technology depending on concrete components provided to listener's constructor. """ def __init__( self, subscriber: S, handler: Handler, deserializer: Optional[D] = None, publisher: Optional[P] = None, ): assert handler is not DUMMY_HANDLER super().__init__( handler=handler, deserializer=deserializer, publisher=publisher ) self.subscriber: S = subscriber """ Provides implementation of how to subscribe. """ def start_listening(self): return self.subscriber.subscribe(callback=self.run) class ListenerWithAck(BaseListener[D, P, SubscriberWithAck], Generic[D, P]): """ Acknowledge-aware listener. Defines `ListenerWithAck.ack()`. Subclass ListenerWithAck and specify when to ack by overriding the corresponding callbacks. """ def __init__( self, subscriber: SubscriberWithAck, handler: Handler, deserializer: Optional[D] = None, publisher: Optional[P] = None, ): super().__init__( handler=handler, deserializer=deserializer, publisher=publisher, subscriber=subscriber, ) def on_acknowledged(self, message: Any): """ Callback which is called write after message was acknowledged. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param message: Message as it has been received, without any deserialization """ _LOGGER.info('Message acknowledged') def ack(self, message: Any): """ Acknowledge the message using implementation from subscriber, then log success. :param message: Message as it has been received, without any deserialization """ self.subscriber.ack(message) self.on_acknowledged(message) class EarlyAckListener(ListenerWithAck[D, P], Generic[D, P]): """ Acknowledge-aware listener, which performs `ack` right after `on_received` callback is finished. """ def _after_on_received(self, message: Optional[Any]): self.ack(message) super()._after_on_received(message) class LateAckListener(ListenerWithAck[D, P], Generic[D, P]): """ Acknowledge-aware listener, which performs `ack` at the very end of pipeline. """ def on_finished(self, original_message: Any, error: Optional[Exception]): self.ack(original_message) super().on_finished(original_message, error) # for compatibility, to be deprecated class Listener(EarlyAckListener[D, P], Generic[D, P]): def __init__(self, *args, **kwargs): warnings.warn( "Please use EarlyAckListener instead, " "Listener will be deprecated in the future.", PendingDeprecationWarning, ) super().__init__(*args, **kwargs) PK([{Nhappyly/logs/__init__.pyPK]{N%%happyly/logs/base.pyfrom abc import ABC, abstractmethod _not_impl = NotImplementedError('No default implementation in base logger class') class BaseLogger(ABC): @abstractmethod def info(self, message: str): raise _not_impl @abstractmethod def debug(self, message: str): raise _not_impl @abstractmethod def warning(self, message: str): raise _not_impl @abstractmethod def exception(self, message: str): raise _not_impl @abstractmethod def error(self, message: str): raise _not_impl PK([{Nhappyly/logs/mixins.pyPKE[{NxPhappyly/logs/request_id.pyfrom logging import Logger from attr import attrs from logs.base import BaseLogger @attrs(auto_attribs=True) class RequestIdLogger(BaseLogger): logger: Logger topic: str = '' request_id: str = '' def _fmt(self, message): return f'{self.topic:35} | {self.request_id:40} |> {message}' def info(self, message: str): self.logger.info(self._fmt(message)) def debug(self, message: str): self.logger.debug(self._fmt(message)) def warning(self, message: str): self.logger.warning(self._fmt(message)) def exception(self, message: str): self.logger.exception(self._fmt(message)) def error(self, message: str): self.logger.error(self._fmt(message)) PKxyN<happyly/pubsub/__init__.pyfrom .publisher import Publisher # noqa: F401 from .subscriber import Subscriber, SubscriberWithAck, BaseSubscriber # noqa: F401 PKcqNO[  happyly/pubsub/publisher.pyfrom abc import ABC, abstractmethod from typing import Optional, Mapping, Any, List from happyly.handling import HandlingResult, HandlingResultStatus from happyly.serialization.serializer import Serializer class Publisher(ABC): def __init__( self, serializer: Serializer, publish_all_to: Optional[str] = None, publish_success_to: Optional[str] = None, publish_failure_to: Optional[str] = None, ): self._serializer = serializer if publish_all_to is not None and all( p is None for p in [publish_success_to, publish_failure_to] ): self.publish_success_to: str = publish_all_to self.publish_failure_to: str = publish_all_to elif ( publish_success_to is not None and publish_failure_to is not None ) and publish_all_to is None: self.publish_success_to: str = publish_success_to self.publish_failure_to: str = publish_failure_to else: raise ValueError( """Provide "publish_all_to" only, or else provide both "publish_success_to" and "publish_failure_to""" ) @abstractmethod def publish_message(self, serialized_message: Any, to: str): raise NotImplementedError("No default implementation in base Publisher class") def _get_destination(self, status: HandlingResultStatus): if status == HandlingResultStatus.OK: return self.publish_success_to elif status == HandlingResultStatus.ERR: return self.publish_failure_to else: raise ValueError(f"Unknown status {status}") def _publish_serialized(self, data: Mapping[str, Any], to: str): serialized = self._serializer.serialize(data) self.publish_message(serialized, to) def publish_result(self, result: HandlingResult): data = result.data if data is None: return destination = self._get_destination(result.status) if isinstance(data, Mapping): self._publish_serialized(data, to=destination) elif isinstance(data, List): for item in data: self._publish_serialized(item, to=destination) else: raise ValueError("Invalid data structure") PKxyNOdMohappyly/pubsub/subscriber.pyimport warnings from abc import ABC, abstractmethod from typing import Callable, Any class BaseSubscriber(ABC): @abstractmethod def subscribe(self, callback: Callable[[Any], Any]): raise NotImplementedError class SubscriberWithAck(BaseSubscriber, ABC): @abstractmethod def ack(self, message): raise NotImplementedError # for compatibility, to be deprecated class Subscriber(SubscriberWithAck, ABC): def __init__(self, *args, **kwargs): warnings.warn( "Please use SubscriberWithAck instead, " "Subscriber will be deprecated in the future.", DeprecationWarning, ) super().__init__(*args, **kwargs) PKxyN_))happyly/schemas/__init__.pyfrom .schema import Schema # noqa: F401 PKxyN, happyly/schemas/schema.pyimport marshmallow class Schema(marshmallow.Schema): """ Marshmallow schema, which raises errors on mismatch (extra fields provided also raise exception). Subclass it just like any marshmallow Schema to describe schema. Instantiation with no arguments is a good strict default, but you can pass any arguments valid for `marshmallow.Schema` """ def __init__(self, *args, **kwargs): super().__init__(strict=True, *args, **kwargs) @marshmallow.validates_schema(pass_original=True) def check_unknown_fields(self, data, original_data): unknown = set(original_data) - set(self.fields) if unknown: raise marshmallow.ValidationError('Unknown field', unknown) PKX{Nj!happyly/serialization/__init__.pyfrom .deserializer import Deserializer # noqa: F401 from .serializer import Serializer # noqa: F401 from .dummy import DUMMY_DESERIALIZER # noqa: F401 PKcqNJ%happyly/serialization/deserializer.pyfrom abc import ABC, abstractmethod from typing import Mapping, Any _not_impl = NotImplementedError('No default implementation in base Deserializer class') class Deserializer(ABC): @abstractmethod def deserialize(self, message: Any) -> Mapping[str, Any]: raise _not_impl @abstractmethod def build_error_result(self, message: Any, error: Exception) -> Mapping[str, Any]: raise _not_impl PK`{N,AUhappyly/serialization/dummy.pyfrom typing import Any, Mapping from .deserializer import Deserializer class DummyDeserializer(Deserializer): def deserialize(self, message) -> Mapping[str, Any]: if isinstance(message, Mapping): return message elif message is None: return {} else: raise ValueError( 'Dummy deserializer requires message attributes ' 'in form of dict-like structure as input' ) def build_error_result(self, message: Any, error: Exception) -> Mapping[str, Any]: raise error DUMMY_DESERIALIZER: DummyDeserializer = DummyDeserializer() PKcqNe,,#happyly/serialization/serializer.pyfrom abc import ABC, abstractmethod from typing import Mapping, Any _no_default = NotImplementedError('No default implementation in base Serializer class') class Serializer(ABC): @abstractmethod def serialize(self, message_attributes: Mapping[str, Any]) -> Any: raise _no_default PKYaNk))happyly-0.5.0.dist-info/LICENSEMIT License Copyright (c) 2019 equeumco Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. PK!HPOhappyly-0.5.0.dist-info/WHEEL HM K-*ϳR03rOK-J,/RH,szd&Y)r$[)T&UrPK!HQ  happyly-0.5.0.dist-info/METADATAVmoF_1xWDPTE^{zw/w3\.ͻ;3o~+oxyiVv{1[;!{d{l3}0!tF3*.c!tnYvbz[CE+C+VEKgڣ7>go07Shi+0n J|Ac\)=j \I~Qı$xUQw܅u&Z${H'|>F74;v!?2:A| rۘΎ^H.P( a:]&zJh06RZB_2V~inœ;fUE3!k b)Z yM™G(8㺍]VTK0A!8 ~4&`̣㰿1{P:o0.]= 'z=w=믤BϾ%Sϰ~L&8UiRbA*$WLAW= zlNꎜȲ럮OFY}|'jWα2FÂ/Ns<G;ŀ#ڨgbH8Ou+ t҃u3ŋEQ>-. 6?2OgtH{ jV*yeir@(SP; Sevԉ(UX-nTg[ ASFg3R/\"Xˠ ^H9x]TRKr²m$%C 7S] }d k%e0XDA&XGx[Lt-.TF]XCr/rz5]J$ 8)5|Ht֜! d} \ٸ@hEfH$Pq$0mD6FUHԁ'^}8ju )ʨW2P0 Ե"69{]:\,z7:/MqqNp_}(pWnz'#[;x<#?PK!H#l happyly-0.5.0.dist-info/RECORDI-pB 2EAEz&<,{_>[UM*0xޏfHMiq蟖M ,d q0 [7* Twȓ=! &H[8"‰; B(P1B][M[O|* yFMŮ0Bbh OP/@ /x\4a`6 5;I&!(1봈f 1F6jsOJR JtaӳB9'oQ kޚA%,P7>DДƴh*K[I>E] a[oQ>̖TN.%Y,ye^Urs ػ̻8tzi:%M|5Š ^XX XRhr(μ_I }AD^c725q}G%s"{;=ϰM];^tn uKpD:bw@yE q[)W8ٝ0JhX-w=\c@gCZV,IIx n`ј>q*RB,M|GYcENÄXG CIQ!,:Qd0NN^m>biyj%yL&|v1;nn:J~2fT#g֗եENKLXVL`kw|JtKAB${jJvKx =Rq迉 V)]d!zMDxɈQ! G3?f Kg,CF}(?up8ƏdN!D7,o ~T6({S;BV,O%O ^_ğ lQys)azM@:yδkծ"5--+t/\p&*d,k?Oɺse.78.#Zhqtԉ/cЯU{_N*# q-=;!:>]`[!6>SM";:kpYN'FRk:c[TIU7G/rlyw)LO!DoyO`Gxcn֭E_QyϬF%ҋNUPkqm\/L7v?)p$"d|7f.{y?AvPK{NI/Uhappyly/__init__.pyPKxyNv))happyly/caching/__init__.pyPKxyNoooOhappyly/caching/cacher.pyPKxyNW^happyly/caching/mixins.pyPKx{NIUKK! happyly/google_pubsub/__init__.pyPKyNά&happyly/google_pubsub/deserializers.pyPKcqN#shappyly/google_pubsub/publishers.pyPK΄{Nץrr%happyly/google_pubsub/redis_cacher.pyPKaN%v$@"happyly/google_pubsub/serializers.pyPK([{NVX$$happyly/google_pubsub/subscribers.pyPKhappyly/serialization/__init__.pyPKcqNJ%happyly/serialization/deserializer.pyPK`{N,AUhappyly/serialization/dummy.pyPKcqNe,,#happyly/serialization/serializer.pyPKYaNk)),happyly-0.5.0.dist-info/LICENSEPK!HPOhappyly-0.5.0.dist-info/WHEELPK!HQ  happyly-0.5.0.dist-info/METADATAPK!H#l dhappyly-0.5.0.dist-info/RECORDPK((