PK"sNhLicchappyly/__init__.py"""Python library for Pub/Sub message handling.""" # flake8: noqa F401 import logging __version__ = '0.6.0' from .listening import Executor, Listener, BaseListener from .schemas import Schema from .caching import Cacher from .serialization import Serializer, Deserializer from .handling import Handler, DUMMY_HANDLER from .exceptions import StopPipeline def _welcome(): import sys sys.stdout.write(f'Using happyly v{__version__}.\n') def _setup_warnings(): import warnings for warning_type in PendingDeprecationWarning, DeprecationWarning: warnings.filterwarnings( 'always', category=warning_type, module=r'^{0}\.'.format(__name__) ) def _setup_logging(): logging.getLogger(__name__).setLevel(logging.INFO) _welcome() _setup_warnings() _setup_logging() del _welcome del _setup_warnings del _setup_logging PK}N!happyly/_deprecations/__init__.pyPKN[NRrTThappyly/_deprecations/utils.pyimport warnings def will_be_removed( deprecated_name: str, use_instead, removing_in_version: str, stacklevel=2 ): warnings.warn( f"Please use {use_instead.__name__} instead, " f"{deprecated_name} will be removed in happyly v{removing_in_version}.", DeprecationWarning, stacklevel=stacklevel, ) PKeNv))happyly/caching/__init__.pyfrom .cacher import Cacher # noqa: F401 PK}Nooohappyly/caching/cacher.pyfrom abc import ABC, abstractmethod from typing import Any _no_default_impl = NotImplementedError('No default implementation for class Cacher') class Cacher(ABC): """ Abstract base class which defines interface of any caching component to be used via CacheByRequestIdMixin or similar mixin. """ @abstractmethod def add(self, data: Any, key: str): """ Add the provided data to cache and store it by the provided key. """ raise _no_default_impl @abstractmethod def remove(self, key: str): """ Remove data from cache which is stored by the provided key. """ raise _no_default_impl @abstractmethod def get(self, key: str): """ Returns data which is stored in cache by the provided key. """ raise _no_default_impl PK}NW^happyly/caching/mixins.pyimport json from typing import Any, Mapping, Optional from happyly.caching.cacher import Cacher from happyly.handling import HandlingResult class CacheByRequestIdMixin: """ Mixin which adds caching functionality to Listener. Utilizes notions of listener's topic and request id of message - otherwise will not work. To be used via multiple inheritance. For example, given some component SomeListener you can define its caching equivalent by defining SomeCachedListener which inherits from both SomeListener and CacheByRequestIdMixin. """ def __init__(self, cacher: Cacher): self.cacher = cacher def on_received(self, message: Any): super().on_received(message) try: req_id = self._get_req_id(message) except Exception: pass else: data = json.dumps( {'topic': self.from_topic, 'data': json.loads(message.data)} ) self.cacher.add(data, key=req_id) def _get_req_id(self, message: Any) -> str: assert self.deserializer is not None attribtues = self.deserializer.deserialize(message) return attribtues[self.deserializer.request_id_field] def _rm(self, parsed_message: Mapping[str, Any]): assert self.deserializer is not None self.cacher.remove(parsed_message[self.deserializer.request_id_field]) def on_published( self, original_message: Any, parsed_message: Optional[Mapping[str, Any]], result: HandlingResult, ): super().on_published(original_message, parsed_message, result) if parsed_message is not None: self._rm(parsed_message) def on_deserialization_failed(self, message: Any, error: Exception): super().on_deserialization_failed(message, error) try: req_id = self._get_req_id(message) except Exception: pass else: self.cacher.remove(key=req_id) PK扁Nޢ:66happyly/exceptions/__init__.pyfrom .stop_pipeline import StopPipeline # noqa: F401 PK扁NΓr#happyly/exceptions/stop_pipeline.pyfrom attr import attrs @attrs(auto_attribs=True, auto_exc=True) # type: ignore class StopPipeline(Exception): reason: str = '' PK}NIUKK!happyly/google_pubsub/__init__.py# flake8: noqa F401 from .high_level import ( GoogleSimpleSender, GoogleSimpleReceiver, GoogleReceiveAndReplyComponent, GoogleSimpleReceiveAndReply, GoogleCachedReceiveAndReply, GoogleCachedReceiver, GoogleLateAckReceiver, GoogleLateAckReceiveAndReply, GoogleBaseReceiver, GoogleBaseReceiveAndReply, ) from .redis_cacher import RedisCacher from .deserializers import JSONDeserializerWithRequestIdRequired from .serializers import BinaryJSONSerializer from .publishers import GooglePubSubPublisher from .subscribers import GooglePubSubSubscriber PK}Nά&happyly/google_pubsub/deserializers.pyfrom typing import Mapping, Any import json from attr import attrs import marshmallow from happyly.serialization import Deserializer @attrs(auto_attribs=True, frozen=True) class JSONDeserializerWithRequestIdRequired(Deserializer): """ Deserializer for Google Pub/Sub messages which expects a message of certain schema to be written in `message.data` as JSON encoded into binary data with utf-8. Schema used with this serializer must define some field which is used as request id (you can specify which one in constructor). If `JSONDeserializerWithRequestIdRequired` fails to deserialize some message, it tries to fetch request id and provide error message. """ schema: marshmallow.Schema request_id_field: str = 'request_id' status_field: str = 'status' error_field: str = 'error' _status_error: str = 'ERROR' def deserialize(self, message: Any) -> Mapping[str, Any]: """ Loads message attributes from `message.data`, expects it to be a JSON which corresponds `self.schema` encoded with utf-8. """ data = message.data.decode('utf-8') deserialized, _ = self.schema.loads(data) return deserialized def build_error_result(self, message: Any, error: Exception) -> Mapping[str, Any]: """ Provides a fallback result when `deserialize` fails. Returns a dict with attributes: * * * Field names can be specified in constructor. If request id cannot be fetched, it is set to an empty string. """ attributes = json.loads(message.data) try: return { self.request_id_field: attributes[self.request_id_field], self.status_field: self._status_error, self.error_field: repr(error), } except KeyError as e: return { self.request_id_field: '', self.status_field: self._status_error, self.error_field: f'{repr(e)}: ' f'Message contains no {self.request_id_field}', } PKeN#happyly/google_pubsub/publishers.pyfrom typing import Any from google.cloud import pubsub_v1 from happyly.pubsub import Publisher class GooglePubSubPublisher(Publisher): def __init__(self, project, *args, **kwargs): super().__init__(*args, **kwargs) self.project = project self._publisher_client = pubsub_v1.PublisherClient() def __attrs_post_init__(self): self._publisher_client = pubsub_v1.PublisherClient() def publish_message(self, serialized_message: Any, to: str): future = self._publisher_client.publish( f'projects/{self.project}/topics/{to}', serialized_message ) try: future.result() return except Exception as e: raise e PK}Nץrr%happyly/google_pubsub/redis_cacher.pyimport logging from happyly.caching.cacher import Cacher _LOGGER = logging.getLogger(__name__) class RedisCacher(Cacher): def __init__(self, host: str, port: int, prefix: str = ''): try: import redis except ImportError as e: raise ImportError('Please install redis>=3.0 to use this feature.') from e self.prefix = prefix self.client = redis.StrictRedis(host=host, port=port) _LOGGER.info( f'Cache was successfully initialized with Redis client ({host}:{port})' ) if self.prefix != '': _LOGGER.info(f'Using prefix {self.prefix}') def add(self, data: str, key: str): self.client.hset(self.prefix, key, data) _LOGGER.info(f'Cached message with id {key}') def remove(self, key: str): self.client.hdel(self.prefix, key) _LOGGER.info(f'Message with id {key} was removed from cache') def get(self, key: str): self.client.hget(self.prefix, key) def get_all(self): keys = self.client.hkeys(self.prefix) return [self.client.hget(self.prefix, k) for k in keys] PKmN%v$happyly/google_pubsub/serializers.pyfrom typing import Mapping, Any import marshmallow from attr import attrs from happyly.serialization.serializer import Serializer @attrs(auto_attribs=True, frozen=True) class BinaryJSONSerializer(Serializer): schema: marshmallow.Schema def serialize(self, message_attributes: Mapping[str, Any]) -> Any: data, _ = self.schema.dumps(message_attributes) return data.encode('utf-8') PK}NVX$happyly/google_pubsub/subscribers.pyimport logging from typing import Callable, Any from attr import attrs, attrib from google.cloud import pubsub_v1 from happyly.pubsub import SubscriberWithAck _LOGGER = logging.getLogger(__name__) @attrs(auto_attribs=True) class GooglePubSubSubscriber(SubscriberWithAck): project: str subscription_name: str _subscription_client: pubsub_v1.SubscriberClient = attrib(init=False) _subscription_path: str = attrib(init=False) def __attrs_post_init__(self): s = pubsub_v1.SubscriberClient() self._subscription_path = s.subscription_path( self.project, self.subscription_name ) self._subscription_client = s def subscribe(self, callback: Callable[[Any], Any]): _LOGGER.info(f'Starting to listen to {self.subscription_name}') return self._subscription_client.subscribe(self._subscription_path, callback) def ack(self, message): message.ack() PK}N,happyly/google_pubsub/high_level/__init__.py# flake8: noqa F401 from .simple import ( GoogleSimpleSender, GoogleSimpleReceiver, GoogleSimpleReceiveAndReply, GoogleReceiveAndReplyComponent, ) from .with_cache import GoogleCachedReceiveAndReply, GoogleCachedReceiver from .late_ack import GoogleLateAckReceiver, GoogleLateAckReceiveAndReply from .early_ack import GoogleEarlyAckReceiver, GoogleEarlyAckReceiveAndReply from .base import GoogleBaseReceiver, GoogleBaseReceiveAndReply PKXNްr(happyly/google_pubsub/high_level/base.pyimport logging from typing import Optional, Union, Any, Mapping import marshmallow from happyly.handling import HandlingResult from happyly.logs.request_id import RequestIdLogger from ..subscribers import GooglePubSubSubscriber from ..deserializers import JSONDeserializerWithRequestIdRequired from ..publishers import GooglePubSubPublisher from ..serializers import BinaryJSONSerializer from happyly import Handler from happyly.listening.listener import ListenerWithAck _LOGGER = logging.getLogger(__name__) def _format_message(message): return f'data: {message.data}, attributes: {message.attributes}' class _BaseGoogleListenerWithRequestIdLogger( ListenerWithAck[ JSONDeserializerWithRequestIdRequired, Union[None, GooglePubSubPublisher] ] ): """ Introduces advanced logging based on topic and request id. """ def __init__( self, subscriber: GooglePubSubSubscriber, handler: Handler, deserializer: JSONDeserializerWithRequestIdRequired, publisher: Optional[GooglePubSubPublisher] = None, from_topic: str = '', ): self.from_topic = from_topic super().__init__( subscriber=subscriber, publisher=publisher, handler=handler, deserializer=deserializer, ) def on_received(self, message: Any): logger = RequestIdLogger(_LOGGER, self.from_topic) logger.info(f"Received message: {_format_message(message)}") def on_deserialized(self, original_message: Any, parsed_message: Mapping[str, Any]): assert self.deserializer is not None request_id = parsed_message[self.deserializer.request_id_field] logger = RequestIdLogger(_LOGGER, self.from_topic, request_id) logger.debug( f"Message successfully deserialized into attributes: {parsed_message}" ) def on_deserialization_failed(self, message: Any, error: Exception): logger = RequestIdLogger(_LOGGER, self.from_topic) logger.exception( f"Was not able to deserialize the following message: " f"{_format_message(message)}" ) def on_handled( self, original_message: Any, parsed_message: Mapping[str, Any], result: HandlingResult, ): assert self.deserializer is not None request_id = parsed_message[self.deserializer.request_id_field] logger = RequestIdLogger(_LOGGER, self.from_topic, request_id) logger.info(f"Message handled, status {result.status}") def on_handling_failed( self, original_message: Any, parsed_message: Mapping[str, Any], error: Exception ): assert self.deserializer is not None request_id = parsed_message[self.deserializer.request_id_field] logger = RequestIdLogger(_LOGGER, self.from_topic, request_id) logger.info(f'Failed to handle message, error {error}') def on_published( self, original_message: Any, parsed_message: Optional[Mapping[str, Any]], result: HandlingResult, ): assert self.deserializer is not None request_id = '' if parsed_message is not None: request_id = parsed_message[self.deserializer.request_id_field] logger = RequestIdLogger(_LOGGER, self.from_topic, request_id) logger.info(f"Published result: {result.data}") def on_publishing_failed( self, original_message: Any, parsed_message: Optional[Mapping[str, Any]], result: HandlingResult, error: Exception, ): assert self.deserializer is not None request_id = '' if parsed_message is not None: request_id = parsed_message[self.deserializer.request_id_field] logger = RequestIdLogger(_LOGGER, self.from_topic, request_id) logger.exception(f"Failed to publish result: {result.data}") def on_acknowledged(self, message: Any): assert self.deserializer is not None try: msg: Mapping = self.deserializer.deserialize(message) req_id = msg[self.deserializer.request_id_field] except Exception: req_id = '' logger = RequestIdLogger(_LOGGER, self.from_topic, req_id) logger.info('Message acknowledged.') def on_finished(self, original_message: Any, error: Optional[Exception]): assert self.deserializer is not None try: msg: Mapping = self.deserializer.deserialize(original_message) req_id = msg[self.deserializer.request_id_field] except Exception: req_id = '' logger = RequestIdLogger(_LOGGER, self.from_topic, req_id) logger.info('Pipeline execution finished.') def on_stopped(self, original_message: Any, reason: str = ''): assert self.deserializer is not None try: msg: Mapping = self.deserializer.deserialize(original_message) req_id = msg[self.deserializer.request_id_field] except Exception: req_id = '' logger = RequestIdLogger(_LOGGER, self.from_topic, req_id) s = "." if reason == "" else f" due to the reason: {reason}." logger.info(f'Stopped pipeline{s}') class GoogleBaseReceiver(_BaseGoogleListenerWithRequestIdLogger): def __init__( self, input_schema: marshmallow.Schema, from_subscription: str, project: str, handler: Handler, from_topic: str = '', ): subscriber = GooglePubSubSubscriber( project=project, subscription_name=from_subscription ) deserializer = JSONDeserializerWithRequestIdRequired(schema=input_schema) super().__init__( subscriber=subscriber, handler=handler, deserializer=deserializer, from_topic=from_topic, ) class GoogleBaseReceiveAndReply(_BaseGoogleListenerWithRequestIdLogger): def __init__( self, handler: Handler, input_schema: marshmallow.Schema, from_subscription: str, output_schema: marshmallow.Schema, to_topic: str, project: str, from_topic: str = '', ): subscriber = GooglePubSubSubscriber( project=project, subscription_name=from_subscription ) deserializer = JSONDeserializerWithRequestIdRequired(schema=input_schema) publisher = GooglePubSubPublisher( project=project, publish_all_to=to_topic, serializer=BinaryJSONSerializer(schema=output_schema), ) super().__init__( handler=handler, deserializer=deserializer, subscriber=subscriber, publisher=publisher, from_topic=from_topic, ) PK}NZ-happyly/google_pubsub/high_level/early_ack.pyfrom typing import Optional, Any from .base import GoogleBaseReceiver, GoogleBaseReceiveAndReply class GoogleEarlyAckReceiver(GoogleBaseReceiver): def _after_on_received(self, message: Optional[Any]): self.ack(message) super()._after_on_received(message) class GoogleEarlyAckReceiveAndReply(GoogleBaseReceiveAndReply): def _after_on_received(self, message: Optional[Any]): self.ack(message) super()._after_on_received(message) PK}N2R..,happyly/google_pubsub/high_level/late_ack.pyfrom typing import Optional, Any from ..high_level.base import GoogleBaseReceiver, GoogleBaseReceiveAndReply class GoogleLateAckReceiver(GoogleBaseReceiver): def on_finished(self, original_message: Any, error: Optional[Exception]): self.ack(original_message) super().on_finished(original_message, error) class GoogleLateAckReceiveAndReply(GoogleBaseReceiveAndReply): def on_finished(self, original_message: Any, error: Optional[Exception]): self.ack(original_message) super().on_finished(original_message, error) PKN[Nڽ  *happyly/google_pubsub/high_level/simple.pyfrom typing import Union, Optional import marshmallow from happyly._deprecations.utils import will_be_removed from .early_ack import GoogleEarlyAckReceiver, GoogleEarlyAckReceiveAndReply from happyly.handling.dummy_handler import DUMMY_HANDLER from ..deserializers import JSONDeserializerWithRequestIdRequired from ..publishers import GooglePubSubPublisher from ..serializers import BinaryJSONSerializer from happyly.handling import Handler from happyly.listening.executor import Executor class GoogleSimpleSender( Executor[Union[None, JSONDeserializerWithRequestIdRequired], GooglePubSubPublisher] ): def __init__( self, output_schema: marshmallow.Schema, to_topic: str, project: str, handler: Handler = DUMMY_HANDLER, input_schema: Optional[marshmallow.Schema] = None, ): if input_schema is None: deserializer = None else: deserializer = JSONDeserializerWithRequestIdRequired(schema=input_schema) publisher = GooglePubSubPublisher( project=project, publish_all_to=to_topic, serializer=BinaryJSONSerializer(schema=output_schema), ) super().__init__( publisher=publisher, handler=handler, deserializer=deserializer ) class GoogleSimpleReceiver(GoogleEarlyAckReceiver): def __init__(self, *args, **kwargs): will_be_removed('GoogleSimpleReceiver', GoogleEarlyAckReceiver, '0.8.0') super().__init__(*args, **kwargs) class GoogleSimpleReceiveAndReply(GoogleEarlyAckReceiveAndReply): def __init__(self, *args, **kwargs): will_be_removed( 'GoogleSimpleReceiveAndReply', GoogleEarlyAckReceiveAndReply, '0.8.0' ) super().__init__(*args, **kwargs) class GoogleReceiveAndReplyComponent(GoogleEarlyAckReceiveAndReply): def __init__(self, *args, **kwargs): will_be_removed( 'GoogleReceiveAndReplyComponent', GoogleEarlyAckReceiveAndReply, '0.8.0' ) super().__init__(*args, **kwargs) PK}Nn].happyly/google_pubsub/high_level/with_cache.pyimport marshmallow from happyly.caching.cacher import Cacher from happyly.caching.mixins import CacheByRequestIdMixin from happyly.handling import Handler from .simple import GoogleSimpleReceiveAndReply, GoogleSimpleReceiver class GoogleCachedReceiveAndReply(CacheByRequestIdMixin, GoogleSimpleReceiveAndReply): def __init__( self, handler: Handler, input_schema: marshmallow.Schema, from_subscription: str, from_topic: str, output_schema: marshmallow.Schema, to_topic: str, project: str, cacher: Cacher, ): GoogleSimpleReceiveAndReply.__init__( self, handler=handler, input_schema=input_schema, from_subscription=from_subscription, output_schema=output_schema, to_topic=to_topic, project=project, from_topic=from_topic, ) CacheByRequestIdMixin.__init__(self, cacher) class GoogleCachedReceiver(CacheByRequestIdMixin, GoogleSimpleReceiver): def __init__( self, handler: Handler, input_schema: marshmallow.Schema, from_subscription: str, from_topic: str, project: str, cacher: Cacher, ): GoogleSimpleReceiver.__init__( self, handler=handler, input_schema=input_schema, from_subscription=from_subscription, project=project, from_topic=from_topic, ) CacheByRequestIdMixin.__init__(self, cacher) PKeN.ȼ*happyly/handling/__init__.pyfrom .handler import Handler # noqa: F401 from .handling_result import HandlingResult, HandlingResultStatus # noqa: F401 from .dummy_handler import DUMMY_HANDLER # noqa: F401 PKeNBK!happyly/handling/dummy_handler.pyfrom typing import Mapping, Any from happyly.handling.handler import Handler class _DummyHandler(Handler): def handle(self, message: Mapping[str, Any]): return message def on_handling_failed(self, message: Mapping[str, Any], error: Exception): raise error DUMMY_HANDLER: _DummyHandler = _DummyHandler() """ Handler which just returns provided message attributes (kind of an "identity function") """ PK}N{g^^happyly/handling/handler.pyfrom abc import ABC, abstractmethod from typing import Mapping, Any from .types import ZeroToManyParsedMessages from .handling_result import HandlingResult _no_base_impl = NotImplementedError('No default implementation in base Handler class') class Handler(ABC): """ A class containing logic to handle a parsed message. """ @abstractmethod def handle(self, message: Mapping[str, Any]) -> ZeroToManyParsedMessages: """ Applies logic using a provided message, optionally gives back one or more results. Each result consists of message attributes which can be serialized and sent. When fails, calls `on_handling_failed` :param message: A parsed message as a dictionary of attributes :return: None if no result is extracted from handling, a dictionary of attributes for single result or a list of dictionaries if handling provides multiple results """ raise _no_base_impl @abstractmethod def on_handling_failed( self, message: Mapping[str, Any], error: Exception ) -> ZeroToManyParsedMessages: """ Applies fallback logic using a provided message when `handle` fails, optionally gives back one or more results. Enforces users of `Handler` class to provide explicit strategy for errors. If you want to propagate error further to the underlying Executor/Handler, just raise an exception here. :param message: A parsed message as a dictionary of attributes :param error: Error raised by `handle` :return: None if no result is extracted from handling, a dictionary of attributes for single result or a list of dictionaries if handling provides multiple results """ raise _no_base_impl def __call__(self, message: Mapping[str, Any]) -> HandlingResult: try: result_data = self.handle(message) return HandlingResult.ok(result_data) except Exception as e: result_data = self.on_handling_failed(message, e) return HandlingResult.err(result_data) PKeN#happyly/handling/handling_result.pyfrom enum import Enum from attr import attrs from .types import ZeroToManyParsedMessages class HandlingResultStatus(Enum): OK = 'OK' ERR = 'ERR' @attrs(auto_attribs=True, frozen=True) class HandlingResult: status: HandlingResultStatus data: ZeroToManyParsedMessages @classmethod def ok(cls, data): return HandlingResult(status=HandlingResultStatus.OK, data=data) @classmethod def err(cls, data): return HandlingResult(status=HandlingResultStatus.ERR, data=data) PK"lNfhappyly/handling/types.pyfrom typing import Mapping, Any, Union, List _ParsedMessage = Mapping[str, Any] ZeroToManyParsedMessages = Union[_ParsedMessage, List[_ParsedMessage], None] PK}NFjhappyly/listening/__init__.py# flake8: noqa F401 from .listener import Listener, BaseListener, EarlyAckListener, LateAckListener from .executor import Executor PKQWNduh*h*happyly/listening/executor.pyimport logging from typing import Mapping, Any, Optional, TypeVar, Generic from attr import attrs from happyly.exceptions import StopPipeline from happyly.handling.dummy_handler import DUMMY_HANDLER from happyly.handling import Handler, HandlingResult from happyly.serialization.deserializer import Deserializer from happyly.pubsub import Publisher from happyly.serialization import DUMMY_DESERIALIZER _LOGGER = logging.getLogger(__name__) D = TypeVar("D", bound=Deserializer) P = TypeVar("P", bound=Publisher) @attrs(auto_attribs=True) class Executor(Generic[D, P]): """ Component which is able to run handler as a part of more complex pipeline. Implements managing of stages inside the pipeline (deserialization, handling, serialization, publishing) and introduces callbacks between the stages which can be easily overridden. Executor does not implement stages themselves, it takes internal implementation of stages from corresponding components: handler, deserializer, publisher. It means that executor is universal and can work with any serialization/messaging technology depending on concrete components provided to executor's constructor. """ handler: Handler = DUMMY_HANDLER """ Provides implementation of handling stage to Executor. """ deserializer: Optional[D] = None """ Provides implementation of deserialization stage to Executor. If not present, no deserialization is performed. """ publisher: Optional[P] = None """ Provides implementation of serialization and publishing stages to Executor. If not present, no publishing is performed. """ def __attrs_post_init__(self): if self.deserializer is None: self.deserializer = DUMMY_DESERIALIZER def on_received(self, message: Any): """ Callback which is called as soon as pipeline is run. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param message: Message as it has been received, without any deserialization """ _LOGGER.info(f"Received message: {message}") def on_deserialized(self, original_message: Any, parsed_message: Mapping[str, Any]): """ Callback which is called right after message was deserialized successfully. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization :param parsed_message: Message attributes after deserialization """ _LOGGER.info( f"Message successfully deserialized into attributes: {parsed_message}" ) def on_deserialization_failed(self, message: Any, error: Exception): """ Callback which is called right after deserialization failure. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param message: Message as it has been received, without any deserialization :param error: exception object which was raised """ _LOGGER.exception( f"Was not able to deserialize the following message: {message}" ) def on_handled( self, original_message: Any, parsed_message: Mapping[str, Any], result: HandlingResult, ): """ Callback which is called right after message was handled (successfully or not, but without raising an exception). Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization :param parsed_message: Message attributes after deserialization :param result: Result fetched from handler (also shows if handling was successful) """ _LOGGER.info(f"Message handled, status {result.status}") def on_handling_failed( self, original_message: Any, parsed_message: Mapping[str, Any], error: Exception ): """ Callback which is called if handler's `on_handling_failed` raises an exception. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization :param parsed_message: Message attributes after deserialization :param error: exception object which was raised """ _LOGGER.exception(f'Handler raised an exception.') def on_published( self, original_message: Any, parsed_message: Optional[Mapping[str, Any]], result: HandlingResult, ): """ Callback which is called right after message was published successfully. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization :param parsed_message: Message attributes after deserialization :param result: Result fetched from handler (also shows if handling was successful) """ _LOGGER.info(f"Published result: {result}") def on_publishing_failed( self, original_message: Any, parsed_message: Optional[Mapping[str, Any]], result: HandlingResult, error: Exception, ): """ Callback which is called when publisher fails to publish. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param original_message: Message as it has been received, without any deserialization :param parsed_message: Message attributes after deserialization :param result: Result fetched from handler (also shows if handling was successful) :param error: exception object which was raised """ _LOGGER.exception(f"Failed to publish result: {result}") def on_finished(self, original_message: Any, error: Optional[Exception]): """ Callback which is called when pipeline finishes its execution. Is guaranteed to be called unless pipeline is stopped via StopPipeline. :param original_message: Message as it has been received, without any deserialization :param error: exception object which was raised or None """ _LOGGER.info('Pipeline execution finished.') def on_stopped(self, original_message: Any, reason: str = ''): """ Callback which is called when pipeline is stopped via StopPipeline :param original_message: Message as it has been received, without any deserialization :param reason: message describing why the pipeline stopped """ s = "." if reason == "" else f" due to the reason: {reason}." _LOGGER.info(f'Stopped pipeline{s}') def _when_parsing_succeeded(self, original: Any, parsed: Mapping[str, Any]): try: result = self.handler(parsed) except Exception as e: self.on_handling_failed(original, parsed, e) self.on_finished(original, e) return else: self.on_handled( original_message=original, parsed_message=parsed, result=result ) if self.publisher is not None: self._try_publish(original, parsed, result) else: self.on_finished(original_message=original, error=None) def _when_parsing_failed(self, message: Any, error: Exception): assert self.deserializer is not None if self.publisher is None: self.on_finished(original_message=message, error=error) return try: result = self.deserializer.build_error_result(message, error) handling_result = HandlingResult.err(result) except Exception as new_err: _LOGGER.exception( "Deserialization failed and error result cannot be built." ) self.on_finished(message, new_err) else: self._try_publish(original=message, parsed=None, result=handling_result) def _try_publish( self, original: Any, parsed: Optional[Mapping[str, Any]], result: HandlingResult ): assert self.publisher is not None try: self.publisher.publish_result(result) except Exception as e: self.on_publishing_failed( original_message=original, parsed_message=parsed, result=result, error=e ) self.on_finished(original, error=e) else: self.on_published( original_message=original, parsed_message=parsed, result=result ) self.on_finished(original, error=None) def _after_on_received(self, message: Optional[Any]): assert self.deserializer is not None try: parsed = self.deserializer.deserialize(message) except Exception as e: self.on_deserialization_failed(message, error=e) self._when_parsing_failed(message, error=e) else: self.on_deserialized(message, parsed) self._when_parsing_succeeded(original=message, parsed=parsed) def run(self, message: Optional[Any] = None): """ Method that starts execution of pipeline stages. To stop the pipeline raise StopPipeline inside any callback. :param message: Message as is, without deserialization. Or message attributes if the executor was instantiated with neither a deserializer nor a handler (useful to quickly publish message attributes by hand) """ try: self.on_received(message) self._after_on_received(message) except StopPipeline as e: self.on_stopped(original_message=message, reason=e.reason) if __name__ == '__main__': class StoppingExecutor(Executor): def on_deserialized( self, original_message: Any, parsed_message: Mapping[str, Any] ): super().on_deserialized(original_message, parsed_message) raise StopPipeline("the sky is very high") logging.basicConfig(level=logging.INFO) StoppingExecutor(lambda m: HandlingResult.ok(42)).run() # type: ignore Executor(lambda m: HandlingResult.ok(42)).run() # type: ignore PKM[NW!happyly/listening/listener.pyimport logging from typing import Any, TypeVar, Optional, Generic from happyly._deprecations.utils import will_be_removed from happyly.handling import Handler from happyly.handling.dummy_handler import DUMMY_HANDLER from happyly.pubsub import Publisher from happyly.pubsub.subscriber import BaseSubscriber, SubscriberWithAck from happyly.serialization import Deserializer from .executor import Executor _LOGGER = logging.getLogger(__name__) D = TypeVar("D", bound=Deserializer) P = TypeVar("P", bound=Publisher) S = TypeVar("S", bound=BaseSubscriber) class BaseListener(Executor[D, P], Generic[D, P, S]): """ Listener is a form of Executor which is able to run pipeline by an event coming from a subscription. Listener itself doesn't know how to subscribe, it subscribes via a provided subscriber. As any executor, implements managing of stages inside the pipeline (deserialization, handling, serialization, publishing) and contains callbacks between the stages which can be easily overridden. As any executor, listener does not implement stages themselves, it takes internal implementation of stages from corresponding components: handler, deserializer, publisher. It means that listener is universal and can work with any serialization/messaging technology depending on concrete components provided to listener's constructor. """ def __init__( self, subscriber: S, handler: Handler, deserializer: D, publisher: Optional[P] = None, ): assert handler is not DUMMY_HANDLER super().__init__( handler=handler, deserializer=deserializer, publisher=publisher ) self.subscriber: S = subscriber """ Provides implementation of how to subscribe. """ def start_listening(self): return self.subscriber.subscribe(callback=self.run) class ListenerWithAck(BaseListener[D, P, SubscriberWithAck], Generic[D, P]): """ Acknowledge-aware listener. Defines `ListenerWithAck.ack()`. Subclass ListenerWithAck and specify when to ack by overriding the corresponding callbacks. """ def __init__( self, subscriber: SubscriberWithAck, handler: Handler, deserializer: D, publisher: Optional[P] = None, ): super().__init__( handler=handler, deserializer=deserializer, publisher=publisher, subscriber=subscriber, ) def on_acknowledged(self, message: Any): """ Callback which is called write after message was acknowledged. Override it in your custom Executor/Listener if needed, but don't forget to call implementation from base class. :param message: Message as it has been received, without any deserialization """ _LOGGER.info('Message acknowledged') def ack(self, message: Any): """ Acknowledge the message using implementation from subscriber, then log success. :param message: Message as it has been received, without any deserialization """ self.subscriber.ack(message) self.on_acknowledged(message) class EarlyAckListener(ListenerWithAck[D, P], Generic[D, P]): """ Acknowledge-aware listener, which performs `ack` right after `on_received` callback is finished. """ def _after_on_received(self, message: Optional[Any]): self.ack(message) super()._after_on_received(message) class LateAckListener(ListenerWithAck[D, P], Generic[D, P]): """ Acknowledge-aware listener, which performs `ack` at the very end of pipeline. """ def on_finished(self, original_message: Any, error: Optional[Exception]): self.ack(original_message) super().on_finished(original_message, error) class Listener(EarlyAckListener[D, P], Generic[D, P]): def __init__(self, *args, **kwargs): will_be_removed('Listener', EarlyAckListener, '0.7.0') super().__init__(*args, **kwargs) PK}Nhappyly/logs/__init__.pyPK}N%%happyly/logs/base.pyfrom abc import ABC, abstractmethod _not_impl = NotImplementedError('No default implementation in base logger class') class BaseLogger(ABC): @abstractmethod def info(self, message: str): raise _not_impl @abstractmethod def debug(self, message: str): raise _not_impl @abstractmethod def warning(self, message: str): raise _not_impl @abstractmethod def exception(self, message: str): raise _not_impl @abstractmethod def error(self, message: str): raise _not_impl PK}Nhappyly/logs/mixins.pyPKzZN-happyly/logs/request_id.pyfrom logging import Logger from attr import attrs from .base import BaseLogger @attrs(auto_attribs=True) class RequestIdLogger(BaseLogger): logger: Logger topic: str = '' request_id: str = '' def _fmt(self, message): return f' {self.topic:>35} | {self.request_id:>40} |> {message}' def info(self, message: str): self.logger.info(self._fmt(message)) def debug(self, message: str): self.logger.debug(self._fmt(message)) def warning(self, message: str): self.logger.warning(self._fmt(message)) def exception(self, message: str): self.logger.exception(self._fmt(message)) def error(self, message: str): self.logger.error(self._fmt(message)) PK}N<happyly/pubsub/__init__.pyfrom .publisher import Publisher # noqa: F401 from .subscriber import Subscriber, SubscriberWithAck, BaseSubscriber # noqa: F401 PK}mNO[  happyly/pubsub/publisher.pyfrom abc import ABC, abstractmethod from typing import Optional, Mapping, Any, List from happyly.handling import HandlingResult, HandlingResultStatus from happyly.serialization.serializer import Serializer class Publisher(ABC): def __init__( self, serializer: Serializer, publish_all_to: Optional[str] = None, publish_success_to: Optional[str] = None, publish_failure_to: Optional[str] = None, ): self._serializer = serializer if publish_all_to is not None and all( p is None for p in [publish_success_to, publish_failure_to] ): self.publish_success_to: str = publish_all_to self.publish_failure_to: str = publish_all_to elif ( publish_success_to is not None and publish_failure_to is not None ) and publish_all_to is None: self.publish_success_to: str = publish_success_to self.publish_failure_to: str = publish_failure_to else: raise ValueError( """Provide "publish_all_to" only, or else provide both "publish_success_to" and "publish_failure_to""" ) @abstractmethod def publish_message(self, serialized_message: Any, to: str): raise NotImplementedError("No default implementation in base Publisher class") def _get_destination(self, status: HandlingResultStatus): if status == HandlingResultStatus.OK: return self.publish_success_to elif status == HandlingResultStatus.ERR: return self.publish_failure_to else: raise ValueError(f"Unknown status {status}") def _publish_serialized(self, data: Mapping[str, Any], to: str): serialized = self._serializer.serialize(data) self.publish_message(serialized, to) def publish_result(self, result: HandlingResult): data = result.data if data is None: return destination = self._get_destination(result.status) if isinstance(data, Mapping): self._publish_serialized(data, to=destination) elif isinstance(data, List): for item in data: self._publish_serialized(item, to=destination) else: raise ValueError("Invalid data structure") PKM[Nnj'sshappyly/pubsub/subscriber.pyfrom abc import ABC, abstractmethod from typing import Callable, Any from happyly._deprecations.utils import will_be_removed class BaseSubscriber(ABC): @abstractmethod def subscribe(self, callback: Callable[[Any], Any]): raise NotImplementedError class SubscriberWithAck(BaseSubscriber, ABC): @abstractmethod def ack(self, message): raise NotImplementedError # for compatibility, to be deprecated class Subscriber(SubscriberWithAck, ABC): def __init__(self, *args, **kwargs): will_be_removed('Subscriber', SubscriberWithAck, '0.7.0') super().__init__(*args, **kwargs) PKeN_))happyly/schemas/__init__.pyfrom .schema import Schema # noqa: F401 PKeN, happyly/schemas/schema.pyimport marshmallow class Schema(marshmallow.Schema): """ Marshmallow schema, which raises errors on mismatch (extra fields provided also raise exception). Subclass it just like any marshmallow Schema to describe schema. Instantiation with no arguments is a good strict default, but you can pass any arguments valid for `marshmallow.Schema` """ def __init__(self, *args, **kwargs): super().__init__(strict=True, *args, **kwargs) @marshmallow.validates_schema(pass_original=True) def check_unknown_fields(self, data, original_data): unknown = set(original_data) - set(self.fields) if unknown: raise marshmallow.ValidationError('Unknown field', unknown) PK}Nj!happyly/serialization/__init__.pyfrom .deserializer import Deserializer # noqa: F401 from .serializer import Serializer # noqa: F401 from .dummy import DUMMY_DESERIALIZER # noqa: F401 PKeNJ%happyly/serialization/deserializer.pyfrom abc import ABC, abstractmethod from typing import Mapping, Any _not_impl = NotImplementedError('No default implementation in base Deserializer class') class Deserializer(ABC): @abstractmethod def deserialize(self, message: Any) -> Mapping[str, Any]: raise _not_impl @abstractmethod def build_error_result(self, message: Any, error: Exception) -> Mapping[str, Any]: raise _not_impl PK}N,AUhappyly/serialization/dummy.pyfrom typing import Any, Mapping from .deserializer import Deserializer class DummyDeserializer(Deserializer): def deserialize(self, message) -> Mapping[str, Any]: if isinstance(message, Mapping): return message elif message is None: return {} else: raise ValueError( 'Dummy deserializer requires message attributes ' 'in form of dict-like structure as input' ) def build_error_result(self, message: Any, error: Exception) -> Mapping[str, Any]: raise error DUMMY_DESERIALIZER: DummyDeserializer = DummyDeserializer() PKeNe,,#happyly/serialization/serializer.pyfrom abc import ABC, abstractmethod from typing import Mapping, Any _no_default = NotImplementedError('No default implementation in base Serializer class') class Serializer(ABC): @abstractmethod def serialize(self, message_attributes: Mapping[str, Any]) -> Any: raise _no_default PK}NN''happyly-0.6.0.dist-info/LICENSEMIT License Copyright (c) 2019 Equeum Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. PK!HPOhappyly-0.6.0.dist-info/WHEEL HM K-*ϳR03rOK-J,/RH,szd&Y)r$[)T&UrPK!H{ happyly-0.6.0.dist-info/METADATAVYo6~ׯ@,Wt;u68F4("$ZbM,ݨ3ԮwzM7^_)첝b vPCvw͞2=w. JVpc\Ī{ t%u˲Ӌܢ `(ZX3צX<?;<{'qBkLXXeGA)) W>rmf\\)p%GBgDq֙h IH'|>f7Ã=,{#| C~lt:WEA| rۘΎ^HވP a:}5zJQlе ͅ={e܄9wb#?T:kgnC7&S8֙ӫɅ3}6Rew/bZ<_QF Ã]%keb?a:RWpۭ2xELpU IL6-Ŝ&@{ %hYv .ߟ,!ɾeB5ISbtJ̸vT^N(ʢ vXk![ e= [M:` 7WmQqy'4 & q4-mGs4m7X^RqQXvTpxR O1xuyE-){rB *YwP#Pe ,_qSQJv&d{VaB'=h|f^qByY,RvA"dd0OgtH <[:+J'CM M)(̩*;D} uXLǪUoMt³N R񩢳ttBi?eP y]g<$]TRKGelI \7JroB'SQ@p6K,c )}a{t׷}fjOR,>s~tC5^"j1,c&u` j+59TjNs@Hc%e4NE(W1>%^q?[E9n;Zc1b7W0Jh2D*fx)~GkujP,2M-=[^L.lz?:/MNaSO`\2='l%:;p灷PK!H/happyly-0.6.0.dist-info/RECORDɶX,p@S^H/}<}79̵F'_;w/izHߨtU:]!4MO'*;?v01*6>;0A~2"w`p &e> |e=u8;i޽0 mIk\˓@65*9*14H~.lS/Oװ}q"5yhzF; x 6-JabvACV7.>)D^0Z;{ѽ]Mr ػ0H;ѱ8tl -3fka~bz A(3oWe%0A!jD*U ],iH+]lYap@¾coߦO 'Xχ Wnr46y#`&%K YlL%ڤ6zpBȳ$"w%FuZ+eN2L]B;UBr9r7%AçO0K"c.X9VpZ^"DZ3A(I~L[AfMEw8o,YRS=FlGFP XϟIdVZ(Wky'P8r`9i1{J.+5csifa?=piwO@@J`ohnuV&.-5_m,e`>ӝ<mPDԡ_U.ybwdb6;HL[<]p>,ts1EQZ@5N}a[A7p^,"]N1zY6qsiu8rwv7ź@(7?>u8=+'N=G/E*9E05yO_͉ZsxvdkqzԬ;U,& ya3]0C v3ĝi~>ue/G~OcT7(9y u& *&Ci|"gNr|%_gMK-֐>Zw}V=mZM*MMa89_t=dٖ;Wlj:[~E-g ^D e}{ipEBcQ3a{˛ giI;Ӭe^_]eE}񐶿vKvɹȐV27ɺ|1&U9fRh|e+C\ 1քުԿQoZW$\V3k#6Qmik׋~0EҦsxZ b4z8uGB(nm~‘? F~P?ᴌTWVѐZBaJ͢6٤rmQ_TтatbsD5oO骊jc/͚]x}an~awE~wVKiV6k瞤hOx[wǃ PK"sNhLicchappyly/__init__.pyPK}N!happyly/_deprecations/__init__.pyPKN[NRrTThappyly/_deprecations/utils.pyPKeNv))chappyly/caching/__init__.pyPK}Nooohappyly/caching/cacher.pyPK}NW^k happyly/caching/mixins.pyPK扁Nޢ:66happyly/exceptions/__init__.pyPK扁NΓr#happyly/exceptions/stop_pipeline.pyPK}NIUKK!happyly/google_pubsub/__init__.pyPK}Nά&Ehappyly/google_pubsub/deserializers.pyPKeN#"happyly/google_pubsub/publishers.pyPK}Nץrr%:!happyly/google_pubsub/redis_cacher.pyPKmN%v$%happyly/google_pubsub/serializers.pyPK}NVX$'happyly/google_pubsub/subscribers.pyPK}N,+happyly/google_pubsub/high_level/__init__.pyPKXNްr(-happyly/google_pubsub/high_level/base.pyPK}NZ-Hhappyly/google_pubsub/high_level/early_ack.pyPK}N2R..,Jhappyly/google_pubsub/high_level/late_ack.pyPKN[Nڽ  *QMhappyly/google_pubsub/high_level/simple.pyPK}Nn].Uhappyly/google_pubsub/high_level/with_cache.pyPKeN.ȼ*\happyly/handling/__init__.pyPKeNBK!\happyly/handling/dummy_handler.pyPK}N{g^^^happyly/handling/handler.pyPKeN#vghappyly/handling/handling_result.pyPK"lNfihappyly/handling/types.pyPK}NFjjhappyly/listening/__init__.pyPKQWNduh*h*Qkhappyly/listening/executor.pyPKM[NW!happyly/listening/listener.pyPK}NGhappyly/logs/__init__.pyPK}N%%}happyly/logs/base.pyPK}NԨhappyly/logs/mixins.pyPKzZN-happyly/logs/request_id.pyPK}N<happyly/pubsub/__init__.pyPK}mNO[  ֬happyly/pubsub/publisher.pyPKM[Nnj'sshappyly/pubsub/subscriber.pyPKeN_))˸happyly/schemas/__init__.pyPKeN, -happyly/schemas/schema.pyPK}Nj!Ahappyly/serialization/__init__.pyPKeNJ%happyly/serialization/deserializer.pyPK}N,AUhappyly/serialization/dummy.pyPKeNe,,#happyly/serialization/serializer.pyPK}NN''/happyly-0.6.0.dist-info/LICENSEPK!HPOhappyly-0.6.0.dist-info/WHEELPK!H{ happyly-0.6.0.dist-info/METADATAPK!H/phappyly-0.6.0.dist-info/RECORDPK-- l