def publish(self, event: Event, _is_retry: bool = False):
    """Publish *event* to this bus' exchange.

    The event is enriched with the bus' ``info_id`` and ``additional_meta``
    when those attributes exist on the bus. If the broker closes the channel
    (typically because the event queue is not yet configured), the queue is
    configured via ``self.configurer`` and the publication is retried once.

    :param event: the petisco Event to publish.
    :param _is_retry: internal marker for the post-configuration retry;
        callers must not set it.
    :raises TypeError: if *event* is not a petisco Event.
    """
    # Validate BEFORE enrichment: calling add_info_id/update_meta on a
    # non-Event (or None) would otherwise fail with an opaque
    # AttributeError instead of the documented TypeError.
    if not event or not isinstance(event, Event):
        raise TypeError("Bus only publishes petisco.Event objects")
    if hasattr(self, "info_id"):
        event = event.add_info_id(self.info_id)
    if hasattr(self, "additional_meta"):
        event = event.update_meta(self.additional_meta)
    try:
        channel = self.connector.get_channel(self.rabbitmq_key)
        routing_key = RabbitMqEventQueueNameFormatter.format(
            event, exchange_name=self.exchange_name
        )
        channel.basic_publish(
            exchange=self.exchange_name,
            routing_key=routing_key,
            body=event.to_json(),
            properties=self.properties,
        )
        channel.close()
    except ChannelClosedByBroker:
        # If the Event queue is not configured, configure it and retry the
        # publication. Retry only once: the original unbounded recursion
        # could loop forever when the broker keeps closing the channel for
        # some other reason.
        if _is_retry:
            raise
        self.configurer.configure_event(event)
        self.publish(event, _is_retry=True)
def publish(self, event: Event):
    """Validate and enrich *event*; no actual publication is performed.

    NOTE(review): this variant stops after enrichment — presumably a fake /
    not-implemented bus; confirm that dropping the enriched event is
    intentional.

    :param event: the petisco Event to accept.
    :raises TypeError: if *event* is not a petisco Event.
    """
    # Validate BEFORE enrichment: calling add_info_id/update_meta on a
    # non-Event (or None) would otherwise fail with an opaque
    # AttributeError instead of the documented TypeError.
    if not event or not isinstance(event, Event):
        raise TypeError("Bus only publishes petisco.Event objects")
    if hasattr(self, "info_id"):
        event = event.add_info_id(self.info_id)
    if hasattr(self, "additional_meta"):
        event = event.update_meta(self.additional_meta)
def retry_publish_only_on_store_queue(self, event: Event):
    """Publish *event* straight to the retry store queue.

    Bypasses routing-key formatting and always publishes with the fixed
    ``retry.store`` routing key on the bus' exchange.

    :param event: the petisco Event to re-publish.
    :raises TypeError: if *event* is not a petisco Event.
    """
    # Guard clause: only genuine petisco Events are accepted.
    if not (event and issubclass(event.__class__, Event)):
        raise TypeError("Bus only publishes petisco.Event objects")
    retry_channel = self.connector.get_channel(self.rabbitmq_key)
    retry_channel.basic_publish(
        exchange=self.exchange_name,
        routing_key="retry.store",
        body=event.to_json(),
        properties=self.properties,
    )
def publish(self, event: Event):
    """Publish *event* on this bus' exchange and close the channel.

    A falsy event is silently ignored. The routing key is derived from the
    event itself via ``self._get_event_routing_key``.

    :param event: the event to publish; ``None``/falsy values are skipped.
    """
    # Silently skip empty publications instead of raising.
    if not event:
        return
    publish_channel = self._get_channel()
    event_routing_key = self._get_event_routing_key(event)
    publish_channel.basic_publish(
        exchange=self.exchange,
        routing_key=event_routing_key,
        body=event.to_json(),
        properties=self.properties,
    )
    # One short-lived channel per publication; release it immediately.
    publish_channel.close()
def wrapper(*args, **kwargs):
    # Pika consumer callback wrapping a user subscriber `func` (closure).
    # `args` carries the pika delivery tuple: (channel, method, properties,
    # body). Acks the message on success, nacks on failure / simulation /
    # unparsable body.

    @meiga
    def run_subscriber(**kwargs) -> Result:
        # Pass `func` only the keyword arguments its signature declares,
        # so subscribers may accept any subset of (event, routing_key).
        params = inspect.getfullargspec(func).args
        kwargs = {k: v for k, v in kwargs.items() if k in params}
        return func(**kwargs)

    self._check_logger()
    self._check_notifier()
    ch, method, properties, body = args
    log_message = LogMessage(layer="subscriber", operation=f"{func.__name__}")
    self.logger.log(
        DEBUG,
        log_message.set_message(
            {"routing_key": method.routing_key, "body": json.loads(body)}
        ),
    )
    # Chaos hook: deliberately nack to exercise requeue/dead-letter paths.
    if self._nack_simulation():
        ch.basic_nack(delivery_tag=method.delivery_tag)
        self._log_nack_simulation(log_message)
        return
    # Routing-key filter: reject messages this subscriber is not meant to
    # process.
    if self._filter_by_routing_key(method.routing_key):
        ch.basic_nack(delivery_tag=method.delivery_tag)
        self._log_filter_by_routing_key(log_message)
        return
    try:
        event = Event.from_json(body)
    except TypeError:
        # Fall back to the legacy serialization format.
        event = Event.from_deprecated_json(body)
    except:  # noqa E722
        # Any other parsing failure: log and nack without retrying, since
        # the payload will never become parsable.
        self._log_invalid_event_format(log_message, body)
        return ch.basic_nack(delivery_tag=method.delivery_tag)
    kwargs = dict(event=event, routing_key=RoutingKey(method.routing_key))
    try:
        result = run_subscriber(**kwargs)
    except Exception as exception:
        # Subscriber raised outside the meiga Result protocol; wrap it so
        # the failure flows through the same notify/nack path below.
        result = Failure(
            UnknownError(
                exception=exception,
                input_parameters=kwargs if len(kwargs) > 0 else args,
                executor=f"{func.__name__} (Subscriber)",
                traceback=traceback.format_exc(),
            )
        )
    # Optional throttle between message handling (seconds).
    if self.delay_after:
        time.sleep(self.delay_after)
    self.notify(result)
    if result is None or result.is_failure:
        message = f"{result}: {traceback.format_exc()}"
        self.logger.log(ERROR, log_message.set_message(message))
        ch.basic_nack(delivery_tag=method.delivery_tag)
    else:
        ch.basic_ack(delivery_tag=method.delivery_tag)
def main_handler_requeue(event: Event):
    """Dead-letter test handler: append the event's JSON to the requeue file."""
    print(f"main_handler_requeue: {event.to_json()}")
    with open(filename_main_handler_requeue, "a+") as requeue_file:
        requeue_file.write(event.to_json() + "\n")
    return isSuccess
def main_handler(event: Event):
    """Main test handler: append the event's JSON to the capture file."""
    print(f"main_handler: {event.to_json()}")
    with open(filename_main_handler, "a+") as capture_file:
        capture_file.write(event.to_json() + "\n")
    return isSuccess
def test_should_work_successfully_a_happy_path_pub_sub_with_two_subscribers_and_closing_one_of_them(
    given_any_petisco,
    make_user_created_event,
    given_random_organization,
    given_random_service,
    given_random_topic,
):
    # Integration test: two subscribers (main + dead-letter) on the same
    # topic; the dead-letter one is stopped mid-test and the main one must
    # still receive every published event, in order. Requires a live
    # RabbitMQ broker (timings via await_for_it are presumably tuned to
    # broker latency — confirm before shortening them).
    event_before_delete_letter_stop = make_user_created_event("user_id_1")
    event_after_delete_letter_stop_1 = make_user_created_event(
        "user_id_after_delete_letter_stop_1"
    )
    event_after_delete_letter_stop_2 = make_user_created_event(
        "user_id_after_delete_letter_stop_2"
    )
    event_after_delete_letter_stop_3 = make_user_created_event(
        "user_id_after_delete_letter_stop_3"
    )
    # Handlers record what they consumed into these files; remove any
    # leftovers from a previous run so assertions see only this run.
    filename_main_handler = "filename_main_handler.txt"
    filename_main_handler_requeue = "filename_main_handler_requeue.txt"
    if os.path.exists(filename_main_handler):
        os.remove(filename_main_handler)
    if os.path.exists(filename_main_handler_requeue):
        os.remove(filename_main_handler_requeue)

    @subscriber_handler()
    def main_handler(event: Event):
        # Appends each consumed event's JSON to the main capture file.
        print(f"main_handler: {event.to_json()}")
        with open(filename_main_handler, "a+") as fm:
            fm.write(event.to_json() + "\n")
        return isSuccess

    @subscriber_handler()
    def main_handler_requeue(event: Event):
        # Dead-letter handler; must end up never writing (file must not
        # exist at the end of the test).
        print(f"main_handler_requeue: {event.to_json()}")
        with open(filename_main_handler_requeue, "a+") as fp:
            fp.write(event.to_json() + "\n")
        return isSuccess

    subscriber = RabbitMQEventSubscriber(
        connector=RabbitMqConnector(),
        subscribers={
            "auth": ConfigEventSubscriber(
                organization=given_random_organization,
                service=given_random_service,
                topic=given_random_topic,
                handler=main_handler,
            )
        },
        connection_name="subscriber",
    )
    # Second subscriber bound to the dead-letter queue of the same topic.
    dl_subscriber = RabbitMQEventSubscriber(
        connector=RabbitMqConnector(),
        subscribers={
            "dl-auth": ConfigEventSubscriber(
                organization=given_random_organization,
                service=given_random_service,
                topic=given_random_topic,
                handler=main_handler_requeue,
                dead_letter=True,
            )
        },
        connection_name="dl-subscriber",
    )
    publisher = RabbitMQEventPublisher(
        connector=RabbitMqConnector(),
        organization=given_random_organization,
        service=given_random_service,
        topic=given_random_topic,
    )
    subscriber.start()
    dl_subscriber.start()
    await_for_it(1.5)
    # One event while both subscribers run...
    publisher.publish(event_before_delete_letter_stop)
    await_for_it(1.5)
    # ...then stop the dead-letter subscriber and publish three more.
    dl_subscriber.stop()
    await_for_it(5.5)
    publisher.publish_events(
        [
            event_after_delete_letter_stop_1,
            event_after_delete_letter_stop_2,
            event_after_delete_letter_stop_3,
        ]
    )
    await_for_it(1.5)
    # The main handler must have consumed all four events, in publish order.
    with open(filename_main_handler, "r") as fmm:
        lines = fmm.readlines()
    events = []
    for line in lines:
        events.append(Event.from_json(line))
    assert event_before_delete_letter_stop == events[0]
    assert event_after_delete_letter_stop_1 == events[1]
    assert event_after_delete_letter_stop_2 == events[2]
    assert event_after_delete_letter_stop_3 == events[3]
    # The dead-letter handler must never have fired.
    assert not os.path.exists(filename_main_handler_requeue)
    os.remove(filename_main_handler)
    subscriber.stop()
def rabbitmq_consumer(
    ch: BlockingChannel,
    method: Basic.Deliver,
    properties: BasicProperties,
    body: bytes,
):
    # Pika delivery callback (closure over `self`, `handler`, `is_store`).
    # Parses the body into an Event, runs `handler`, then acks on success
    # or routes the failure through handle_consumption_error.
    self.printer.print_received_message(method, properties, body)
    # Chaos hook: simulated nack short-circuits all processing.
    if self.chaos.nack_simulation(ch, method):
        self.consumer_logger.log_nack_simulation(method, properties, body, handler)
        return
    else:
        self.consumer_logger.log(
            method, properties, body, handler, log_activity="received_message"
        )
    try:
        event = Event.from_json(body)
    except TypeError:
        # Fall back to the legacy serialization format.
        event = Event.from_deprecated_json(body)
    except Exception as e:
        # Unparsable payload: log, nack, and stop — no point retrying.
        self.consumer_logger.log_parser_error(method, properties, body, handler, e)
        ch.basic_nack(delivery_tag=method.delivery_tag)
        return
    # Chaos hooks: optional artificial delay / forced failure.
    self.chaos.delay()
    if self.chaos.failure_simulation(method):
        self.consumer_logger.log_failure_simulation(method, properties, body, handler)
        result = Failure(EventChaosError())
    else:
        # Handlers may optionally declare an `event_bus` parameter; if so,
        # hand them a bus bound to this consumer's channel so publications
        # happen on the same connection.
        params = inspect.getfullargspec(handler).args
        if "event_bus" in params:
            connector = RabbitMqConsumerConnector(ch)
            event_bus = RabbitMqConsumerEventBus(
                connector, self.organization, self.service
            )
            result = handler(event, event_bus)
        else:
            result = handler(event)
    self.printer.print_context(handler, result)
    # Handlers must return a meiga Result; None is a programming error.
    if result is None:
        raise RabbitMqEventConsumerReturnError(handler)
    derived_action = ConsumerDerivedAction()
    if result.is_failure:
        # Tag the message with its origin queue so the error-handling
        # pipeline can route retries/dead-letters correctly.
        if not properties.headers:
            properties.headers = {
                "queue": f"{method.routing_key}.{handler.__name__}"
            }
        derived_action = self.handle_consumption_error(
            ch, method, properties, body, is_store
        )
    else:
        ch.basic_ack(delivery_tag=method.delivery_tag)
    self.consumer_logger.log(
        method,
        properties,
        body,
        handler,
        "computed_message",
        result,
        derived_action,
    )
    self.printer.print_separator()