class Konexion:
    """Encapsulates the RabbitMQ connection using Kombu."""

    enlace = None
    canal = None
    estafeta = None
    productor = None
    cola = None

    def __init__(self, dotenv_path=".env"):
        load_dotenv(dotenv_path=dotenv_path)
        url = 'amqp://{}:{}@localhost:5672/platziv'.format(
            os.environ.get('TEST_USER'),
            os.environ.get('TEST_PASS'))
        self.enlace = Connection(url)
        self.canal = self.enlace.channel()
        # self.estafeta = Exchange("platzie", type="direct")
        self.estafeta = Exchange("", type="direct")
        self.productor = Producer(exchange=self.estafeta,
                                  channel=self.canal,
                                  routing_key="platziq")
        self.cola = Queue(name="platziq",
                          exchange=self.estafeta,
                          routing_key="platziq")
        self.cola.maybe_bind(self.enlace)
def _register_retry_queues(self, **_: Any) -> None:
    """
    Initializes a set of AMQP primitives to implement broker-based delays.
    """
    channel = self.broker_connection().default_channel
    for queue in self.conf.task_queues:
        retry_queue = Queue(
            name=f'{queue.name}.retry',
            routing_key=f'{queue.routing_key}.retry',
            exchange=queue.exchange,
            queue_arguments={
                "x-dead-letter-exchange": "",
                "x-dead-letter-routing-key": queue.name
            }
        )
        retry_queue.declare(channel=channel)
        retry_queue.maybe_bind(channel=channel)

        archived_queue = Queue(
            name=f'{queue.name}.archived',
            routing_key=f'{queue.routing_key}.archived',
            exchange=queue.exchange,
            queue_arguments={
                "x-message-ttl": defaults.AMQP_EVENTS_ARCHIVED_MESSAGE_TTL,
                "x-max-length": defaults.AMQP_EVENTS_ARCHIVED_QUEUE_LENGTH,
                "x-queue-mode": "lazy"
            })
        archived_queue.declare(channel=channel)
        archived_queue.maybe_bind(channel=channel)
def send_address_to_queue(message):
    logger.info('Starting to send a message to {} queue'.format(
        ADDRESS_PRODUCER_QUEUE))
    with Connection(settings.BROKER_URL) as connection:
        logger.info('Connected into the broker with success')
        connection.connect()
        channel = connection.channel()
        exchange = Exchange(ADDRESS_EXCHANGE, type='direct')
        producer = Producer(
            channel=channel,
            routing_key=ADDRESS_PRODUCER_ROUTING_KEY,
            exchange=exchange,
        )
        queue = Queue(
            name=ADDRESS_PRODUCER_QUEUE,
            routing_key=ADDRESS_PRODUCER_ROUTING_KEY,
            exchange=exchange,
        )
        from api.order.serializers import AddressSerializer
        address_serializer = AddressSerializer(message).data
        queue.maybe_bind(connection)
        queue.declare()
        producer.publish(address_serializer)
        connection.close()
class EventsPublisher():
    def __init__(self):
        self.rabbit_url = 'amqp://localhost:5672/'
        self.conn = Connection(self.rabbit_url)
        self.channel = self.conn.channel()
        self.exchange = Exchange(name='gateway-exchange', type='fanout')
        self.producer = Producer(exchange=self.exchange,
                                 channel=self.channel,
                                 routing_key='gateway')
        self.queue = Queue(name='gateway-queue',
                           exchange=self.exchange,
                           routing_key='gateway')
        self.queue.maybe_bind(self.conn)
        self.queue.declare()

    def publish(self, body):
        # body = {
        #     # "id": "bb2cdchl52n4orsopmtg",
        #     "status": 1,
        #     "location": {
        #         "type": "Point",
        #         "coordinates": [2.2861460, 48.8268020],
        #     }
        # }
        self.producer.publish(body, serializer='json')
        logging.info('*** Event published is: {}'.format(body))
def run(rabbit_url):
    print(rabbit_url)
    conn = Connection(rabbit_url)
    conn.ensure_connection()
    conn.connect()
    channel = conn.channel()
    exchange = Exchange(config.EXCHANGE_NAME, type='direct')
    producer = Producer(exchange=exchange,
                        channel=channel,
                        routing_key=config.ROUTING_KEY)
    queue = Queue(name=config.QUEUE_NAME,
                  exchange=exchange,
                  routing_key=config.ROUTING_KEY)
    queue.maybe_bind(conn)
    queue.declare()
    index = 0
    while True:
        try:
            time.sleep(1)
            print('producer')
            index += 1
            producer.publish("send message -- %s" % index)
        except socket.timeout:
            pass
def send_geo_location_to_queue(address_id, geo_location):
    with Connection(settings.BROKER_URL) as connection:
        connection.connect()
        channel = connection.channel()
        exchange = Exchange(ADDRESS_EXCHANGE, type='direct')
        producer = Producer(
            channel=channel,
            routing_key=ADDRESS_CUSTOMER_PRODUCER_ROUTING_KEY,
            exchange=exchange,
        )
        queue = Queue(
            name=ADDRESS_CUSTOMER_PRODUCER_QUEUE,
            routing_key=ADDRESS_CUSTOMER_PRODUCER_ROUTING_KEY,
            exchange=exchange,
        )
        geo_location_from_address = geo_location
        geo_location_from_address.update({'id': address_id})
        queue.maybe_bind(connection)
        queue.declare()
        producer.publish(geo_location_from_address)
        connection.close()
def _register_retry_queues(self, **_: Any) -> None:
    """
    Initializes a set of AMQP primitives to implement broker-based delays.

    Declares an exchange/queue pair for each delay stage defined by
    `amqp_events.defaults:AMQP_EVENTS_MAX_RETRIES`. Each exchange has a single
    bound queue; when a task needs to be delayed, it is re-published to a new
    exchange, preserving the initial routing_key. The queue bound to each
    exchange has `message-ttl` set to a power of 2. After a message expires,
    the broker re-routes it to the `dead-letter-exchange` named `recover`.
    All queues defined for event handlers are also bound to this exchange with
    the same routing key, so after a retry each message reappears in the same
    incoming queue.

    `events` -> `demo.my_event_queue` -> Celery worker (retry)
        |
        | (publishes new message on retry)
        V
    `demo:retry.N` -> `demo:retry.N`
        |
        | (message ttl expires)
        V
    `recover` -> (routing key) -> `demo.my_event_queue` -> Celery worker
    """
    channel = self.broker_connection().default_channel
    for retry in range(defaults.AMQP_EVENTS_MAX_RETRIES):
        name = self.get_retry_exchange_name(retry)
        retry_exchange = Exchange(name=name, type=EXCHANGE_TYPE_FANOUT)
        retry_queue = Queue(
            name=name,
            exchange=retry_exchange,
            queue_arguments={
                X_MESSAGE_TTL: 2**retry * 1000,  # ms
                X_DEAD_LETTER_EXCHANGE: self.recover_exchange_name,
            })
        retry_queue.declare(channel=channel)
        retry_queue.maybe_bind(channel=channel)
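# A minimal sketch of the producer side of the retry flow described in the
# docstring above: a failed event is re-published to the fanout retry exchange
# for the current attempt, keeping its original routing key so the `recover`
# dead-letter exchange can route it back to the handler queue. The exchange
# name pattern 'demo:retry.N' is an assumption standing in for whatever
# get_retry_exchange_name() returns; `conn` is assumed to be an open
# kombu Connection.
from kombu import Connection, Exchange, Producer


def republish_for_retry(conn, body, routing_key, retry):
    # Fanout retry exchange for this delay stage (placeholder name).
    retry_exchange = Exchange(name='demo:retry.{}'.format(retry),
                              type='fanout')
    producer = Producer(channel=conn.default_channel,
                        exchange=retry_exchange)
    # Preserve the initial routing key; after the queue's message TTL
    # expires, the broker dead-letters the message back to its handler queue.
    producer.publish(body, routing_key=routing_key, serializer='json')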
def main():
    rabbit_url = "amqp://*****:*****@10.5.9.177:5672//"
    conn = Connection(rabbit_url)
    channel = conn.channel()
    exchange = Exchange("test", type="direct")
    producer = Producer(exchange=exchange, channel=channel, routing_key="vcc")
    queue = Queue(name="chungpht", exchange=exchange, routing_key="vcc")
    queue.maybe_bind(conn)
    queue.declare()
    producer.publish("Hello from other side 222")
def producer(msg=None):
    print("------- in producer")
    rabbit_url = "amqp://localhost:5672/"
    conn = Connection(rabbit_url)
    channel = conn.channel()
    exchange = Exchange("scrapy", type="direct")
    producer = Producer(exchange=exchange, channel=channel,
                        routing_key="quotes")
    queue = Queue(name="quotation", exchange=exchange, routing_key="quotes")
    queue.maybe_bind(conn)
    queue.declare()
    producer.publish(msg)
    print("published ->")
def connect_rabbitmq():
    # Connect to RabbitMQ
    conn = Connection(rabbit_url)
    channel = conn.channel()
    exchange = Exchange("contador-carro-exchange", type="direct",
                        delivery_mode=1)
    producer = Producer(
        exchange=exchange,
        channel=channel,
        routing_key="contador-carro-exchange"
    )
    queue = Queue(name="contador-carro-exchange",
                  exchange=exchange,
                  routing_key="contador-carro-exchange")
    queue.maybe_bind(conn)
    queue.declare()
    return producer
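# A minimal usage sketch for the helper above, assuming the module-level
# `rabbit_url` points at a reachable broker; the payload is purely
# illustrative and not part of the original example.
producer = connect_rabbitmq()
# Messages are routed with the "contador-carro-exchange" key and, because the
# exchange was created with delivery_mode=1, are sent as transient messages.
producer.publish({"carros": 1}, serializer="json")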
def declare_queue(self, connection, name='', auto_delete=False, durable=False,
                  **kwargs):
    queue_args = kwargs.pop('queue_arguments', {})
    queue_args['x-ha-policy'] = 'all'
    queue = Queue(name, durable=durable, auto_delete=auto_delete,
                  queue_arguments=queue_args, **kwargs)
    queue.maybe_bind(connection.default_channel)
    queue.queue_declare()
    return queue
def publish_data_to_broker(data):
    try:
        conn = Connection(rabbit_url)
        channel = conn.channel()
        exchange = Exchange("example-exchange", type="direct")
        producer = Producer(exchange=exchange, channel=channel,
                            routing_key="BOB")
        queue = Queue(name="example-queue", exchange=exchange,
                      routing_key="BOB")
        queue.maybe_bind(conn)
        queue.declare()
        producer.publish(data)
    except Exception as exc:
        raise Exception('Error while publishing data to the broker') from exc
class Populator(object):
    def __init__(self):
        url = "amqp://{}:{}@{}:5672/".format(config.rabbitmq.username,
                                             config.rabbitmq.password,
                                             config.rabbitmq.host)
        self.connection = Connection(url)
        self.channel = self.connection.channel()
        self.exchange = Exchange("", type="direct", durable=True)
        self.producer = Producer(exchange=self.exchange,
                                 channel=self.channel,
                                 serializer="pickle")
        self.login_queue = Queue(name=config.rabbitmq.login_queue,
                                 exchange=self.exchange,
                                 routing_key=config.rabbitmq.login_queue,
                                 durable=True)
        self.login_queue.maybe_bind(self.connection)
        self.login_queue.declare()
        self.work_queue = Queue(name=config.rabbitmq.work_queue,
                                exchange=self.exchange,
                                routing_key=config.rabbitmq.work_queue,
                                durable=True)
        self.work_queue.maybe_bind(self.connection)
        self.work_queue.declare()

    def __del__(self):
        self.connection.close()

    def _publish(self, queue_name, data):
        self.producer.publish(data.encode('utf-8'),
                              routing_key=queue_name,
                              retry=True,
                              delivery_mode=2)

    def publish_login(self, credentials):
        self._publish(config.rabbitmq.login_queue, credentials)

    def publish_work(self, work):
        self._publish(config.rabbitmq.work_queue, work)
def _init_queues(self):
    channel = self.connection.channel()
    for cqrs_id in ReplicaRegistry.models.keys():
        q = Queue(
            self.queue_name,
            exchange=self.exchange,
            routing_key=cqrs_id,
        )
        q.maybe_bind(channel)
        q.declare()
        self.queues.append(q)

        sync_q = Queue(
            self.queue_name,
            exchange=self.exchange,
            routing_key='cqrs.{0}.{1}'.format(self.queue_name, cqrs_id),
        )
        sync_q.maybe_bind(channel)
        sync_q.declare()
        self.queues.append(sync_q)
def _register_archived_queue(self, **_: Any) -> None:
    """
    Registers an exchange and a queue for archived messages.
    """
    max_ttl = defaults.AMQP_EVENTS_ARCHIVED_MESSAGE_TTL
    max_len = defaults.AMQP_EVENTS_ARCHIVED_QUEUE_LENGTH
    if not (max_ttl or max_len):
        # archived exchange is disabled
        return
    archived = Exchange(name=self.archived_exchange_name,
                        type=EXCHANGE_TYPE_FANOUT)
    channel = self.broker_connection().default_channel
    archived_queue = Queue(name=self.archived_exchange_name,
                           exchange=archived,
                           queue_arguments={
                               X_MESSAGE_TTL: max_ttl,
                               X_MAX_LENGTH: max_len,
                               X_QUEUE_MODE: QUEUE_MODE_LAZY
                           })
    archived_queue.declare(channel=channel)
    archived_queue.maybe_bind(channel=channel)
def start(self):
    """
    Start subscriber.
    """
    with Connection(self.config.broker_url) as conn:
        data_pipeline_exchange = Exchange(self.config.exchange, 'topic')
        queue = Queue(self.config.queue_name)
        queue.maybe_bind(conn)
        queue.declare()

        for routing_key in set(
                [i['routing_key'] for i in self.config.task_mapping]):
            queue.bind_to(data_pipeline_exchange, routing_key)

        worker = Worker(self.config, conn, [queue])
        logger.info('Starting subscriber...')
        worker.run()
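# For reference, a publisher-side counterpart to the subscriber above might
# look like this sketch. The broker URL, exchange name, and routing key are
# placeholders, not values from the subscriber's actual config object.
from kombu import Connection, Exchange, Producer

with Connection('amqp://guest:guest@localhost:5672//') as conn:
    exchange = Exchange('data-pipeline', 'topic')
    producer = Producer(conn.default_channel, exchange=exchange)
    # Messages published with a routing key listed in config.task_mapping are
    # routed by the topic exchange to the queue bound in start().
    producer.publish({'event': 'example'},
                     routing_key='pipeline.example',
                     serializer='json')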
def setup_rabbitMQ():
    rabbit_domain = os.environ.get('RABBITMQ_URL')
    rabbit_domain = rabbit_domain if rabbit_domain is not None else 'localhost'
    rabbit_url = 'amqp://*****:*****@' + rabbit_domain + ':5672//'

    # Kombu Connection
    conn = Connection(rabbit_url)
    channel = conn.channel()

    # Kombu Exchange
    # - set delivery_mode to transient to prevent disk writes for faster delivery
    exchange = Exchange("video-exchange", type="direct", delivery_mode=1)

    # Kombu Producer
    producer = Producer(exchange=exchange, channel=channel, routing_key="video")

    # Kombu Queue
    queue = Queue(name="video-queue", exchange=exchange, routing_key="video")
    queue.maybe_bind(conn)
    queue.declare()

    return queue, exchange, producer, conn
def _rabbit_mq(messages):
    conn = Connection(host)
    channel = conn.channel()
    exchange = Exchange(exchange_name, type="direct")
    queue = Queue(name=queue_name, exchange=exchange, routing_key=routing_key)
    queue.maybe_bind(conn)
    queue.declare()
    producer = Producer(exchange=exchange, channel=channel,
                        routing_key=routing_key)
    for message in messages:
        # as_dict = message.asDict(recursive=True)
        producer.publish(message)
    channel.close()
    conn.release()
    return messages
def data_publish(connection, channel):
    exchange = Exchange('data_exchange', type="direct")
    producer = Producer(exchange=exchange, channel=channel,
                        routing_key='data_info')
    queue = Queue(name="data_q", exchange=exchange, routing_key="data_info",
                  queue_arguments={'x-ha-policy': 'all'})
    queue.maybe_bind(connection)
    queue.declare()
    message = [
        '{"device_id": "30c15b59-650a-42a7", "check": false}',
        '{ "device_id": "30c15b59-650a-42a7", "check": true}',
        '{ "device_id": "30c15b59-650a-42a7", "check": true}',
        '{"device_id": "30c15b59-650a-42a7", "check": false}',
        '{"device_id": "30c15b59-650a-42a7", "check": true}'
    ]
    for msg in message:
        producer.publish(msg)
        print("Message published", msg)
        time.sleep(2)
def get_response_queue(self, connection, name=None):
    """
    Set up the queue on which to listen for responses.

    :param connection: A Kombu Connection instance.
    :param name: Name of the queue (defaults to the response routing key).
    :return: A Kombu `Queue
        <https://kombu.readthedocs.org/en/latest/reference/kombu.html#queue>`_
        instance.
    """
    if name is None:
        name = self.response_routing_key
    exchange = get_exchange(connection)
    queue = Queue(name, exchange, self.response_routing_key,
                  connection.default_channel)
    queue.maybe_bind(connection)
    log.debug('Created queue: {0}'.format(queue))
    return queue
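# A sketch of draining replies from the queue returned above. `client` stands
# in for an instance of the class that defines get_response_queue, and the
# handle_response callback plus the two-second timeout are illustrative
# assumptions, not part of the original helper.
import socket
from kombu import Consumer


def handle_response(body, message):
    # Process the reply payload and acknowledge it.
    print('response:', body)
    message.ack()


response_queue = client.get_response_queue(connection)
with Consumer(connection, queues=[response_queue],
              callbacks=[handle_response]):
    try:
        connection.drain_events(timeout=2)
    except socket.timeout:
        pass  # no reply arrived within the window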
from kombu import Connection, Exchange, Producer, Queue, Consumer

rabbit_url = "redis://localhost:6379/"
print("rabbit_url: ", rabbit_url)

conn = Connection(rabbit_url)
print("conn: ", conn)

channel = conn.channel()
print("channel: ", channel)

exchange = Exchange("example-exchange", type="direct")
print("exchange: ", exchange)

producer = Producer(exchange=exchange, channel=channel, routing_key='BOB')
print("producer: ", producer)

queue = Queue(name="example-queue", exchange=exchange, routing_key='BOB')
queue.maybe_bind(conn)
queue.declare()
print("queue: ", queue)

msg = producer.publish("Hello there")
print("msg: ", msg)


def process_body(body, message):
    print("Message: ", body)
    message.ack()


with Consumer(conn, queues=queue, callbacks=[process_body],
              accept=["text/plain"]):
    print("consumer: ", conn)
    conn.drain_events(timeout=2)
from kombu import Connection, Exchange, Producer, Queue, Consumer

conn = Connection("amqp://*****:*****@localhost:5672")
exchange = Exchange("rpc", type="direct")

request_queue = Queue(name="rpc", exchange=exchange, routing_key="request")
request_queue.maybe_bind(conn)
request_queue.declare()


def process_message(body, message):
    print("Request: %s" % body)
    message.ack()
    # send reply to client
    producer = Producer(channel=conn,
                        routing_key=message.properties['reply_to'])
    producer.publish("result")


with Consumer(conn, request_queue, callbacks=[process_message]):
    conn.drain_events()
from kombu import Connection, Exchange, Producer, Queue, Message
from time import sleep
import datetime
from random import randint

rabbit_url = "amqp://"
conn = Connection(rabbit_url)
ch = conn.channel()
ex = Exchange('exchange-1', type='direct')
p = Producer(exchange=ex, channel=ch, routing_key='BOB')
q = Queue(name='q-1', exchange=ex, routing_key='BOB')
q.maybe_bind(conn)
q.declare()

while True:
    timestamp = 'Timestamp: {:%Y-%m-%d %H:%M:%S}'.format(
        datetime.datetime.now())
    print('published: {}'.format(timestamp))
    p.publish(timestamp)
    sleep(randint(1, 5))
def delete_queue(self, connection, name, *args, **kwargs):
    queue = Queue(name=name)
    queue.maybe_bind(connection.default_channel)
    queue.delete(*args, **kwargs)
class Worker(object):
    def __init__(self, queue_name):
        self.queue_name = queue_name
        self.serializer = "pickle"
        self.rabbit_connect()
        self.poll_messages()

    def rabbit_connect(self):
        url = "amqp://{}:{}@{}:5672/".format(config.rabbitmq.username,
                                             config.rabbitmq.password,
                                             config.rabbitmq.host)
        self.connection = Connection(url)
        self.channel = self.connection.channel()
        self.channel.basic_qos(prefetch_size=0, prefetch_count=1,
                               a_global=False)
        self.exchange = Exchange("", type="direct", durable=True)
        self.queue = Queue(name=self.queue_name,
                           exchange=self.exchange,
                           routing_key=self.queue_name)
        self.queue.maybe_bind(self.connection)
        self.queue.declare()
        self.producer = Producer(exchange=self.exchange,
                                 channel=self.channel,
                                 serializer=self.serializer)
        self.consumer = Consumer(self.connection,
                                 queues=self.queue,
                                 callbacks=[self.message_callback],
                                 accept=["application/x-python-serialize"])
        # self.consumer.qos(prefetch_count=1)

    def poll_messages(self):
        while True:
            try:
                self.process_messages()
            except self.connection.connection_errors:
                pass

    def process_messages(self):
        self.connection = self.renew_connection()
        while True:
            try:
                self.connection.drain_events(timeout=5)
            except socket.timeout:
                pass

    def renew_connection(self):
        new_connection = self.connection.clone()
        new_connection.ensure_connection(max_retries=10)
        self.channel = new_connection.channel()
        self.channel.basic_qos(prefetch_size=0, prefetch_count=1,
                               a_global=False)
        self.consumer.revive(self.channel)
        self.producer.revive(self.channel)
        self.consumer.consume()
        return new_connection

    def message_callback(self, body, message):
        # Convert body to UTF-8 string
        body = body.decode('utf-8')
        # Process message
        self.process_message(body)
        # Tell RabbitMQ that we processed the message
        message.ack()

    def process_message(self, message):
        # Generic message processing stub
        print("Message from queue '{}': '{}'".format(self.queue_name, message))

    def produce_message(self, message):
        self.producer.publish(message.encode('utf-8'),
                              routing_key=self.queue_name,
                              retry=True,
                              delivery_mode=2)
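# Since process_message() above is a generic stub, the usual way to use this
# worker would be to subclass it and override that method. The class and
# queue names below are hypothetical, and note that instantiating the worker
# blocks in poll_messages().
class GreetingWorker(Worker):
    def process_message(self, message):
        # Application-specific handling replaces the stub.
        print("handling greeting: {}".format(message))


GreetingWorker("greetings")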
from kombu import Connection, Exchange, Queue, Consumer

rabbit_url = "amqp://*****:*****@localhost:5672/"
conn = Connection(rabbit_url)
exchange = Exchange("kombu-exchange", type="direct")

queue = Queue(name="kombu_queue_2", exchange=exchange, routing_key="BOB")
queue.maybe_bind(conn)
queue.declare()


def process_message(body, message):
    print("The body is {}".format(body))
    message.ack()


with Consumer(conn, queues=queue, callbacks=[process_message],
              accept=["text/plain"]):
    conn.drain_events()
class Publisher:
    def __init__(self,
                 name=ev("PUBLISHER_NAME", "kombu-publisher"),
                 auth_url=ev("BROKER_URL", "redis://localhost:6379/0"),
                 ssl_options={},
                 max_general_failures=-1):  # infinite retries
        """
        Available Transports:
        https://github.com/celery/kombu#transport-comparison
        """
        self.state = "not_ready"
        self.name = name
        self.auth_url = auth_url
        self.ssl_options = ssl_options
        self.exchange = None
        self.queue = None
        self.declare_entities = []
        self.conn = None
        self.channel = None
        self.producer = None
        self.num_setup_failures = 0
        self.num_publish_failures = 0
        self.max_general_failures = max_general_failures
        self.exchange_name = ""
        self.exchange_type = "direct"
        self.queue_name = ""
        self.routing_key = ""
        self.serializer = "json"
    # end of __init__

    def setup_routing(self,
                      exchange_name,
                      queue_name,
                      routing_key,
                      serializer="json",
                      on_return=None,
                      transport_options={},
                      *args,
                      **kwargs):
        self.exchange_name = exchange_name
        self.exchange = None
        self.routing_key = routing_key
        self.queue_name = queue_name
        self.serializer = serializer

        if self.routing_key:
            log.debug(("creating Exchange={} topic rk={}").format(
                self.exchange_name, self.routing_key))
            self.exchange_type = "topic"
        else:
            log.debug(("creating Exchange={} direct").format(
                self.exchange_name, self.routing_key))
            self.exchange_type = "direct"
        # end of if/else

        self.exchange = Exchange(self.exchange_name, type=self.exchange_type)

        self.queue = Queue(self.queue_name,
                           exchange=self.exchange,
                           routing_key=self.routing_key)

        self.declare_entities = [self.exchange, self.queue]

        # https://redis.io/topics/security
        #
        # Redis does not support encryption, but I would like to try out
        # ssl-termination using an haproxy/nginx container running as an
        # ssl-proxy to see if this works.
        # import ssl
        # Connection("amqp://", login_method='EXTERNAL', ssl={
        #     "ca_certs": '/etc/pki/tls/certs/something.crt',
        #     "keyfile": '/etc/something/system.key',
        #     "certfile": '/etc/something/system.cert',
        #     "cert_reqs": ssl.CERT_REQUIRED,
        # })
        #
        self.conn = Connection(self.auth_url,
                               transport_options=transport_options)
        self.channel = self.conn.channel()

        log.debug(("creating kombu.Producer broker={} "
                   "ssl={} ex={} rk={} serializer={}").format(
                       self.auth_url, self.ssl_options, self.exchange_name,
                       self.routing_key, self.serializer))

        self.producer = Producer(channel=self.channel,
                                 exchange=self.exchange,
                                 routing_key=self.routing_key,
                                 auto_declare=True,
                                 serializer=self.serializer,
                                 on_return=None,
                                 *args,
                                 **kwargs)

        log.debug("creating kombu.Exchange={}".format(self.exchange))
        self.producer.declare()

        log.debug("creating kombu.Queue={}".format(self.queue_name))
        self.queue.maybe_bind(self.conn)
        self.queue.declare()

        self.state = "ready"
    # end of setup_routing

    def publish(self,
                body,
                exchange,
                routing_key,
                queue,
                priority=0,
                ttl=None,
                delivery_mode=2,  # 1 - transient, 2 - persistent
                serializer="json",
                retry=True,
                silent=False,
                transport_options={},
                *args,
                **kwargs):
        """
        Redis does not have an Exchange or Routing Keys, but RabbitMQ does.
        Redis producers use only the queue name to both publish and consume
        messages:
        http://docs.celeryproject.org/en/latest/getting-started/brokers/redis.html#configuration
        """
        msg_sent = False

        if self.state != "ready":
            try:
                self.setup_routing(exchange_name=exchange,
                                   queue_name=queue,
                                   routing_key=routing_key,
                                   serializer=serializer,
                                   on_return=None,
                                   transport_options=transport_options)
                self.num_setup_failures = 0
                self.num_publish_failures = 0
            except Exception as c:
                sleep_duration = calc_backoff_timer(self.num_setup_failures)
                log.info(("SEND - Failed setup_routing with "
                          "exchange={} rk={} ex={} sleep seconds={}").format(
                              self.exchange.name, self.routing_key, c,
                              sleep_duration))
                self.num_setup_failures += 1
                self.state = "not_ready"
                time.sleep(sleep_duration)
            # end try/ex to setup the broker

            if self.state != "ready":
                log.info(("not in a ready state after "
                          "setup_routing - {} - stopping").format(
                              self.state.upper()))
                return msg_sent
        # end of initializing for the first time

        if not silent:
            log.info(("SEND - "
                      "exch={} rk={}").format(self.exchange.name,
                                              self.routing_key))

        # http://docs.celeryproject.org/projects/kombu/en/latest/_modules/kombu/messaging.html#Producer.publish
        # http://docs.celeryproject.org/projects/kombu/en/latest/reference/kombu.html#kombu.Exchange.delivery_mode
        try:
            self.producer.publish(body=body,
                                  exchange=self.exchange.name,
                                  routing_key=self.routing_key,
                                  auto_declare=True,
                                  serializer=self.serializer,
                                  priority=priority,
                                  delivery_mode=delivery_mode,
                                  expiration=ttl,
                                  retry=False,
                                  *args,
                                  **kwargs)
            msg_sent = True
            self.num_publish_failures = 0
        except Exception as e:
            msg_sent = False
            sleep_duration = calc_backoff_timer(self.num_publish_failures)
            log.info(("SEND - Failed publish with "
                      "exchange={} rk={} ex={} sleep seconds={}").format(
                          self.exchange.name, self.routing_key, e,
                          sleep_duration))
            self.num_publish_failures += 1
            self.state = "not_ready"
            time.sleep(sleep_duration)
        # end of try/ex publish

        if not silent:
            log.debug(("DONE - "
                       "exch={} queues={} sent={}").format(
                           self.state.upper(), self.exchange.name,
                           self.queue, msg_sent))

        return msg_sent
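# A minimal usage sketch for the Publisher class above, assuming the default
# ev() environment lookups resolve to a reachable broker; the exchange,
# routing key, and queue names are illustrative only.
pub = Publisher()
# setup_routing() runs lazily on the first publish while state != "ready".
sent = pub.publish(body={"msg": "hello"},
                   exchange="demo-exchange",
                   routing_key="demo.key",
                   queue="demo-queue")
print("published:", sent)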
def task_publish_to_core(self=None, publish_node=None):
    """task_publish_to_core

    :param self: parent task object for bind=True
    :param publish_node: dictionary to send to the AntiNex Core Worker
    """
    if settings.ANTINEX_WORKER_ENABLED:

        conn = None
        dataset = publish_node["body"].get("dataset", None)
        predict_rows = publish_node["body"].get("predict_rows", None)

        if not dataset and not predict_rows:
            log.info(("skipping antinex core publish body={} - "
                      "is missing dataset and predict_rows").format(
                          publish_node))
            return None
        # end of checking for supported requests to the core

        log.info(("task_publish_to_core - start req={}").format(
            str(publish_node)[0:32]))

        if not predict_rows:
            log.info(("building predict_rows from dataset={}").format(dataset))
            predict_rows = []
            predict_rows_df = pd.read_csv(dataset)
            for idx, org_row in predict_rows_df.iterrows():
                new_row = json.loads(org_row.to_json())
                new_row["idx"] = len(predict_rows) + 1
                predict_rows.append(new_row)
            # end of building predict rows

            publish_node["body"]["apply_scaler"] = True
            publish_node["body"]["predict_rows"] = pd.DataFrame(
                predict_rows).to_json()
        # end of validating

        publish_node["body"]["ml_type"] = \
            publish_node["body"]["manifest"]["ml_type"]

        log.debug(("NEXCORE - ssl={} exchange={} routing_key={}").format(
            settings.ANTINEX_SSL_OPTIONS, settings.ANTINEX_EXCHANGE_NAME,
            settings.ANTINEX_ROUTING_KEY))

        try:
            if settings.ANTINEX_WORKER_SSL_ENABLED:
                log.debug("connecting with ssl")
                conn = Connection(settings.ANTINEX_AUTH_URL,
                                  login_method="EXTERNAL",
                                  ssl=settings.ANTINEX_SSL_OPTIONS)
            else:
                log.debug("connecting without ssl")
                conn = Connection(settings.ANTINEX_AUTH_URL)
            # end of connecting

            conn.connect()

            log.debug("getting channel")
            channel = conn.channel()

            core_exchange = Exchange(settings.ANTINEX_EXCHANGE_NAME,
                                     type=settings.ANTINEX_EXCHANGE_TYPE,
                                     durable=True)

            log.debug("creating producer")
            producer = Producer(channel=channel,
                                auto_declare=True,
                                serializer="json")

            try:
                log.debug("declaring exchange")
                producer.declare()
            except Exception as k:
                log.error(("declare exchange failed with ex={}").format(k))
            # end of try to declare exchange which can fail if it exists

            core_queue = Queue(settings.ANTINEX_QUEUE_NAME,
                               core_exchange,
                               routing_key=settings.ANTINEX_ROUTING_KEY,
                               durable=True)

            try:
                log.debug("declaring queue")
                core_queue.maybe_bind(conn)
                core_queue.declare()
            except Exception as k:
                log.error(("declare queue={} routing_key={} failed with ex={}"
                           ).format(settings.ANTINEX_QUEUE_NAME,
                                    settings.ANTINEX_ROUTING_KEY, k))
            # end of try to declare queue which can fail if it exists

            log.info(
                ("publishing exchange={} routing_key={} persist={}").format(
                    core_exchange.name, settings.ANTINEX_ROUTING_KEY,
                    settings.ANTINEX_DELIVERY_MODE))

            producer.publish(body=publish_node["body"],
                             exchange=core_exchange.name,
                             routing_key=settings.ANTINEX_ROUTING_KEY,
                             auto_declare=True,
                             serializer="json",
                             delivery_mode=settings.ANTINEX_DELIVERY_MODE)
        except Exception as e:
            log.info(("Failed to publish to core req={} with ex={}").format(
                publish_node, e))
        # try/ex

        if conn:
            conn.release()

        log.info(("task_publish_to_core - done"))
    else:
        log.debug("core - disabled")
    # publish to the core if enabled

    return None
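# Based only on the keys the task above reads, a request dictionary might look
# like this sketch. The CSV path and manifest contents are placeholders; the
# keys assumed to matter are "dataset", "predict_rows", and "manifest.ml_type".
publish_node = {
    "body": {
        # Either a CSV path the task converts into predict_rows...
        "dataset": "/tmp/example-records.csv",
        # ...or pre-built rows; when both are missing the publish is skipped.
        "predict_rows": None,
        "manifest": {
            "ml_type": "classification"
        }
    }
}
task_publish_to_core(publish_node=publish_node)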