Example #1
 def test_manual_declare(self):
     channel = self.connection.channel()
     p = Producer(channel, self.exchange, auto_declare=False)
     self.assertTrue(p.exchange.is_bound)
     self.assertNotIn("exchange_declare", channel, "auto_declare=False does not declare exchange")
     p.declare()
     self.assertIn("exchange_declare", channel, "p.declare() declares exchange")
Example #2
 def test_manual_declare(self):
     channel = self.connection.channel()
     p = Producer(channel, self.exchange, auto_declare=False)
     self.assertTrue(p.exchange.is_bound)
     self.assertNotIn('exchange_declare', channel,
                      'auto_declare=False does not declare exchange')
     p.declare()
     self.assertIn('exchange_declare', channel,
                   'p.declare() declares exchange')
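The two tests above show the same behavior: with auto_declare=False nothing is declared until declare() is called. Below is a minimal standalone sketch of that flow (not taken from any project on this page; the memory:// broker and the exchange/queue names are illustrative), using kombu's in-memory transport.

from kombu import Connection, Exchange, Producer, Queue

# Hedged sketch: broker URL, exchange, and queue names are placeholders.
with Connection("memory://") as conn:
    channel = conn.channel()
    exchange = Exchange("demo.exchange", type="direct")
    queue = Queue("demo.queue", exchange, routing_key="demo")

    producer = Producer(channel, exchange, auto_declare=False)
    producer.declare()      # the exchange is only declared by this call
    queue.maybe_bind(conn)
    queue.declare()         # declares the queue and binds it to the exchange

    producer.publish({"hello": "world"},
                     routing_key="demo",
                     serializer="json")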
Example #3
 def test_manual_declare(self):
     channel = self.connection.channel()
     p = Producer(channel, self.exchange, auto_declare=False)
     assert p.exchange.is_bound
     # auto_declare=False does not declare exchange
     assert 'exchange_declare' not in channel
     # p.declare() declares exchange
     p.declare()
     assert 'exchange_declare' in channel
Example #5
 def send(self, message):
     print("Send called", message)
     with self._create_connection() as connection:
         producer = Producer(connection,
                             exchange=self._exchange,
                             routing_key=Config.rabbit_mq_routing_key)
         payload = message.payload()
         producer.declare()
         producer.publish(payload,
                          exchange=self._exchange,
                          routing_key=Config.rabbit_mq_routing_key)
Example #6
connection = Connection(auth_url,
                        heartbeat=60,
                        transport_options=transport_options)

# noqa http://docs.celeryproject.org/projects/kombu/en/latest/userguide/producers.html
print("getting channel")
channel = connection.channel()

exchange_type = "topic"
exchange = Exchange("east-coast", type=exchange_type, durable=True)

print("creating producer")
producer = Producer(channel=channel, auto_declare=True, serializer="json")

print("declaring producer")
producer.declare()

print(("declaring exchange={}").format(exchange.name))

# noqa http://docs.celeryproject.org/projects/kombu/en/latest/reference/kombu.html#queue
queues = [
    Queue("us.east.charlotte",
          exchange,
          routing_key="us.east.charlotte",
          durable=True),
    Queue("us.east.newyork",
          exchange,
          routing_key="us.east.newyork",
          durable=False)
]
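The excerpt above ends after building the queue list. A hedged continuation (not part of the original example), following the maybe_bind()/declare() pattern used elsewhere on this page, would bind and declare each queue and then publish through the producer.

# Hypothetical continuation: declare the queues, then route a test message.
for queue in queues:
    queue.maybe_bind(connection)
    queue.declare()

producer.publish({"status": "ok"},
                 exchange=exchange,
                 routing_key="us.east.charlotte",
                 serializer="json")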
Example #7
 def send(self):
     with self.make_connection() as connection:
         producer = Producer(connection,
                             exchange=self.exchange,
                             routing_key=self.routing_key)
         producer.declare()
         producer.publish(self.payload)
Example #8
def task_publish_to_core(self=None, publish_node=None):
    """task_publish_to_core

    :param self: parent task object for bind=True
    :param publish_node: dictionary to send to the AntiNex Core Worker
    """
    if settings.ANTINEX_WORKER_ENABLED:

        conn = None
        dataset = publish_node["body"].get("dataset", None)
        predict_rows = publish_node["body"].get("predict_rows", None)

        if not dataset and not predict_rows:
            log.info(
                ("skipping antinex core publish body={} - "
                 "is missing dataset and predict_rows").format(publish_node))
            return None
        # end of checking for supported requests to the core

        log.info(("task_publish_to_core - start req={}").format(
            str(publish_node)[0:32]))

        if not predict_rows:
            log.info(("building predict_rows from dataset={}").format(dataset))
            predict_rows = []
            predict_rows_df = pd.read_csv(dataset)
            for idx, org_row in predict_rows_df.iterrows():
                new_row = json.loads(org_row.to_json())
                new_row["idx"] = len(predict_rows) + 1
                predict_rows.append(new_row)
            # end of building predict rows

            publish_node["body"]["apply_scaler"] = True
            publish_node["body"]["predict_rows"] = pd.DataFrame(
                predict_rows).to_json()
        # end of validating

        publish_node["body"]["ml_type"] = \
            publish_node["body"]["manifest"]["ml_type"]

        log.debug(("NEXCORE - ssl={} exchange={} routing_key={}").format(
            settings.ANTINEX_SSL_OPTIONS, settings.ANTINEX_EXCHANGE_NAME,
            settings.ANTINEX_ROUTING_KEY))

        try:
            if settings.ANTINEX_WORKER_SSL_ENABLED:
                log.debug("connecting with ssl")
                conn = Connection(settings.ANTINEX_AUTH_URL,
                                  login_method="EXTERNAL",
                                  ssl=settings.ANTINEX_SSL_OPTIONS)
            else:
                log.debug("connecting without ssl")
                conn = Connection(settings.ANTINEX_AUTH_URL)
            # end of connecting

            conn.connect()

            log.debug("getting channel")
            channel = conn.channel()

            core_exchange = Exchange(settings.ANTINEX_EXCHANGE_NAME,
                                     type=settings.ANTINEX_EXCHANGE_TYPE,
                                     durable=True)

            log.debug("creating producer")
            producer = Producer(channel=channel,
                                auto_declare=True,
                                serializer="json")

            try:
                log.debug("declaring exchange")
                producer.declare()
            except Exception as k:
                log.error(("declare exchange failed with ex={}").format(k))
            # end of try to declare exchange which can fail if it exists

            core_queue = Queue(settings.ANTINEX_QUEUE_NAME,
                               core_exchange,
                               routing_key=settings.ANTINEX_ROUTING_KEY,
                               durable=True)

            try:
                log.debug("declaring queue")
                core_queue.maybe_bind(conn)
                core_queue.declare()
            except Exception as k:
                log.error(("declare queue={} routing_key={} failed with ex={}"
                           ).format(settings.ANTINEX_QUEUE_NAME,
                                    settings.ANTINEX_ROUTING_KEY, k))
            # end of try to declare queue which can fail if it exists

            log.info(
                ("publishing exchange={} routing_key={} persist={}").format(
                    core_exchange.name, settings.ANTINEX_ROUTING_KEY,
                    settings.ANTINEX_DELIVERY_MODE))

            producer.publish(body=publish_node["body"],
                             exchange=core_exchange.name,
                             routing_key=settings.ANTINEX_ROUTING_KEY,
                             auto_declare=True,
                             serializer="json",
                             delivery_mode=settings.ANTINEX_DELIVERY_MODE)

        except Exception as e:
            log.info(("Failed to publish to core req={} with ex={}").format(
                publish_node, e))
        # try/ex

        if conn:
            conn.release()

        log.info(("task_publish_to_core - done"))
    else:
        log.debug("core - disabled")
    # publish to the core if enabled

    return None
Example #9
class Publisher:
    def __init__(self,
                 name=ev("PUBLISHER_NAME", "kombu-publisher"),
                 auth_url=ev("BROKER_URL", "redis://localhost:6379/0"),
                 ssl_options={},
                 max_general_failures=-1):  # infinite retries
        """
        Available Transports:
        https://github.com/celery/kombu#transport-comparison
        """

        self.state = "not_ready"
        self.name = name
        self.auth_url = auth_url
        self.ssl_options = ssl_options

        self.exchange = None
        self.queue = None
        self.declare_entities = []
        self.conn = None
        self.channel = None
        self.producer = None
        self.num_setup_failures = 0
        self.num_publish_failures = 0
        self.max_general_failures = max_general_failures

        self.exchange_name = ""
        self.exchange_type = "direct"
        self.queue_name = ""
        self.routing_key = ""
        self.serializer = "json"

    # end of __init__

    def setup_routing(self,
                      exchange_name,
                      queue_name,
                      routing_key,
                      serializer="json",
                      on_return=None,
                      transport_options={},
                      *args,
                      **kwargs):

        self.exchange_name = exchange_name
        self.exchange = None
        self.routing_key = routing_key
        self.queue_name = queue_name
        self.serializer = serializer

        if self.routing_key:
            log.debug(("creating Exchange={} topic rk={}").format(
                self.exchange_name, self.routing_key))
            self.exchange_type = "topic"
        else:
            log.debug(
                ("creating Exchange={} direct").format(self.exchange_name))
            self.exchange_type = "direct"
        # end of if/else

        self.exchange = Exchange(self.exchange_name, type=self.exchange_type)

        self.queue = Queue(self.queue_name,
                           exchange=self.exchange,
                           routing_key=self.routing_key)

        self.declare_entities = [self.exchange, self.queue]

        # https://redis.io/topics/security
        #
        # Redis does not support encryption, but I would like to try out ssl-termination
        # using a haproxy/nginx container running as an ssl-proxy to see if this works.

        # import ssl
        # Connection("amqp://", login_method='EXTERNAL', ssl={
        #               "ca_certs": '/etc/pki/tls/certs/something.crt',
        #               "keyfile": '/etc/something/system.key',
        #               "certfile": '/etc/something/system.cert',
        #               "cert_reqs": ssl.CERT_REQUIRED,
        #          })
        #
        self.conn = Connection(self.auth_url,
                               transport_options=transport_options)

        self.channel = self.conn.channel()

        log.debug(("creating kombu.Producer broker={} "
                   "ssl={} ex={} rk={} serializer={}").format(
                       self.auth_url, self.ssl_options, self.exchange_name,
                       self.routing_key, self.serializer))

        self.producer = Producer(channel=self.channel,
                                 exchange=self.exchange,
                                 routing_key=self.routing_key,
                                 auto_declare=True,
                                 serializer=self.serializer,
                                 on_return=None,
                                 *args,
                                 **kwargs)

        log.debug("creating kombu.Exchange={}".format(self.exchange))
        self.producer.declare()

        log.debug("creating kombu.Queue={}".format(self.queue_name))
        self.queue.maybe_bind(self.conn)
        self.queue.declare()

        self.state = "ready"

    # end of setup_routing

    def publish(
            self,
            body,
            exchange,
            routing_key,
            queue,
            priority=0,
            ttl=None,
            delivery_mode=2,  # 1 - transient, 2 - persistent
            serializer="json",
            retry=True,
            silent=False,
            transport_options={},
            *args,
            **kwargs):
        """
        Redis does not have exchanges or routing keys, but RabbitMQ does.

        Redis producers use only the queue name to both publish and consume messages:
        http://docs.celeryproject.org/en/latest/getting-started/brokers/redis.html#configuration
        """

        msg_sent = False

        if self.state != "ready":
            try:
                self.setup_routing(exchange_name=exchange,
                                   queue_name=queue,
                                   routing_key=routing_key,
                                   serializer=serializer,
                                   on_return=None,
                                   transport_options=transport_options)
                self.num_setup_failures = 0
                self.num_publish_failures = 0
            except Exception as c:
                sleep_duration = calc_backoff_timer(self.num_setup_failures)
                log.info(("SEND - Failed setup_routing with"
                          "exchange={} rk={} ex={} sleep seconds={}").format(
                              self.exchange.name, self.routing_key, c,
                              sleep_duration))
                self.num_setup_failures += 1
                self.state = "not_ready"
                time.sleep(sleep_duration)
            # end try/ex to setup the broker

            if self.state != "ready":
                log.info(("not in a ready state after "
                          "setup_routing - {} - stopping").format(
                              self.state.upper()))
                return msg_sent
        # end of initializing for the first time

        if not silent:
            log.info(("SEND - "
                      "exch={} rk={}").format(self.exchange.name,
                                              self.routing_key))

        # http://docs.celeryproject.org/projects/kombu/en/latest/_modules/kombu/messaging.html#Producer.publish
        # http://docs.celeryproject.org/projects/kombu/en/latest/reference/kombu.html#kombu.Exchange.delivery_mode
        try:
            self.producer.publish(body=body,
                                  exchange=self.exchange.name,
                                  routing_key=self.routing_key,
                                  auto_declare=True,
                                  serializer=self.serializer,
                                  priority=priority,
                                  delivery_mode=delivery_mode,
                                  expiration=ttl,
                                  retry=False,
                                  *args,
                                  **kwargs)
            msg_sent = True
            self.num_publish_failures = 0
        except Exception as e:
            msg_sent = False
            sleep_duration = calc_backoff_timer(self.num_publish_failures)
            log.info(("SEND - Failed publish with"
                      "exchange={} rk={} ex={} sleep seconds={}").format(
                          self.exchange.name, self.routing_key, e,
                          sleep_duration))
            self.num_publish_failures += 1
            self.state = "not_ready"
            time.sleep(sleep_duration)
        # end of try/ex publish

        if not silent:
            log.debug(
                ("DONE - "
                 "state={} exch={} queues={} sent={}").format(
                     self.state.upper(), self.exchange.name,
                     self.queue, msg_sent))

        return msg_sent
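A hedged usage sketch for the Publisher class above (the broker URL, exchange, routing key, and queue names are placeholders; it assumes the module's ev, log, and calc_backoff_timer helpers are importable and that a broker is reachable at BROKER_URL).

# Illustrative only: none of these values come from the original project.
pub = Publisher(name="example-publisher",
                auth_url="redis://localhost:6379/0")

sent = pub.publish(body={"msg": "hello world"},
                   exchange="example.exchange",
                   routing_key="example.routing.key",
                   queue="example.queue",
                   silent=True)

print("published={}".format(sent))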