Example No. 1
    def __init__(self, uuid, exchange_name, on_message, on_wait=None,
                 **kwargs):
        self._uuid = uuid
        self._exchange_name = exchange_name
        self._on_message = on_message
        self._on_wait = on_wait
        self._running = threading.Event()
        self._url = kwargs.get('url')
        self._transport = kwargs.get('transport')
        self._transport_opts = kwargs.get('transport_options')

        # create connection
        self._conn = kombu.Connection(self._url, transport=self._transport,
                                      transport_options=self._transport_opts)

        # create exchange
        self._exchange = kombu.Exchange(name=self._exchange_name,
                                        channel=self._conn,
                                        durable=False,
                                        auto_delete=True)
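
A possible next step for a wrapper like this is a consume loop that declares a queue on the exchange built in __init__ and dispatches to the stored callbacks. The method below is only a hedged sketch assembled from the attributes shown above; the method name, queue naming, and timeout are illustrative and not part of the original class.

    def _consume_sketch(self):
        # Hypothetical sketch, not from the original source: declare a queue
        # on the exchange created above and drain events while running.
        import socket  # drain_events signals a timeout via socket.timeout

        queue = kombu.Queue(name=self._uuid, exchange=self._exchange,
                            channel=self._conn, durable=False,
                            auto_delete=True)
        queue.declare()
        self._running.set()
        with kombu.Consumer(self._conn, queues=[queue],
                            on_message=self._on_message):
            while self._running.is_set():
                try:
                    self._conn.drain_events(timeout=1)
                except socket.timeout:
                    if self._on_wait is not None:
                        self._on_wait()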
Example No. 2
    def run(connection, args):
        exchange = kombu.Exchange(name='villas', type='headers')

        headers = SimulatorCommand.get_headers(args)
        headers['x-match'] = 'any' if len(headers) > 0 else 'all'

        queue = kombu.Queue(exchange=exchange,
                            binding_arguments=headers,
                            durable=False)

        consumer = kombu.Consumer(connection,
                                  queues=queue,
                                  on_message=MonitorCommand.on_message)

        try:
            with consumer:
                while True:
                    connection.drain_events()
        except KeyboardInterrupt:
            pass
Example No. 3
    def start(self):
        self._browsing_threads = set()
        self._browsing_threads_lock = threading.Lock()

        self._exchange = kombu.Exchange(name=self.exchange_name,
                                        type='direct',
                                        durable=True)

        self._reconnect_requested = False

        self._producer = None
        self._producer_lock = threading.Lock()
        with self._producer_lock:
            self._producer_conn = kombu.Connection(self.amqp_url)
            self._producer = self._producer_conn.Producer(serializer='json')

        self._consumer_thread = threading.Thread(target=self._consume_amqp,
                                                 name='AmqpConsumerThread')
        self._consumer_stop = threading.Event()
        self._consumer_thread.start()
Example No. 4
    def __init__(self,
                 url=None,
                 prefix='asgi:',
                 expiry=60,
                 group_expiry=86400,
                 capacity=100,
                 channel_capacity=None):
        super(AMQPChannelLayer, self).__init__(
            expiry=expiry,
            group_expiry=group_expiry,
            capacity=capacity,
            channel_capacity=channel_capacity,
        )

        kombu.serialization.enable_insecure_serializers()

        self.url = url or 'amqp://*****:*****@localhost:5672/%2F'
        self.prefix = prefix + 'tower:{}'.format(socket.gethostname())
        self.exchange = kombu.Exchange(self.prefix, type='topic')

        self.tdata = threading.local()
Example No. 5
def notify_radius(state, priority) -> int:
    config = get_config(runtime_checks=True)
    queue_name = config.HADES_CELERY_NODE_QUEUE
    exchange_name = config.HADES_CELERY_RPC_EXCHANGE
    exchange_type = config.HADES_CELERY_RPC_EXCHANGE_TYPE
    routing_key = config.HADES_CELERY_SITE_ROUTING_KEY
    exchange = kombu.Exchange(exchange_name, exchange_type)
    with closing(app.connection(connect_timeout=1)) as connection:
        queue = app.amqp.queues[queue_name]
        bound_queue = queue.bind(connection.default_channel)
        if state == 'MASTER':
            logger.info("Binding site node queue %s to RPC exchange %s "
                        "with site routing key %s",
                        queue_name, exchange_name, routing_key)
            bound_queue.bind_to(exchange=exchange, routing_key=routing_key)
        else:
            logger.info("Unbinding site node queue %s from RPC exchange %s "
                        "with site routing key %s",
                        queue_name, exchange_name, routing_key)
            bound_queue.unbind_from(exchange=exchange, routing_key=routing_key)
    return 0
Example No. 6
    def _hub_bootstrap(self):
        with connections[self.conn].acquire(block=True) as conn:
            exchange = kombu.Exchange(
                name=self.exchange,
                type=self.transport,
                durable=True,
                channel=conn,
            )
            exchange.declare()
            if self.transport == ETransport.DIRECT:
                q = 'sameQ'
            queue = kombu.Queue(name=q,
                                exchange=exchange,
                                routing_key=self.rkey,
                                channel=conn,
                                message_ttl=600,
                                queue_arguments={'x-queue-type': 'classic'},
                                durable=True)
            queue.declare()
            # Runs forever, consuming messages.
            C(conn, [queue], self._handle).run()
Example No. 7
def pulse_consumer(connection, exchange, request):
    exchange_name = 'exchange/treeherder/v1/{}'.format(exchange)

    exchange = kombu.Exchange(name=exchange_name, type='topic')

    queue = kombu.Queue(
        no_ack=True,
        exchange=exchange,  # Exchange to bind to
        routing_key='#',  # Bind to all messages
        auto_delete=True,  # Delete after each test
        exclusive=False)  # Allow multiple consumers

    simpleQueue = connection.SimpleQueue(name=queue,
                                         channel=connection,
                                         no_ack=True)

    def fin():
        connection.release()

    request.addfinalizer(fin)
    return simpleQueue
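
Assuming the function above is registered as a pytest fixture (and that `connection` and `exchange` fixtures exist), a test could read messages through the returned SimpleQueue roughly like this hedged sketch; the test name and timeout are illustrative.

# Hypothetical usage sketch, not from the original source.
def test_message_is_received(pulse_consumer):
    message = pulse_consumer.get(block=True, timeout=5)
    assert message.payload is not None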
Example No. 8
    def run(connection, args):
        channel = connection.channel()

        exchange = kombu.Exchange('villas', type='headers', durable=True)

        producer = kombu.Producer(channel, exchange=exchange)
        consumer = kombu.Consumer(channel,
                                  queues=kombu.Queue(exchange=exchange,
                                                     durable=False),
                                  on_message=SimulatorPingCommand.on_message)

        message = {'action': 'ping'}

        producer.publish(message, headers=SimulatorCommand.get_headers(args))

        with consumer:
            try:
                while True:
                    connection.drain_events(timeout=10)
            except socket.timeout:
                pass
Example No. 9
    def publish(self, exchange_name, routing_key, payload):
        with self.connection as connection:
            if not connection.connected:
                connection.connect()

            exchange = kombu.Exchange(exchange_name, type="topic")
            message = {
                "payload": payload,
                "_meta": {
                    "exchange": exchange_name,
                    "routing_key": routing_key,
                    "serializer": "json",
                    "sent": datetime.datetime.utcnow().isoformat()
                },
            }

            producer = connection.Producer(exchange=exchange,
                                           routing_key=routing_key,
                                           serializer="json")
            producer.publish(message)
            connection.close()
Example No. 10
    def _setup_connection(self):
        """Returns True if a valid connection exists already, or if one can be
        created."""

        if self.conn:
            return True

        id_conf = read_conf(ID_CONF_FILE_NAME)

        # The identity.yaml file contains either a single string variable
        # 'rabbit_host', or a comma-separated list in the plural variable
        # 'rabbit_hosts'.
        host = None
        hosts = id_conf.get('rabbit_hosts', None)
        if hosts is not None:
            host = hosts.split(",")[0]
        else:
            host = id_conf.get('rabbit_host', None)

        if host is None:
            log.warning("no host info in configuration, can't set up rabbit.")
            return False

        try:
            url = "amqp://{}:{}@{}/{}".format(id_conf['rabbit_userid'],
                                              id_conf['rabbit_password'], host,
                                              id_conf['rabbit_virtual_host'])

            self.conn = kombu.BrokerConnection(url)
            self.exchange = kombu.Exchange("glance-simplestreams-sync-status")
            status_queue = kombu.Queue("glance-simplestreams-sync-status",
                                       exchange=self.exchange)

            status_queue(self.conn.channel()).declare()

        except:
            log.exception("Exception during kombu setup")
            return False

        return True
Example No. 11
    def __init__(self, rabbit_ip, rabbit_port, rabbit_user, rabbit_password,
                 rabbit_vhost, rabbit_ha_mode, q_name, subscribe_cb, logger,
                 **kwargs):
        self._rabbit_ip = rabbit_ip
        self._rabbit_port = rabbit_port
        self._rabbit_user = rabbit_user
        self._rabbit_password = rabbit_password
        self._rabbit_vhost = rabbit_vhost
        self._subscribe_cb = subscribe_cb
        self._logger = logger
        self._publish_queue = Queue()
        self._conn_lock = Semaphore()

        self.obj_upd_exchange = kombu.Exchange('vnc_config.object-update', 'fanout',
                                               durable=False)
        self._ssl_params = self._fetch_ssl_params(**kwargs)

        # Register a handler for SIGTERM so that we can release the lock
        # Without it, it can take several minutes before new master is elected
        # If any app using this wants to register their own sigterm handler,
        # then we will have to modify this function to perhaps take an argument
        gevent.signal(signal.SIGTERM, self.sigterm_handler)
Example No. 12
    def publish(self, exchange_name, routing_key, payload):
        with self.connection as connection:
            if not connection.connected:
                connection.connect()

            exchange = kombu.Exchange(exchange_name, type='topic')
            message = {
                'payload': payload,
                '_meta': {
                    'exchange': exchange_name,
                    'routing_key': routing_key,
                    'serializer': 'json',
                    'sent': datetime.datetime.utcnow().isoformat()},
            }

            producer = connection.Producer(
                exchange=exchange,
                routing_key=routing_key,
                serializer='json',
            )
            producer.publish(message)
            connection.close()
Example No. 13
    def _configureKombu(self):
        """
        Configure kombu for rabbitmq
        """
        try:
            connString = 'amqp://{0}:{1}@{2}:{3}//'.format(
                RABBITMQ['mquser'], RABBITMQ['mqpassword'],
                RABBITMQ['mqserver'], RABBITMQ['mqport'])
            self.mqConn = kombu.Connection(connString)

            self.alertExchange = kombu.Exchange(name=RABBITMQ['alertexchange'],
                                                type='topic',
                                                durable=True)
            self.alertExchange(self.mqConn).declare()
            alertQueue = kombu.Queue(RABBITMQ['alertqueue'],
                                     exchange=self.alertExchange)
            alertQueue(self.mqConn).declare()
            self.mqproducer = self.mqConn.Producer(serializer='json')
            self.log.debug('Kombu configured')
        except Exception as e:
            self.log.error(
                'Exception while configuring kombu for alerts: {0}'.format(e))
Example No. 14
    def declare(self, queue, exchange='', enable_ha=False, ttl=0):
        self._check_exception()
        if not self._connected:
            raise RuntimeError('Not connected to RabbitMQ')

        queue_arguments = {}
        if enable_ha is True:
            # To use mirrored queues feature in RabbitMQ 2.x
            # we need to declare this policy on the queue itself.
            #
            # Warning: this option has no effect on RabbitMQ 3.X,
            # to enable mirrored queues feature in RabbitMQ 3.X, please
            # configure RabbitMQ.
            queue_arguments['x-ha-policy'] = 'all'
        if ttl > 0:
            queue_arguments['x-expires'] = ttl

        exchange = kombu.Exchange(exchange, type='direct', durable=True)
        queue = kombu.Queue(queue, exchange, queue, durable=False,
                            queue_arguments=queue_arguments)
        bound_queue = queue(self._connection)
        bound_queue.declare()
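
For reference, a hedged sketch of the equivalent standalone kombu calls made by declare() above, without the class plumbing; the broker URL, exchange, and queue names are illustrative only.

import kombu

conn = kombu.Connection('amqp://guest:guest@localhost:5672//')  # illustrative URL
exchange = kombu.Exchange('work', type='direct', durable=True)
queue = kombu.Queue('work', exchange, 'work', durable=False,
                    queue_arguments={'x-expires': 60000})  # x-expires is in milliseconds
queue(conn.channel()).declare()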
Example No. 15
def _create_exchange_and_queue(queue_name):
    """
    Create a Kombu message Exchange and Queue.

    Args:
        queue_name (str): The target queue's name

    Returns:
        tuple(kombu.Exchange, kombu.Queue)

    """
    exchange = kombu.Exchange(
        settings.QUEUE_EXCHANGE_NAME,
        'direct',
        durable=True
    )
    message_queue = kombu.Queue(
        queue_name,
        exchange=exchange,
        routing_key=queue_name
    )
    return exchange, message_queue
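
A hedged usage sketch of the helper above; the queue name, broker URL, and payload are made up for illustration, while the exchange name still comes from settings.QUEUE_EXCHANGE_NAME.

# Illustrative only: publish one JSON message through the pair returned by
# the helper, declaring the queue on first use.
exchange, message_queue = _create_exchange_and_queue('example-queue')
with kombu.Connection('amqp://guest:guest@localhost:5672//') as conn:
    producer = conn.Producer(serializer='json')
    producer.publish({'hello': 'world'},
                     exchange=exchange,
                     routing_key=message_queue.routing_key,
                     declare=[message_queue])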
Example No. 16
    def publish(self,
                message: Union[dict, str],
                headers: dict = None,
                exchange: str = 'transport',
                routing_key: str = '*',
                exchange_type: Exchange_Type = 'direct') -> None:
        """
        Publish a message to the AMQP queue
        :param message: message to be published
        :param headers: header key-values to publish with the message
        :param exchange: name of the exchange to publish the message to
        :param routing_key: determines which queue the message is published to
        :param exchange_type: Type of the exchange - ['direct', 'fanout', 'headers', 'topic']
        """
        self._conn.connect()
        producer = kombu.Producer(self._conn.channel())
        producer.publish(message,
                         headers=headers or {},
                         exchange=kombu.Exchange(exchange, type=exchange_type),
                         routing_key=routing_key)
        producer.close()
        self._conn.release()
Example No. 17
    def _publish_consume(self):
        results = []

        def process_message(body, message):
            results.append(body)
            message.ack()

        task_queue = kombu.Queue('tasks',
                                 kombu.Exchange('tasks'),
                                 routing_key='tasks')
        to_publish = {'hello': 'world'}
        self.producer.publish(to_publish,
                              exchange=task_queue.exchange,
                              routing_key=task_queue.routing_key,
                              declare=[task_queue])

        with kombu.Consumer(self.conn, [task_queue],
                            accept=['json'],
                            callbacks=[process_message]) as consumer:
            Pin.override(consumer, service='kombu-patch', tracer=self.tracer)
            self.conn.drain_events(timeout=2)

        self.assertEqual(results[0], to_publish)
Example No. 18
    def __init__(self, hosts, queue, routing_key, exchange_type='direct',
                 serializer='json', compression='zlib'):
        self.hosts = hosts
        self.queue = queue
        self.routing_key = routing_key
        self.exchange_type = exchange_type
        self.serializer = serializer
        self.compression = compression
        self.connection = kombu.Connection(hosts)
        self.connection.ensure_connection()
        self.connection.connect()
        self.channel = self.connection.channel()

        self.exchange = kombu.Exchange(self.queue, type=exchange_type)
        #self.queue = kombu.Queue(self.queue, self.exchange, \
        #    routing_key=self.routing_key)

        self.producer = kombu.Producer(self.channel, self.exchange,
                                       routing_key=self.routing_key,
                                       serializer=self.serializer,
                                       compression=self.compression,
                                       on_return=on_return_callback)
Example No. 19
    def publish(self, message: Union[dict, str] = "", headers: dict = None, exchange: str = EXCHANGE, routing_key: str = ROUTING_KEY):
        """
        Publish a message to the AMQP queue
        :param message: message to be published
        :param headers: header key-values to publish with the message
        :param exchange: name of the exchange to publish the message to
        :param routing_key: determines which queue the message is published to
        """
        self._conn.connect()
        queue = kombu.Queue(routing_key, kombu.Exchange(exchange, type="topic"), routing_key=routing_key)
        queue.maybe_bind(self._conn)
        queue.declare()

        producer = kombu.Producer(self._conn.channel())
        producer.publish(
            message,
            headers=headers or {},
            exchange=queue.exchange,
            routing_key=queue.routing_key,
            declare=[queue]
        )
        producer.close()
        self._conn.release()
Example No. 20
def reload_data(instance_config, job_id):
    """ reload data on all kraken of this instance"""
    job = models.Job.query.get(job_id)
    instance = job.instance
    logging.info("Unqueuing job {}, reload data of instance {}".format(job.id, instance.name))
    logger = get_instance_logger(instance)
    try:
        task = navitiacommon.task_pb2.Task()
        task.action = navitiacommon.task_pb2.RELOAD

        connection = kombu.Connection(current_app.config['CELERY_BROKER_URL'])
        exchange = kombu.Exchange(instance_config.exchange, 'topic',
                                  durable=True)
        producer = connection.Producer(exchange=exchange)

        logger.info("reload kraken")
        producer.publish(task.SerializeToString(),
                routing_key=instance.name + '.task.reload')
        connection.release()
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
Example No. 21
    def __init__(
        self,
        url,
        manager_name,
        connect_ssl=None,
        timeout=DEFAULT_TIMEOUT,
        publish_kwds={},
    ):
        """
        """
        if not kombu:
            raise Exception(KOMBU_UNAVAILABLE)
        self.__url = url
        self.__manager_name = manager_name
        self.__connect_ssl = connect_ssl
        self.__exchange = kombu.Exchange(DEFAULT_EXCHANGE_NAME, DEFAULT_EXCHANGE_TYPE)
        self.__timeout = timeout
        # Be sure to log message publishing failures.
        if publish_kwds.get("retry", False):
            if "retry_policy" not in publish_kwds:
                publish_kwds["retry_policy"] = {}
            if "errback" not in publish_kwds["retry_policy"]:
                publish_kwds["retry_policy"]["errback"] = self.__publish_errback
        self.__publish_kwds = publish_kwds
Example No. 22
    def __init__(self,
                 url,
                 queue,
                 headers,
                 allowed,
                 username='******',
                 password='******',
                 **kwargs):
        super(KombuPublisher, self).__init__(allowed, **kwargs)
        self._url = url
        self.queue = queue
        self._headers = headers
        self.username = username
        self.password = password
        self.exchange = kombu.Exchange(name='amq.direct', type='direct')
        self._queue = kombu.Queue(name=queue,
                                  exchange=self.exchange,
                                  routing_key=queue)
        self.connection = kombu.Connection(self._url,
                                           userid=self.username,
                                           password=self.password)
        self.producer = kombu.Producer(self.connection,
                                       routing_key=self.queue,
                                       exchange=self.exchange)
Example No. 23
def amqp_publish_user(owner, routing_key, data):
    with kombu.Connection(config.BROKER_URL) as connection:
        channel = connection.channel()
        try:
            kombu.Producer(channel).publish(
                data, exchange=kombu.Exchange(_amqp_owner_exchange(owner)),
                routing_key=routing_key, serializer='json', retry=True
            )
            started_at = time()
            while True:
                try:
                    connection.drain_events(timeout=0.5)
                except AmqpNotFound:
                    raise
                except:
                    pass
                if time() - started_at >= 0.5:
                    break
        except AmqpNotFound:
            return False
        else:
            return True
        finally:
            channel.close()
Example No. 24
    def test_user_published(self):
        idm_broker_config = apps.get_app_config('forsta_broker')
        with idm_broker_config.broker.acquire(block=True) as conn:
            queue = kombu.Queue(exclusive=True).bind(conn)
            queue.declare()
            queue.bind_to(exchange=kombu.Exchange('idm.auth.user'),
                          routing_key='#')
            connection = transaction.get_connection()
            self.assertFalse(connection.in_atomic_block)
            with transaction.atomic():
                user = User.objects.create(identity_id=uuid.uuid4(),
                                           primary=True,
                                           is_active=True)
            for i in range(5):
                message = queue.get()
                if message:
                    break
                time.sleep(0.1)
            self.assertIsInstance(message, Message)
            self.assertEqual(message.delivery_info['routing_key'],
                             'User.created.{}'.format(str(user.id)))
            self.assertEqual(message.content_type, 'application/json')
            self.assertEqual(
                json.loads(message.body.decode())['@type'], 'User')
Example No. 25
def get_listener(conn, userid, exchanges=None, extra_data=None, logger=None):
    """Obtain a Pulse consumer that can handle received messages.

    Returns a ``Listener`` instance bound to listen to the requested exchanges.
    Callers should use ``add_callback`` to register functions
    that will be called when a message is received.

    The callback functions receive one argument ``body``, the decoded message body.
    """
    queues = []

    if exchanges is None:
        raise ValueError("No exchanges supplied")

    for queue_name, exchange_name, key_name in exchanges:
        queue_name = 'queue/%s/%s' % (userid, queue_name)

        exchange = kombu.Exchange(exchange_name, type='topic',
                                  channel=conn)
        exchange.declare(passive=True)

        queue = kombu.Queue(name=queue_name,
                            exchange=exchange,
                            durable=True,
                            routing_key=key_name,
                            exclusive=False,
                            auto_delete=False,
                            channel=conn,
                            extra_data=extra_data)
        queues.append(queue)
        # queue.declare() declares the exchange, which isn't allowed by the
        # server. So call the low-level APIs to only declare the queue itself.
        queue.queue_declare()
        queue.queue_bind()

    return Listener(conn, [item[1] for item in exchanges], queues, logger)
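
A hedged usage sketch of get_listener(); the broker URL, user id, and exchange name are illustrative, and add_callback is the registration method mentioned in the docstring above.

import kombu

# Illustrative only: each exchanges entry is (queue_name, exchange_name, routing_key).
conn = kombu.Connection('amqps://user:pass@pulse.example.org:5671//')

def handle_body(body):
    print('got message:', body)

listener = get_listener(conn, 'my-pulse-user',
                        exchanges=[('builds', 'exchange/build/v1/finished', '#')])
listener.add_callback(handle_body)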
Example No. 26
    def _make_exchange(name,
                       durable=False,
                       auto_delete=True,
                       exchange_type='topic'):
        """Make named exchange.

        This method creates object representing exchange on RabbitMQ. It would
        create a new exchange if exchange with given name don't exists.

        :param name: Name of the exchange.
        :param durable: If set to True, messages on this exchange would be
                        store on disk - therefore can be retrieve after
                        failure.
        :param auto_delete: If set to True, exchange would be automatically
                            deleted when none is connected.
        :param exchange_type: Type of the exchange. Can be one of 'direct',
                              'topic', 'fanout', 'headers'. See Kombu docs for
                              further details.
        :return: Kombu exchange object.
        """
        return kombu.Exchange(name=name,
                              type=exchange_type,
                              durable=durable,
                              auto_delete=auto_delete)
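
A hedged usage sketch of _make_exchange(), assuming it is reachable as a staticmethod or module-level helper; the exchange name and broker URL are illustrative. As in Example No. 13 above, the unbound exchange can be bound and declared by calling it with a connection.

# Illustrative only.
exchange = _make_exchange('notifications', durable=True, exchange_type='fanout')
with kombu.Connection('amqp://guest:guest@localhost:5672//') as conn:
    exchange(conn).declare()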
Example No. 27
def amqp_subscribe(exchange,
                   callback,
                   queue='',
                   ex_type='fanout',
                   routing_keys=None,
                   durable=False,
                   auto_delete=True):
    with kombu.pools.connections[kombu.Connection(config.BROKER_URL)].acquire(
            block=True, timeout=10) as connection:
        exchange = kombu.Exchange(exchange,
                                  type=ex_type,
                                  durable=durable,
                                  auto_delete=auto_delete)
        if not routing_keys:
            queue = kombu.Queue(queue, exchange, exclusive=True)
        else:
            queue = kombu.Queue(queue, [
                kombu.binding(exchange, routing_key=key)
                for key in routing_keys
            ],
                                exclusive=True)
        with connection.Consumer([queue], callbacks=[callback], no_ack=True):
            while True:
                connection.drain_events()
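
The callbacks passed to amqp_subscribe() receive the usual kombu (body, message) pair; because the consumer is created with no_ack=True, there is nothing to acknowledge. A hedged sketch with illustrative exchange and routing-key names (note that the call blocks, draining events until interrupted):

# Illustrative only.
def on_event(body, message):
    print('received:', body)

amqp_subscribe('machines', on_event, ex_type='topic',
               routing_keys=['machines.#'])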
Example No. 28
    def run(connection, args):
        channel = connection.channel()

        exchange = kombu.Exchange('villas', type='headers', durable=True)

        producer = kombu.Producer(channel, exchange=exchange)

        message = {'action': 'start'}

        if args.parameters is not None:
            message['parameters'] = _get_parameters(args.parameters,
                                                    args.parameters_file)

        try:
            if args.model is not None:
                message['model'] = _get_parameters(args.model, args.model_file)
            if args.results is not None:
                message['results'] = _get_parameters(args.results,
                                                     args.results_file)
        except yaml.YAMLError as e:
            LOGGER.error('Failed to parse parameters: %s at line %d column %d',
                         e.msg, e.lineno, e.colno)

        producer.publish(message, headers=SimulatorCommand.get_headers(args))
Example No. 29
    def __init__(self,
                 url=None,
                 prefix='asgi:',
                 expiry=60,
                 group_expiry=86400,
                 capacity=100,
                 channel_capacity=None):

        try:
            init_func = import_string(self.config["INIT_FUNC"])
            init_func()
        except KeyError:
            pass
        except ImportError:
            raise RuntimeError("Cannot import INIT_FUNC")

        try:
            self.model = import_string(self.config["MODEL"])
        except KeyError:
            from .models import ChannelGroup
            self.model = ChannelGroup
        except ImportError:
            raise RuntimeError("Cannot import MODEL")

        super(AMQPChannelLayer, self).__init__(
            expiry=expiry,
            group_expiry=group_expiry,
            capacity=capacity,
            channel_capacity=channel_capacity,
        )

        self.url = url or 'amqp://*****:*****@localhost:5672/%2F'
        self.prefix = prefix + 'tower:websocket'
        self.exchange = kombu.Exchange(self.prefix, type='topic')

        self.tdata = threading.local()
Example No. 30
    def __init__(self, cwuser: str, cwpass: str, workers: int = 1):
        """
        Initialize the class for working with the API
        :param cwuser: str - API username
        :param cwpass: str - API password
        :param workers: int - Number of workers to use
        """

        # TODO: Sort out the worker pool (a single channel cannot be shared by all workers)
        self.__lock = threading.Lock()
        self.lock = threading.Condition(self.__lock)
        self.cwuser = cwuser
        self.cwpass = cwpass
        self.url = f'amqps://{cwuser}:{cwpass}@api.chtwrs.com:5673'
        self.connected = False  # Whether the connection is currently active
        self.connecting = False  # True if the connection is not established but is currently being set up
        self.active = True  # True at startup and False at the very end; if self.active == True
                            # and self.connected == False, the connection dropped on its own.

        self.kafka_active = False

        self.guild_changes = {}
        self.guild_changes_work = None

        self.conn = None
        self.cursor = None
        self.connection = None
        self.producer = None
        self.bot = dispatcher.bot
        self.consumer_tags = []
        self.num_workers = workers  # Number of workers sending requests
        self.workers = []  # The workers themselves
        self.requests_queue = Queue()  # Queue of requests (dict)
        self.__requests_per_second = 0  # Requests-per-second counter

        self.EXCHANGE = "{}_ex".format(cwuser)
        self.ROUTING_KEY = "{}_o".format(cwuser)
        self.INBOUND = "{}_i".format(self.cwuser)

        self.exchange = kombu.Exchange(self.EXCHANGE)
        self.inbound_queue = kombu.Queue(self.INBOUND)

        self.kafka_consumer = None

        self.sent = 0
        self.got_responses = 0

        self.callbacks = {
            "createAuthCode": self.on_create_auth_code,
            "grantToken": self.on_grant_token,
            "requestProfile": self.on_request_profile,
            "guildInfo": self.on_guild_info,
            "requestGearInfo": self.on_gear_info,
            "authAdditionalOperation": self.on_request_additional_operation,
            "grantAdditionalOperation": self.on_grant_additional_operational,
            "requestStock": self.on_stock_info,
            'cw3-deals': self.on_deals,
            # 'cw3-offers': self.on_offers,  # not implemented
            'cw3-sex_digest': self.on_sex_digest,
            'cw3-yellow_pages': self.on_yellow_pages,
            # 'cw3-au_digest': self.on_au_digest,  # not implemented
        }