def test_signal_task_complete(self):
    job_id = 7
    num_sources = 10

    def test_callback(body, message):
        self.assertEqual(dict(job_id=job_id, num_items=num_sources), body)
        message.ack()

    exchange, conn_args = base.exchange_and_conn_args()
    routing_key = base.ROUTING_KEY_FMT % dict(job_id=job_id)
    task_signal_queue = kombu.Queue('tasks.job.%s' % job_id,
                                    exchange=exchange,
                                    routing_key=routing_key,
                                    durable=False,
                                    auto_delete=True)

    with kombu.BrokerConnection(**conn_args) as conn:
        task_signal_queue(conn.channel()).declare()
        with conn.Consumer(task_signal_queue, callbacks=[test_callback]):
            # send the signal:
            base.signal_task_complete(job_id=job_id, num_items=num_sources)
            conn.drain_events()
def _configureKombu(self):
    """
    Configure kombu for rabbitmq
    """
    try:
        connString = 'amqp://{0}:{1}@{2}:{3}//'.format(
            RABBITMQ['mquser'],
            RABBITMQ['mqpassword'],
            RABBITMQ['mqserver'],
            RABBITMQ['mqport'])
        self.mqConn = kombu.Connection(connString)

        self.alertExchange = kombu.Exchange(
            name=RABBITMQ['alertexchange'],
            type='topic',
            durable=True)
        self.alertExchange(self.mqConn).declare()
        alertQueue = kombu.Queue(RABBITMQ['alertqueue'],
                                 exchange=self.alertExchange)
        alertQueue(self.mqConn).declare()
        self.mqproducer = self.mqConn.Producer(serializer='json')
        self.log.debug('Kombu configured')
    except Exception as e:
        self.log.error('Exception while configuring kombu for alerts: {0}'.format(e))
def _publish_consume(self):
    results = []

    def process_message(body, message):
        results.append(body)
        message.ack()

    task_queue = kombu.Queue('tasks', kombu.Exchange('tasks'), routing_key='tasks')
    to_publish = {'hello': 'world'}
    self.producer.publish(to_publish,
                          exchange=task_queue.exchange,
                          routing_key=task_queue.routing_key,
                          declare=[task_queue])

    with kombu.Consumer(self.conn, [task_queue], accept=['json'],
                        callbacks=[process_message]) as consumer:
        Pin.override(consumer, service='kombu-patch', tracer=self.tracer)
        self.conn.drain_events(timeout=2)

    self.assertEqual(results[0], to_publish)
def publish(self, message: Union[dict, str] = "", headers: dict = None,
            exchange: str = EXCHANGE, routing_key: str = ROUTING_KEY):
    """
    Publish a message to the AMQP Queue
    :param message: message to be published
    :param headers: header key-values to publish with the message
    :param exchange: specifies the top level specifier for message publish
    :param routing_key: determines which queue the message is published to
    """
    self._conn.connect()
    queue = kombu.Queue(routing_key,
                        kombu.Exchange(exchange, type="topic"),
                        routing_key=routing_key)
    queue.maybe_bind(self._conn)
    queue.declare()

    producer = kombu.Producer(self._conn.channel())
    producer.publish(
        message,
        headers=headers or {},
        exchange=queue.exchange,
        routing_key=queue.routing_key,
        declare=[queue]
    )
    producer.close()
    self._conn.release()
def __init__(self, rabbit_hosts, rabbit_port, rabbit_user, rabbit_password,
             rabbit_vhost, rabbit_ha_mode, q_name, subscribe_cb, logger):
    super(VncKombuClientV2, self).__init__(rabbit_hosts, rabbit_port,
                                           rabbit_user, rabbit_password,
                                           rabbit_vhost, rabbit_ha_mode,
                                           q_name, subscribe_cb, logger)
    _hosts = self._parse_rabbit_hosts(rabbit_hosts)
    self._urls = []
    for h in _hosts:
        h['vhost'] = "" if not rabbit_vhost else rabbit_vhost
        _url = "pyamqp://%(user)s:%(password)s@%(host)s:%(port)s/%(vhost)s/" % h
        self._urls.append(_url)

    msg = "Initializing RabbitMQ connection, urls %s" % self._urls
    self._logger(msg, level=SandeshLevel.SYS_NOTICE)
    self._update_sandesh_status(ConnectionStatus.INIT)
    self._conn_state = ConnectionStatus.INIT
    self._conn = kombu.Connection(self._urls)
    queue_args = {"x-ha-policy": "all"} if rabbit_ha_mode else None
    self._update_queue_obj = kombu.Queue(q_name, self.obj_upd_exchange,
                                         queue_arguments=queue_args)

    self._start()
def receive(self, channels, block=False):
    if not channels:
        return None, None
    self._init_thread()

    # bind this queue to messages sent to any of the routing_keys
    # in the channels set.
    incoming_routing_keys = [channel_to_routing_key(channel) for channel in channels]
    new_routing_keys = set(incoming_routing_keys).difference(self.tdata.routing_keys)
    self.tdata.routing_keys = new_routing_keys.union(self.tdata.routing_keys)

    for nrk in new_routing_keys:
        if nrk.endswith('.'):
            nrk += '*'
        queue = kombu.Queue(name=self.prefix + ':{}'.format(nrk),
                            exchange=self.exchange,
                            durable=False,
                            exclusive=False,
                            auto_delete=True,
                            routing_key=nrk)
        self.tdata.consumer.add_queue(queue)
        self.tdata.consumer.consume()

    while True:
        # check local buffer for messages
        if self.tdata.buffer:
            message = self.tdata.buffer.popleft()
            channel = routing_key_to_channel(message.delivery_info['routing_key'])
            return channel, self.deserialize(message.body)
        try:
            self.tdata.connection.drain_events(timeout=1)
        except socket.timeout:
            break
        except self.tdata.connection.recoverable_connection_errors:
            self.recover()
            self.tdata.consumer.consume()
    return None, None
def __init__(self, conf):
    super(KombuRPCClient, self).__init__(conf)

    self.exchange = conf.get('exchange', '')
    self.user_id = conf.get('user_id', 'guest')
    self.password = conf.get('password', 'guest')
    self.topic = conf.get('topic', 'mistral')
    self.server_id = conf.get('server_id', '')
    self.host = conf.get('host', 'localhost')
    self.port = conf.get('port', 5672)
    self.virtual_host = conf.get('virtual_host', '/')
    self.durable_queue = conf.get('durable_queues', False)
    self.auto_delete = conf.get('auto_delete', False)
    self._timeout = conf.get('timeout', 60)

    self.conn = self._make_connection(self.host, self.port, self.user_id,
                                      self.password, self.virtual_host)

    # Create exchange.
    exchange = self._make_exchange(self.exchange,
                                   durable=self.durable_queue,
                                   auto_delete=self.auto_delete)

    # Create queue.
    queue_name = utils.generate_unicode_uuid()
    self.callback_queue = kombu.Queue(queue_name,
                                      exchange=exchange,
                                      routing_key=queue_name,
                                      durable=False,
                                      exclusive=True,
                                      auto_delete=True)

    # Create consumer.
    self.consumer = kombu.Consumer(channel=self.conn.channel(),
                                   queues=self.callback_queue,
                                   callbacks=[self._on_response],
                                   accept=['pickle', 'json'])
    self.consumer.qos(prefetch_count=1)
def test_user_published(self):
    idm_broker_config = apps.get_app_config('forsta_broker')
    with idm_broker_config.broker.acquire(block=True) as conn:
        queue = kombu.Queue(exclusive=True).bind(conn)
        queue.declare()
        queue.bind_to(exchange=kombu.Exchange('idm.auth.user'), routing_key='#')

        connection = transaction.get_connection()
        self.assertFalse(connection.in_atomic_block)
        with transaction.atomic():
            user = User.objects.create(identity_id=uuid.uuid4(), primary=True, is_active=True)

        for i in range(5):
            message = queue.get()
            if message:
                break
            time.sleep(0.1)

        self.assertIsInstance(message, Message)
        self.assertEqual(message.delivery_info['routing_key'],
                         'User.created.{}'.format(str(user.id)))
        self.assertEqual(message.content_type, 'application/json')
        self.assertEqual(
            json.loads(message.body.decode())['@type'], 'User')
def _setup_connection(self):
    """Returns True if a valid connection exists already, or if one can
    be created."""
    if self.conn:
        return True

    id_conf = read_conf(ID_CONF_FILE_NAME)
    hosts = id_conf.get('rabbit_hosts', None)
    if hosts is not None:
        host = hosts[0]
    else:
        host = id_conf.get('rabbit_host', None)
    if host is None:
        log.warning("no host info in configuration, can't set up rabbit.")
        return False

    try:
        url = "amqp://{}:{}@{}/{}".format(id_conf['rabbit_userid'],
                                          id_conf['rabbit_password'],
                                          host,
                                          id_conf['rabbit_virtual_host'])
        self.conn = kombu.BrokerConnection(url)
        self.exchange = kombu.Exchange("glance-simplestreams-sync-status")
        status_queue = kombu.Queue("glance-simplestreams-sync-status",
                                   exchange=self.exchange)
        status_queue(self.conn.channel()).declare()
    except:
        log.exception("Exception during kombu setup")
        return False

    return True
def ensure_consuming(self, force=False):
    if self.closed and not force:
        return
    while True:
        try:
            if self.queue:
                return
            with ConnectionContext(self.connection, no_release=True) as conn:
                channel = conn.default_channel
                print('channel:', conn.__dict__)

                # Create the queue
                self.queue = kombu.Queue(
                    sock.gethostname() + "-(ensure_consuming)-" + self.pid,
                    auto_delete=True,
                    expires=RABBITMQ_QUEUE_EXPIRES,
                    channel=channel)
                self.queue.declare()

                # Create the consumer
                self.consumer = kombu.Consumer(
                    channel,
                    [self.queue],
                    no_ack=False,
                    # on_message=
                    prefetch_count=1)
                self.consumer.consume()
        except BrokenPipeError as err:
            self.queue = None
            conn.release()
        except Exception as err:
            print(err)
            self.queue = None
            conn.release()
            break
def get_consumers(self, Consumer, channel):
    """
    Returns a list of kombu.Consumer instances to service all
    registered notification callbacks.

    If using the kombu.mixin.ConsumerMixin mixin class, these instances
    should be included in its get_consumers() method.

    :param Consumer: Message consumer class.
    :type Consumer: class
    :param channel: An open channel.
    :type channel: kombu.transport.*.Channel
    :returns: A list of consumer instances
    :rtype: [kombu.Consumer, ....]
    """
    consumer_list = []
    exchange = self.bus_mixin.producer.exchange
    for routing_key, callbacks in self.notify_callbacks.items():
        queue = kombu.Queue(exchange=exchange, routing_key=routing_key)
        consumer = Consumer(queues=queue, callbacks=callbacks)
        consumer_list.append(consumer)
        self.bus_mixin.logger.info('Listening for "%s" notifications',
                                   routing_key)
    return consumer_list
def get_listener(conn, userid, exchanges=None, extra_data=None, logger=None):
    """Obtain a Pulse consumer that can handle received messages.

    Returns a ``Listener`` instance bound to listen to the requested
    exchanges. Callers should use ``add_callback`` to register functions
    that will be called when a message is received.

    The callback functions receive one argument ``body``, the decoded
    message body.
    """
    queues = []

    if exchanges is None:
        raise ValueError("No exchanges supplied")

    for queue_name, exchange_name, key_name in exchanges:
        queue_name = 'queue/%s/%s' % (userid, queue_name)

        exchange = kombu.Exchange(exchange_name, type='topic', channel=conn)
        exchange.declare(passive=True)

        queue = kombu.Queue(name=queue_name,
                            exchange=exchange,
                            durable=True,
                            routing_key=key_name,
                            exclusive=False,
                            auto_delete=False,
                            channel=conn,
                            extra_data=extra_data)
        queues.append(queue)
        # queue.declare() declares the exchange, which isn't allowed by the
        # server. So call the low-level APIs to only declare the queue itself.
        queue.queue_declare()
        queue.queue_bind()

    return Listener(conn, [item[1] for item in exchanges], queues, logger)
def _consume_amqp(self):
    # XXX https://webarchive.jira.com/browse/ARI-3811
    # After running for some amount of time (3 weeks in the latest case),
    # consumer looks normal but doesn't consume any messages. Not clear if
    # it's hanging in drain_events() or not. As a temporary measure for
    # mitigation (if it works) or debugging (if it doesn't work), close and
    # reopen the connection every 2.5 hours
    RECONNECT_AFTER_SECONDS = 150 * 60

    url_queue = kombu.Queue(self.queue_name, exchange=self._exchange,
                            routing_key=self.routing_key)

    while not self._consumer_stop.is_set():
        try:
            self.logger.info("connecting to amqp exchange={} at {}".format(
                self._exchange.name, self.amqp_url))
            self._reconnect_requested = False
            with kombu.Connection(self.amqp_url) as conn:
                conn.default_channel.basic_qos(
                    prefetch_count=self.max_active_browsers,
                    prefetch_size=0, a_global=False)
                with conn.Consumer(url_queue) as consumer:
                    self._wait_for_and_browse_urls(
                        conn, consumer, timeout=RECONNECT_AFTER_SECONDS)

                # need to wait for browsers to finish here, before closing
                # the amqp connection, because they use it to do
                # message.ack() after they finish browsing a page
                self._wait_for_active_browsers()
        except BaseException as e:
            self.logger.error("caught exception {}".format(e), exc_info=True)
            time.sleep(0.5)
            self.logger.error("attempting to reopen amqp connection")
def test_publish_consume(self, connection):
    test_queue = kombu.Queue('ttl_test', routing_key='ttl_test')

    def callback(body, message):
        assert False, 'Callback should not be called'

    with connection as conn:
        with conn.channel() as channel:
            producer = kombu.Producer(channel)
            producer.publish({'hello': 'world'},
                             retry=True,
                             exchange=test_queue.exchange,
                             routing_key=test_queue.routing_key,
                             declare=[test_queue],
                             serializer='pickle',
                             expiration=2)

            consumer = kombu.Consumer(conn, [test_queue], accept=['pickle'])
            consumer.register_callback(callback)
            sleep(3)
            with consumer:
                with pytest.raises(socket.timeout):
                    conn.drain_events(timeout=1)
def _set_up_queues(self, notification_level):
    if notification_level not in ['info', 'debug', 'warning', 'error']:
        msg = "Unrecognized notification level: " + str(notification_level) + \
              "\nPlease enter a valid notification level from: 'info', 'debug', 'warning', 'error'"
        return 0

    sub_queue_names = []
    sub_queues = []
    log_levels = []

    if notification_level == "debug":
        log_levels = ['debug', 'info', 'warning', 'error']
    elif notification_level == "info":
        log_levels = ['info', 'warning', 'error']
    elif notification_level == "warning":
        log_levels = ['warning', 'error']
    elif notification_level == "error":
        log_levels = ['error']

    for level in log_levels:
        sub_queue_names.append('ironic_versioned_notifications.' + str(level))

    for sub_queue_name in sub_queue_names:
        sub_queues.append(kombu.Queue(str(sub_queue_name),
                                      durable=False,
                                      exchange=self._exchange,
                                      routing_key=str(sub_queue_name)))

    return sub_queues
def pulse_consumer(exchange, request):
    exchange_name = 'exchange/{}/v1/{}'.format(
        settings.PULSE_EXCHANGE_NAMESPACE,
        exchange)

    connection = kombu.Connection(settings.PULSE_URI)

    exchange = kombu.Exchange(name=exchange_name, type='topic')

    queue = kombu.Queue(
        no_ack=True,
        exchange=exchange,   # Bind to the exchange above
        routing_key='#',     # Bind to all messages
        auto_delete=True,    # Delete after each test
        exclusive=False)     # Allow multiple consumers

    simpleQueue = connection.SimpleQueue(
        name=queue,
        channel=connection,
        no_ack=True)

    def fin():
        connection.release()

    request.addfinalizer(fin)
    return simpleQueue
def __init__(self, url, queue, headers, allowed,
             username='******', password='******', **kwargs):
    super(KombuPublisher, self).__init__(allowed, **kwargs)
    self._url = url
    self.queue = queue
    self._headers = headers
    self.username = username
    self.password = password
    self.exchange = kombu.Exchange(name='amq.direct', type='direct')
    self._queue = kombu.Queue(name=queue, exchange=self.exchange, routing_key=queue)
    self.connection = kombu.Connection(self._url,
                                       userid=self.username,
                                       password=self.password)
    self.producer = kombu.Producer(self.connection,
                                   routing_key=self.queue,
                                   exchange=self.exchange)
def __init__(self, options):
    self.options = options
    self.uuid = utils.uuid4()
    self.amqp_url = 'amqp://localhost'
    self.requests = {}

    self.p2p_exchange = kombu.Exchange(self.P2P_EXCHANGE, type='topic', passive=True)
    self.broadcast_exchange = kombu.Exchange(self.BROADCAST_EXCHANGE, type='topic', passive=True)

    self.reply_queue_name = self.QUEUE_PREFIX % self.uuid
    self.reply_queue = kombu.Queue(self.reply_queue_name,
                                   exchange=self.p2p_exchange,
                                   routing_key=self.reply_queue_name,
                                   auto_delete=True)

    self.api_event_queue_name = self.API_EVENT_QUEUE_PREFIX % self.uuid
    self.api_event_queue = kombu.Queue(
        self.api_event_queue_name,
        exchange=self.broadcast_exchange,
        routing_key=self.API_EVENT_QUEUE_BINDING_KEY,
        auto_delete=True)

    self.should_stop = False
    self.reply_connection = None
    self.reply_consumer = None

    def start_reply_consuming():
        try:
            log.debug('reply consumer thread starts')
            with kombu.Connection(self.amqp_url) as conn:
                self.reply_connection = conn
                with conn.Consumer([self.reply_queue],
                                   callbacks=[self._message_handler]) as consumer:
                    self.reply_consumer = consumer
                    while not self.should_stop:
                        conn.drain_events()
        except Exception as ce:
            if 'exchange.declare' in str(ce):
                log.info(
                    'cannot declare RabbitMQ exchange(P2P), you need to start ZStack management server before starting dashboard'
                )
                os._exit(1)
            else:
                raise ce

    self.api_tasks = {}
    self.reply_consumer_thread = threading.Thread(target=start_reply_consuming)
    self.reply_consumer_thread.start()

    self.api_event_connection = None
    self.api_event_consumer = None

    def start_api_event_consuming():
        try:
            log.debug('api event consumer thread starts')
            with kombu.Connection(self.amqp_url) as conn:
                self.api_event_connection = conn
                with conn.Consumer([self.api_event_queue],
                                   callbacks=[self._api_event_handler]) as consumer:
                    self.api_event_consumer = consumer
                    while not self.should_stop:
                        conn.drain_events()
        except Exception as ce:
            if 'exchange.declare' in str(ce):
                log.info(
                    'cannot declare RabbitMQ exchange(BROADCAST), you need to start ZStack management server before starting dashboard'
                )
                os._exit(1)
            else:
                raise ce

    self.api_event_consumer_thread = threading.Thread(target=start_api_event_consuming)
    self.api_event_consumer_thread.start()

    self.producer_connection = kombu.Connection(self.amqp_url)
def __queue(self, name):
    queue_name = self.__queue_name(name)
    queue = kombu.Queue(queue_name, self.__exchange, routing_key=queue_name)
    return queue
"baz", "abc", "def", "ghi", "jkl", "mno", "boogaloo", ] with kombu.Connection("amqp://*****:*****@localhost:5672") as conn: channel = conn.channel() exchange = kombu.Exchange("input_exchange", type="topic") exchange.declare(channel=channel) queue = kombu.Queue("test_queue", exchange=exchange, durable=True, message_ttl=3600) queue.declare(channel=channel) queue.queue_bind(channel=channel) with producers[conn].acquire(block=True) as producer: while True: producer.publish( { "uid1": random.choice(uids), "start_time": datetime.now() - timedelta(minutes=5), "stop_time": datetime.now(), }, exchange=exchange, ) producer.publish( {
""" Close session """ if self.state not in (STATE_CLOSING, STATE_CLOSED): self.state = STATE_CLOSING self.add_message(FRAME_CLOSE, (code, reason)) if self._debug: log.debug('[sockjs_flask] Session closed: %s', self.id) _marker = object() exchange = kombu.Exchange('subscription', type='topic', auto_delet=True, delivery_mode=1) _queues = [kombu.Queue('video', exchange=exchange, key='video')] class SessionManager(dict): """ A basic session manager """ __slots__ = ('name', 'route_name', 'app', 'handler', 'factory', 'hub', 'acquired', 'session', 'heartbeat', 'timeout', 'debug', 'broker_url', 'sessions', '_hb_handle', '_hb_task', 'registry', '__weakref__') def __init__(self, name, app, handler, broker_url=None,
def __init__(self, mysql_api_host_write, mysql_api_host_read, mysql_api_port,
             mysql_api_user, mysql_api_pwd, mysql_api_db,
             aws_access_key_id, aws_secret_access_key,
             s3_upload_bucket, s3_upload_key_prefix, s3_upload_song_key_prefix,
             rbmq_hosts, rbmq_port, rbmq_user, rbmq_pwd, rbmq_vhost,
             inbound_queue, inbound_routing_key, video_download_dir,
             console_log_level="DEBUG", file_log_level="INFO", log_file=None,
             lossless=1, s3_download_bucket=None, s3_download_key_prefix=None,
             static_vframes=1, dynamic_vframes=9,
             jpg_w_size='1334', jpg_h_size='750',
             webp_w_size='667', webp_h_size='375',
             webp_w_small_size='333', webp_h_small_size='187',
             webp_loop=0, webp_fps=10, audio_bitrate=19200,
             ffmpeg_preset_webp='default', log_interval=5, log_backup_count=20,
             clean_folder='True'):
    (rbmq_url, rbmq_alt_urls) = get_rbmq_urls(rbmq_hosts, rbmq_port, rbmq_user, rbmq_pwd)
    self.rbmq_url = rbmq_url
    self.rbmq_vhost = rbmq_vhost
    self.rbmq_alt_urls = rbmq_alt_urls
    self.connection = kombu.Connection(self.rbmq_url,
                                       alternates=self.rbmq_alt_urls,
                                       failover_strategy='round-robin',
                                       virtual_host=self.rbmq_vhost)
    # self.exchange = kombu.Exchange(exchange, type=exchange_type)
    self.inbound_queue = kombu.Queue(name=inbound_queue,
                                     routing_key=inbound_routing_key,
                                     channel=self.connection,
                                     auto_declare=False)
    # self.outbound_queue = kombu.Queue(name=outbound_queue, exchange=self.exchange,
    #                                   routing_key=outbound_routing_key,
    #                                   channel=self.connection, auto_declare=False)
    self.clean_folder = clean_folder

    super().__init__(mysql_api_host_write=mysql_api_host_write,
                     mysql_api_host_read=mysql_api_host_read,
                     mysql_api_port=mysql_api_port,
                     mysql_api_user=mysql_api_user,
                     mysql_api_pwd=mysql_api_pwd,
                     mysql_api_db=mysql_api_db,
                     aws_access_key_id=aws_access_key_id,
                     aws_secret_access_key=aws_secret_access_key,
                     s3_upload_bucket=s3_upload_bucket,
                     s3_upload_key_prefix=s3_upload_key_prefix,
                     video_download_dir=video_download_dir,
                     console_log_level=console_log_level,
                     log_file=log_file,
                     file_log_level=file_log_level,
                     ffmpeg_preset_webp=ffmpeg_preset_webp,
                     log_interval=log_interval,
                     log_backup_count=log_backup_count)

    self.logger.debug(
        f"[WebpWorker] rbmq_url={self.rbmq_url}, rbmq_alt_urls={self.rbmq_alt_urls}"
    )

    # Setup download mode
    if s3_download_key_prefix is not None and s3_download_bucket is not None:
        self.download_mode = "s3"
    else:
        s3_download_bucket, s3_download_key_prefix = s3_upload_bucket, s3_upload_key_prefix
        self.download_mode = "s3"
    self.s3_download_bucket = s3_download_bucket

    self.static_vframes = static_vframes
    self.dynamic_vframes = dynamic_vframes
    self.lossless = lossless
    self.jpg_w_size = jpg_w_size
    self.jpg_h_size = jpg_h_size
    self.webp_w_size = webp_w_size
    self.webp_h_size = webp_h_size
    self.webp_w_small_size = webp_w_small_size
    self.webp_h_small_size = webp_h_small_size
    self.webp_loop = webp_loop
    self.webp_fps = webp_fps
    self.audio_bitrate = audio_bitrate
    self.s3_upload_song_key_prefix = s3_upload_song_key_prefix.strip("/")
def __init__(self, host: str = AMQP_HOST, port: int = AMQP_PORT, binding: Bindings = None,
             callbacks: Callbacks = None, debug: bool = False, **kwargs):
    """
    Consume message from the given bindings
    :param host: host running RabbitMQ
    :param port: port which handles AMQP (default 5672)
    :param binding: Queue/Exchange bindings to listen on
        Dict[
            str,  # Queue Name
            List[Union[  # Exchange
                str,  # Exchange Name, type is direct
                List[Tuple[
                    str,  # Exchange Name
                    Literal['direct', 'fanout', 'headers', 'topic']  # exchange type
                ]]
            ]]
        ]
    :param callbacks: list of callback functions which are called upon receiving a message
    :param debug: print debugging messages
    :param **kwargs: extra args
        - Backwards compatibility:
        :param exchange: specifies where to read messages from
        :param routing_key:
    """
    super().__init__()
    self._exit = Event()
    self._url = f"amqp://{host}:{port}"
    self._debug = debug
    self._queues = []

    if isinstance(callbacks, (list, tuple)):
        self._callbacks = [f for f in callbacks if isfunction(f) or isinstance(f, partial)]
    else:
        self._callbacks = []

    # Initialize connection we are consuming from based on defaults/passed params
    self._conn = kombu.Connection(hostname=host, port=port, userid="guest",
                                  password="******", virtual_host="/")

    if binding:
        for queue, exchanges in binding.items():
            queue_bindings = []
            for exchange in exchanges:
                name, _type, key = (exchange, 'direct', queue) if isinstance(exchange, str) else exchange
                queue_bindings.append(
                    kombu.binding(exchange=kombu.Exchange(name, type=_type),
                                  routing_key=key))
            self._queues.append(kombu.Queue(name=queue, bindings=queue_bindings))
    elif 'exchange' in kwargs and 'routing_key' in kwargs:
        exchange = kombu.Exchange(kwargs['exchange'], type="direct")
        key = kwargs['routing_key']
        # At this point, consumers are reading messages regardless of queue name
        # so I am just setting it to be the same as the exchange.
        self._queues = [
            kombu.Queue(name=key,
                        bindings=[kombu.binding(exchange=exchange, routing_key=key)])
        ]

    # Start consumer as an independent process
    self.start()
    if self._debug:
        print(f"Connected to {self._url}")
def _queue(self):
    queue_name = 'flask-socketio.' + str(uuid.uuid4())
    return kombu.Queue(queue_name, self._exchange(),
                       durable=False,
                       queue_arguments={'x-expires': 300000})
        os.environ.get('OIDC_CLIENT_ID', ''),
        'client_secret': os.environ.get('OIDC_CLIENT_SECRET', ''),
        'signing_alg': os.environ.get('OIDC_SIGNING_ALG', 'RS256')
    },
    'SCOPES': os.environ.get('OIDC_SCOPES', 'identity').split(),
    'PROCESS_USERINFO': 'idm_core.auth.process_userinfo',
}

IDM_BROKER = {
    'CONSUMERS': [{
        'queues': [
            kombu.Queue('idm.core.user',
                        exchange=kombu.Exchange('idm.auth.user', type='topic', passive=True),
                        routing_key='#')
        ],
        'tasks': ['idm_core.tasks.update_user'],
    }],
}

SESSION_COOKIE_NAME = 'idm-core-sessionid'

LOGIN_URL = 'oidc-login'
LOGOUT_URL = 'logout'

l = logging.getLogger('django.db.backends')
l.setLevel(logging.DEBUG)
l.addHandler(logging.StreamHandler())
def _setup_connection(self):
    """Returns True if a valid connection exists already, or if one can
    be created."""
    if self.conn:
        return True

    id_conf = read_conf(ID_CONF_FILE_NAME)

    # The identity.yaml file contains either a singular string variable
    # 'rabbit_host', or a comma separated list in the plural variable
    # 'rabbit_hosts'
    host = None
    hosts = id_conf.get('rabbit_hosts', None)
    if hosts is not None:
        host = hosts.split(",")[0]
    else:
        host = id_conf.get('rabbit_host', None)
    if host is None:
        log.warning("no host info in configuration, can't set up rabbit.")
        return False

    try:
        # amqp:// implies librabbitmq if available, otherwise pyamqp
        # librabbitmq doesn't support SSL
        # use pyamqp:// explicitly for SSL
        url = "pyamqp://{}:{}@{}/{}".format(
            id_conf['rabbit_userid'],
            id_conf['rabbit_password'],
            host,
            id_conf['rabbit_virtual_host'])
        ssl = None
        if 'rabbit_use_ssl' in id_conf:
            if 'ssl_ca' in id_conf:
                cacert = CACERT_FILE
            else:
                cacert = SYSTEM_CACERT_FILE
            try:
                os.makedirs('/usr/local/share/ca-certificates')
            except os.error:
                # ignore existence of already created directory
                pass
            with open('/usr/local/share/ca-certificates/'
                      'glance-simplestreams-sync.crt', 'w') as f:
                f.write(
                    base64.b64decode(id_conf['kombu_ssl_ca_certs']))
            subprocess.check_call(
                ['/usr/sbin/update-ca-certificates', '--fresh'])
            ssl = {'ca_certs': cacert}

        self.conn = kombu.BrokerConnection(url, ssl=ssl)
        self.exchange = kombu.Exchange("glance-simplestreams-sync-status")
        status_queue = kombu.Queue("glance-simplestreams-sync-status",
                                   exchange=self.exchange)
        status_queue(self.conn.channel()).declare()
    except:
        log.exception("Exception during kombu setup")
        return False

    return True
import time

import kombu

hosts = [
    'amqp://*****:*****@localhost:5673//',
    'amqp://*****:*****@localhost:5672//'
]

connection = kombu.Connection(hosts)
connection.ensure_connection()
connection.connect()
channel = connection.channel()

#exchange = kombu.Exchange('some-exchange')
#queue = kombu.Queue(name='some-queue', exchange=exchange)
exchange = kombu.Exchange('kombu_demo', type='direct')
queue = kombu.Queue('kombu_demo', exchange, routing_key='kombu_demo')

cnt = 0


def callback(body, message):
    global cnt
    print('%d: got msg - %s' % (cnt, body))
    message.ack()
    cnt += 1


consumer = kombu.Consumer(channel, queues=queue, callbacks=[callback])
#consumer.consume()

while True:
    try:
import kombu

from push import settings, models

exchange = kombu.Exchange(
    name=settings.PUSH_AMQP_EXCHANGE,
    type='topic',
)

connection = kombu.Connection(settings.PUSH_AMQP_CONNECTION)

apns_queue = kombu.Queue(
    settings.PUSH_AMQP_QUEUE_PREFIX + models.DeviceOS.iOS.name,
    exchange=exchange,
    routing_key=models.DeviceOS.iOS.name,
)

fcm_queue = kombu.Queue(
    settings.PUSH_AMQP_QUEUE_PREFIX + models.DeviceOS.Android.name,
    exchange=exchange,
    routing_key=models.DeviceOS.Android.name,
)
def __init__(self, cwuser: str, cwpass: str, workers: int = 1):
    """
    Initialize the API client class
    :param cwuser: str - API username
    :param cwpass: str - API password
    :param workers: int - Number of workers to use
    """
    # TODO Sort out the worker pool (a single channel cannot be shared by all workers)
    self.__lock = threading.Lock()
    self.lock = threading.Condition(self.__lock)

    self.cwuser = cwuser
    self.cwpass = cwpass
    self.url = f'amqps://{cwuser}:{cwpass}@api.chtwrs.com:5673'

    self.connected = False   # The connection is currently active
    self.connecting = False  # True if the connection is not established but is being attempted right now
    self.active = True       # True at startup and False at the very end; if self.active == True and
                             # self.connected == False, it means the connection dropped by itself.
    self.kafka_active = False
    self.guild_changes = {}
    self.guild_changes_work = None

    self.conn = None
    self.cursor = None
    self.connection = None
    self.producer = None
    self.bot = dispatcher.bot
    self.consumer_tags = []

    self.num_workers = workers      # Number of workers handling outgoing requests
    self.workers = []               # The workers themselves
    self.requests_queue = Queue()   # Queue of requests (Dict)
    self.__requests_per_second = 0  # Requests-per-second counter

    self.EXCHANGE = "{}_ex".format(cwuser)
    self.ROUTING_KEY = "{}_o".format(cwuser)
    self.INBOUND = "{}_i".format(self.cwuser)

    self.exchange = kombu.Exchange(self.EXCHANGE)
    self.inbound_queue = kombu.Queue(self.INBOUND)
    self.kafka_consumer = None

    self.sent = 0
    self.got_responses = 0

    self.callbacks = {
        "createAuthCode": self.on_create_auth_code,
        "grantToken": self.on_grant_token,
        "requestProfile": self.on_request_profile,
        "guildInfo": self.on_guild_info,
        "requestGearInfo": self.on_gear_info,
        "authAdditionalOperation": self.on_request_additional_operation,
        "grantAdditionalOperation": self.on_grant_additional_operational,
        "requestStock": self.on_stock_info,
        'cw3-deals': self.on_deals,
        # 'cw3-offers': self.on_offers,  # not implemented
        'cw3-sex_digest': self.on_sex_digest,
        'cw3-yellow_pages': self.on_yellow_pages,
        # 'cw3-au_digest': self.on_au_digest,  # not implemented
    }
class Service:

    _QUEUE = kombu.Queue(
        exchange=kombu.Exchange('xivo', type='topic'),
        routing_key='auth.tenants.*.created',
        exclusive=True,
    )

    def __init__(self, config, bus, controller):
        self._bus = bus
        self._config = config
        self._controller = controller
        bus.add_consumer(self._QUEUE, self._on_new_tenant)

    def get_current_config(self):
        return self._config

    def _on_new_tenant(self, body, message):
        try:
            event = Marshaler.unmarshal_message(body, TenantCreatedEvent)
            body = event.marshal()
            uuid = body['uuid']
            name = body['name']
        except (InvalidMessage, KeyError):
            logger.info('Ignoring the following malformed bus message: %s', body)
        else:
            sources = self._auto_create_sources(uuid, name)
            display = self._auto_create_display(uuid, name)
            self._auto_create_profile(uuid, name, display, sources)
        finally:
            message.ack()

    def _add_source(self, backend, body):
        source_service = self._controller.services.get('source')
        try:
            source = source_service.create(backend, **body)
            logger.info('auto created %s source %s', backend, body)
            return source
        except Exception as e:
            logger.info('failed to create %s source %s', backend, e)

    def _add_conference_source(self, tenant_uuid, name):
        backend = 'conference'
        body = dict(CONFERENCE_SOURCE_BODY)
        body['name'] = 'auto_{}_{}'.format(backend, name)
        body['tenant_uuid'] = tenant_uuid
        return self._add_source(backend, body)

    def _add_personal_source(self, tenant_uuid, name):
        backend = 'personal'
        body = dict(PERSONAL_SOURCE_BODY)
        body['tenant_uuid'] = tenant_uuid
        return self._add_source(backend, body)

    def _add_wazo_user_source(self, tenant_uuid, name):
        backend = 'wazo'
        body = dict(WAZO_SOURCE_BODY)
        body['name'] = 'auto_{}_{}'.format(backend, name)
        body['tenant_uuid'] = tenant_uuid
        return self._add_source(backend, body)

    def _add_office365_source(self, tenant_uuid, name):
        backend = 'office365'
        body = dict(OFFICE_365_SOURCE_BODY)
        body['name'] = 'auto_{}_{}'.format(backend, name)
        body['tenant_uuid'] = tenant_uuid
        return self._add_source(backend, body)

    def _auto_create_sources(self, tenant_uuid, name):
        logger.info('creating sources for tenant "%s"', name)
        sources = [
            self._add_conference_source(tenant_uuid, name),
            self._add_personal_source(tenant_uuid, name),
            self._add_wazo_user_source(tenant_uuid, name),
            self._add_office365_source(tenant_uuid, name),
        ]
        return [s for s in sources if s is not None]

    def _auto_create_display(self, tenant_uuid, name):
        display_service = self._controller.services.get('display')
        try:
            display = display_service.create(
                tenant_uuid=tenant_uuid,
                name='auto_{}'.format(name),
                columns=DEFAULT_DISPLAY_COLUMNS,
            )
            logger.info(
                'display %s auto created for tenant %s',
                display['uuid'],
                display['tenant_uuid'],
            )
            return display
        except Exception as e:
            logger.info('auto display creation failed %s', e)

    def _auto_create_profile(self, tenant_uuid, name, display, sources):
        logger.info('creating a new profile for tenant "%s"', name)
        body = {
            'name': 'default',
            'tenant_uuid': tenant_uuid,
            'display': display,
            'services': {
                'lookup': {'sources': sources},
                'favorites': {'sources': sources},
                'reverse': {
                    'sources': sources,
                    'timeout': 0.5,
                },
            },
        }

        profile_service = self._controller.services.get('profile')
        try:
            profile = profile_service.create(**body)
            logger.info('auto created profile %s', profile)
        except Exception as e:
            logger.info('auto profile creation failed %s', e)