def init_host(self):
    """Initialize the collector: RPC connection, storage, and consumers.

    Sets up ``self.connection`` (RPC), ``self.storage_conn`` (metering
    storage), and ``self.handler`` (notification dispatcher), then
    subscribes to each notification topic and to the metering topic
    before starting the consumer thread.
    """
    # Use the nova configuration flags to get a connection to the
    # RPC mechanism nova is using.
    self.connection = rpc.create_connection()

    storage.register_opts(cfg.CONF)
    self.storage_engine = storage.get_engine(cfg.CONF)
    self.storage_conn = self.storage_engine.get_connection(cfg.CONF)

    self.handler = dispatcher.NotificationDispatcher(
        COLLECTOR_NAMESPACE,
        self._publish_counter,
    )

    # FIXME(dhellmann): Should be using create_worker(), except
    # that notification messages do not conform to the RPC
    # invocation protocol (they do not include a "method"
    # parameter).
    for topic in self.handler.topics:
        self.connection.declare_topic_consumer(
            topic=topic,
            queue_name="ceilometer.notifications",
            # Bind the topic so the handler knows which queue the
            # message arrived on.
            callback=functools.partial(self.handler.notify, topic))

    # Set ourselves up as a separate worker for the metering data,
    # since the default for manager is to use create_consumer().
    self.connection.create_worker(
        cfg.CONF.metering_topic,
        rpc_dispatcher.RpcDispatcher([self]),
        'ceilometer.collector.' + cfg.CONF.metering_topic,
    )

    self.connection.consume_in_thread()
def start(self):
    """Start the service and create its RPC consumers.

    Creates a fresh RPC connection and registers three consumers on it:
    the shared service topic, a host-specific node topic, and a fanout
    consumer on the service topic. Gives the manager a post-connection
    hook, then begins consuming in a background thread.
    """
    super(Service, self).start()

    self.conn = rpc.create_connection(new=True)
    LOG.debug(
        _("Creating Consumer connection for Service %s") % self.topic)

    dispatcher = rpc_dispatcher.RpcDispatcher([self.manager],
                                              self.serializer)

    # Share this same connection for these Consumers
    self.conn.create_consumer(self.topic, dispatcher, fanout=False)

    node_topic = '%s.%s' % (self.topic, self.host)
    self.conn.create_consumer(node_topic, dispatcher, fanout=False)

    self.conn.create_consumer(self.topic, dispatcher, fanout=True)

    # Hook to allow the manager to do other initializations after
    # the rpc connection is created.
    if callable(getattr(self.manager, 'initialize_service_hook', None)):
        self.manager.initialize_service_hook(self)

    # Consume from all consumers in a thread
    self.conn.consume_in_thread()
def start(self):
    """Start the service and create its RPC consumers.

    Creates a fresh RPC connection and registers three consumers on it:
    the shared service topic, a host-specific node topic, and a fanout
    consumer on the service topic. Gives the manager a post-connection
    hook, then begins consuming in a background thread.
    """
    super(Service, self).start()

    self.conn = rpc.create_connection(new=True)
    LOG.debug(_("Creating Consumer connection for Service %s") %
              self.topic)

    dispatcher = rpc_dispatcher.RpcDispatcher([self.manager],
                                              self.serializer)

    # Share this same connection for these Consumers
    self.conn.create_consumer(self.topic, dispatcher, fanout=False)

    node_topic = '%s.%s' % (self.topic, self.host)
    self.conn.create_consumer(node_topic, dispatcher, fanout=False)

    self.conn.create_consumer(self.topic, dispatcher, fanout=True)

    # Hook to allow the manager to do other initializations after
    # the rpc connection is created.
    if callable(getattr(self.manager, 'initialize_service_hook', None)):
        self.manager.initialize_service_hook(self)

    # Consume from all consumers in a thread
    self.conn.consume_in_thread()
def main():
    """Entry point: load config, parse CLI options, listen on RPC.

    Reads defaults from ``cfg_defaults``, overlays the optional config
    file, then lets command-line options override both. Returns 0 on
    clean exit; the RPC connection is always closed.
    """
    # process configuration file (if exists) and setup logging
    config = SafeConfigParser(cfg_defaults)
    config.add_section('common')
    for key, value in cfg_defaults.items():
        config.set('common', key, str(value))
    if config.read(cfg_filename):
        logging.config.fileConfig(cfg_filename)
    else:
        logging.basicConfig(stream=sys.stdout,
                            level=cfg_defaults['logLevel'],
                            format=cfg_defaults['logFormat'])

    # process command line arguments
    parser = OptionParser(version='{0} {1}'.format(__description__,
                                                   __version__),
                          description=__description__)
    parser.add_option('-b', '--brokerUrl', dest='brokerUrl', metavar='URL',
                      type='string',
                      default=config.get('common', 'brokerUrl'),
                      help='context broker URL [default=%default]')
    parser.add_option('-l', '--logLevel', dest='logLevel', metavar='LEVEL',
                      # NOTE: logging._levelNames is a private CPython-2
                      # mapping mixing names and numbers; keep only names.
                      choices=[level for level in logging._levelNames.keys()
                               if isinstance(level, str)],
                      default=config.get('common', 'logLevel'),
                      help='logging level [default=%default]')
    (opts, args) = parser.parse_args()  # @UnusedVariable
    config.set('common', 'brokerUrl', opts.brokerUrl)
    config.set('common', 'logLevel', opts.logLevel)
    logging.root.setLevel(opts.logLevel)

    # rpc connection
    connection = rpc.create_connection()
    try:
        logging.info('Context Broker URL: %s',
                     config.get('common', 'brokerUrl'))
        listen(connection, config)
    finally:
        connection.close()
    return 0
def main():
    """Record, replay, or monitor notification events over RPC.

    Registers RPC and tool options on ``cfg.CONF``, parses the
    remaining CLI arguments with argparse, configures DEBUG logging to
    stderr, then dispatches on the chosen mode. Returns 0; the RPC
    connection is always closed.
    """
    rpc.register_opts(cfg.CONF)
    cfg.CONF.register_opts([
        cfg.StrOpt('datafile',
                   default=None,
                   help='Data file to read or write',
                   ),
        cfg.BoolOpt('record',
                    help='Record events',
                    ),
        cfg.BoolOpt('replay',
                    help='Replay events',
                    ),
    ])
    # cfg.CONF consumes its own options and returns the leftovers.
    remaining_args = cfg.CONF(sys.argv)
    #utils.monkey_patch()

    parser = argparse.ArgumentParser(
        description='record or play back notification events',
    )
    parser.add_argument('mode',
                        choices=('record', 'replay', 'monitor'),
                        help='operating mode',
                        )
    parser.add_argument('data_file',
                        default='msgs.dat',
                        nargs='?',
                        help='the data file to read or write',
                        )
    parser.add_argument('--topic',
                        default='notifications.info',
                        help='the exchange topic to listen for',
                        )
    args = parser.parse_args(remaining_args[1:])

    # Route all DEBUG-and-above output to stderr, message text only.
    console = logging.StreamHandler(sys.stderr)
    console.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(message)s')
    console.setFormatter(formatter)
    root_logger = logging.getLogger('')
    root_logger.addHandler(console)
    root_logger.setLevel(logging.DEBUG)

    connection = rpc.create_connection()
    try:
        if args.mode == 'replay':
            with open(args.data_file, 'rb') as input:
                send_messages(connection, args.topic, input)
        elif args.mode == 'record':
            with open(args.data_file, 'wb') as output:
                record_messages(connection, args.topic, output)
        elif args.mode == 'monitor':
            monitor_messages(connection, args.topic)
    finally:
        connection.close()
    return 0
def main():
    """Record, replay, or monitor notification events over RPC.

    Registers RPC and tool options on ``FLAGS``, parses the remaining
    CLI arguments with argparse, configures DEBUG logging to stderr,
    then dispatches on the chosen mode. Returns 0; the RPC connection
    is always closed.
    """
    rpc.register_opts(FLAGS)
    FLAGS.register_opts([
        cfg.StrOpt('datafile',
                   default=None,
                   help='Data file to read or write',
                   ),
        cfg.BoolOpt('record',
                    help='Record events',
                    ),
        cfg.BoolOpt('replay',
                    help='Replay events',
                    ),
    ])
    # FLAGS consumes its own options and returns the leftovers.
    remaining_args = FLAGS(sys.argv)
    utils.monkey_patch()

    parser = argparse.ArgumentParser(
        description='record or play back notification events',
    )
    parser.add_argument('mode',
                        choices=('record', 'replay', 'monitor'),
                        help='operating mode',
                        )
    parser.add_argument('data_file',
                        default='msgs.dat',
                        nargs='?',
                        help='the data file to read or write',
                        )
    parser.add_argument('--topic',
                        default='notifications.info',
                        help='the exchange topic to listen for',
                        )
    args = parser.parse_args(remaining_args[1:])

    # Route all DEBUG-and-above output to stderr, message text only.
    console = logging.StreamHandler(sys.stderr)
    console.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(message)s')
    console.setFormatter(formatter)
    root_logger = logging.getLogger('')
    root_logger.addHandler(console)
    root_logger.setLevel(logging.DEBUG)

    connection = rpc.create_connection()
    try:
        if args.mode == 'replay':
            with open(args.data_file, 'rb') as input:
                send_messages(connection, args.topic, input)
        elif args.mode == 'record':
            with open(args.data_file, 'wb') as output:
                record_messages(connection, args.topic, output)
        elif args.mode == 'monitor':
            monitor_messages(connection, args.topic)
    finally:
        connection.close()
    return 0
def start(self):
    """Start the service and create its RPC consumers.

    Creates a fresh RPC connection and registers three consumers on it:
    the shared service topic, a host-specific node topic, and a fanout
    consumer on the service topic, then begins consuming in a
    background thread.
    """
    super(Service, self).start()

    self.conn = rpc.create_connection(new=True)
    LOG.debug(_("Creating Consumer connection for Service %s") %
              self.topic)

    rpc_dispatcher = rpc.dispatcher.RpcDispatcher([self.manager])

    # Share this same connection for these Consumers
    self.conn.create_consumer(self.topic, rpc_dispatcher, fanout=False)

    node_topic = '%s.%s' % (self.topic, self.host)
    self.conn.create_consumer(node_topic, rpc_dispatcher, fanout=False)

    self.conn.create_consumer(self.topic, rpc_dispatcher, fanout=True)

    # Consume from all consumers in a thread
    self.conn.consume_in_thread()
def init_host(self):
    """Initialize the collector: RPC, storage, and plugin consumers.

    Loads notification-handler plugins, subscribes a consumer for every
    (exchange, topic) pair the handlers declare, registers a worker for
    the metering topic, and starts the consumer thread.
    """
    # Use the nova configuration flags to get a connection to the
    # RPC mechanism nova is using.
    self.connection = rpc.create_connection()

    storage.register_opts(cfg.CONF)
    self.storage_engine = storage.get_engine(cfg.CONF)
    self.storage_conn = self.storage_engine.get_connection(cfg.CONF)

    self.handlers = self._load_plugins(self.COLLECTOR_NAMESPACE)
    if not self.handlers:
        LOG.warning('Failed to load any notification handlers for %s',
                    self.plugin_namespace)

    # FIXME(dhellmann): Should be using create_worker(), except
    # that notification messages do not conform to the RPC
    # invocation protocol (they do not include a "method"
    # parameter).
    # FIXME(dhellmann): Break this out into its own method
    # so we can test the subscription logic.
    for handler in self.handlers:
        LOG.debug('Event types: %r', handler.get_event_types())
        for exchange_topic in handler.get_exchange_topics(cfg.CONF):
            for topic in exchange_topic.topics:
                self.connection.declare_topic_consumer(
                    queue_name="ceilometer.notifications",
                    topic=topic,
                    exchange_name=exchange_topic.exchange,
                    callback=self.process_notification,
                )

    # Set ourselves up as a separate worker for the metering data,
    # since the default for manager is to use create_consumer().
    self.connection.create_worker(
        cfg.CONF.metering_topic,
        rpc_dispatcher.RpcDispatcher([self]),
        'ceilometer.collector.' + cfg.CONF.metering_topic,
    )

    self.connection.consume_in_thread()
def main():
    """Record, replay, or monitor notification events over RPC.

    Registers RPC and tool options on ``FLAGS``, parses the remaining
    CLI arguments with argparse, configures DEBUG logging to stderr,
    then dispatches on the chosen mode. Returns 0; the RPC connection
    is always closed.
    """
    rpc.register_opts(FLAGS)
    FLAGS.register_opts(
        [
            cfg.StrOpt("datafile", default=None, help="Data file to read or write"),
            cfg.BoolOpt("record", help="Record events"),
            cfg.BoolOpt("replay", help="Replay events"),
        ]
    )
    # FLAGS consumes its own options and returns the leftovers.
    remaining_args = FLAGS(sys.argv)
    utils.monkey_patch()

    parser = argparse.ArgumentParser(description="record or play back notification events")
    parser.add_argument("mode", choices=("record", "replay", "monitor"), help="operating mode")
    parser.add_argument("data_file", default="msgs.dat", nargs="?", help="the data file to read or write")
    parser.add_argument("--topic", default="notifications.info", help="the exchange topic to listen for")
    args = parser.parse_args(remaining_args[1:])

    # Route all DEBUG-and-above output to stderr, message text only.
    console = logging.StreamHandler(sys.stderr)
    console.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(message)s")
    console.setFormatter(formatter)
    root_logger = logging.getLogger("")
    root_logger.addHandler(console)
    root_logger.setLevel(logging.DEBUG)

    connection = rpc.create_connection()
    try:
        if args.mode == "replay":
            with open(args.data_file, "rb") as input:
                send_messages(connection, args.topic, input)
        elif args.mode == "record":
            with open(args.data_file, "wb") as output:
                record_messages(connection, args.topic, output)
        elif args.mode == "monitor":
            monitor_messages(connection, args.topic)
    finally:
        connection.close()
    return 0