def logger(**kwargs):
    """ Emit log messages to the FI bus.

    If the fedmsg-relay service is not running at the address specified in
    the config, then this command will *hang* until that service becomes
    available.

    If --message is not specified, this command accepts messages from stdin.

    Some examples::

        $ echo '{"a": 1}' | fedmsg-logger --json-input
        $ echo "Hai there." | fedmsg-logger --modname=git --topic=repo.update
        $ fedmsg-logger --message="This is a message."
        $ fedmsg-logger --message='{"a": 1}' --json-input

    """
    # Publish through the fedmsg-relay rather than binding our own endpoints.
    kwargs['active'] = True
    fedmsg.init(name='relay_inbound', **kwargs)

    message = kwargs.get('logger_message')
    if message:
        # A single message was supplied on the command line.
        _log_message(kwargs, message)
    else:
        # Otherwise, relay every line read from stdin until EOF.
        for line in iter(sys.stdin.readline, ''):
            _log_message(kwargs, line.strip())
def logger(**kwargs):
    """ Emit log messages to the FI bus.

    If the fedmsg-relay service is not running at the address specified in
    fedmsg-config.py, then this command will *hang* until that service
    becomes available.

    If --message is not specified, this command accepts messages from stdin.

    Some examples::

        $ echo '{"a": 1}' | fedmsg-logger --json-input
        $ echo "Hai there." | fedmsg-logger --modname=git --topic=repo.update
        $ fedmsg-logger --message="This is a message."
        $ fedmsg-logger --message='{"a": 1}' --json-input

    """
    # Route our messages through the fedmsg-relay's inbound endpoint.
    kwargs["active"] = True
    kwargs["endpoints"]["relay_inbound"] = kwargs["relay_inbound"]
    fedmsg.init(name="relay_inbound", **kwargs)

    if kwargs.get("message", None):
        # A single message was supplied on the command line.
        _log_message(kwargs, kwargs["message"])
    else:
        # No --message given: relay stdin line-by-line until EOF.
        for line in iter(sys.stdin.readline, ""):
            _log_message(kwargs, line.strip())
def tail(**kw): """ Watch the bus. """ # Disable sending kw['publish_endpoint'] = None # Disable timeouts. We want to tail forever! kw['timeout'] = 0 fedmsg.init(**kw) # Build a message formatter formatter = lambda s: s if kw['pretty_print']: formatter = lambda s: "\n" + pprint.pformat(s) # The "proper" fedmsg way to do this would be to spin up or connect to an # existing Moksha Hub and register a consumer on the "*" topic that simply # prints out each message it consumes. That seems like overkill, so we're # just going to directly access the endpoints ourself. # TODO -- colors? # TODO -- tabular layout? for name, ep, topic, message in fedmsg.__context._tail_messages(**kw): if '_heartbeat' in topic: continue print name, ep, topic, formatter(message)
def __init__(self, hub):
    """ Set up the consumer: config, fedmsg context, and a Bugzilla client. """
    super(BugzillaConsumer, self).__init__(hub)

    self.config = config = hub.config

    # Backwards compat. We used to have a self.debug...
    self.debug = self.log.info

    # Comma-separated list of Bugzilla products we watch.
    products = config.get('bugzilla.products', 'Fedora, Fedora EPEL')
    self.products = [product.strip() for product in products.split(',')]

    # First, initialize fedmsg and bugzilla in this thread's context.
    hostname = socket.gethostname().split('.', 1)[0]
    fedmsg.init(name='bugzilla.%s' % hostname)

    url = self.config.get('bugzilla.url', 'https://bugzilla.redhat.com')
    username = self.config.get('bugzilla.username', None)
    password = self.config.get('bugzilla.password', None)

    self.bugzilla = bugzilla.Bugzilla(url=url)
    # Only log in when both credentials are configured; otherwise proceed
    # with an anonymous connection.
    if username and password:
        self.debug("Logging in to %s" % url)
        self.bugzilla.login(username, password)
    else:
        self.debug("No credentials found. Not logging in to %s" % url)

    self.debug("Initialized bz2fm STOMP consumer.")
def run(self):
    """ Rebroadcast a single datagrepper-archived message onto the bus.

    Looks up the message identified by --msg-id in datagrepper, then
    republishes it through the fedmsg-relay with its original
    modname/topic.

    Exits with status 1 when --msg-id is missing and 2 when the
    datagrepper lookup fails.
    """
    # Validate required arguments *before* initializing fedmsg, so a bad
    # invocation fails fast without touching the bus.
    idx = self.config.get("msg_id")
    if not idx:
        print("--msg-id is required")
        sys.exit(1)

    # Talk to the fedmsg-relay.
    self.config["active"] = True
    self.config["name"] = "relay_inbound"
    fedmsg.init(**self.config)

    print("Retrieving %r" % idx)
    url = self.config["datagrepper_url"] + "/id"
    resp = requests.get(url, params={"id": idx})
    if resp.status_code != 200:
        print("datagrepper request of %r failed. Status: %r" % (
            idx, resp.status_code))
        sys.exit(2)

    msg = resp.json()
    # Split the topic into modname + topic, skipping the first three
    # components (assumed to be a fixed prefix -- TODO confirm against the
    # deployed topic scheme).
    tokens = msg["topic"].split(".")
    modname = tokens[3]
    topic = ".".join(tokens[4:])

    print("Broadcasting %r" % idx)
    fedmsg.publish(modname=modname, topic=topic, msg=msg["msg"])
    print("OK.")
def announce(**kwargs):
    """ Emit an announcement message to the FI bus.

    Example::

        $ echo "Fedora Core 4 has been declared GOLD" | fedmsg-announce \
                --link http://fedoraproject.org/news

    Technically this command is a simpler version of fedmsg-logger that emits
    on a special topic.  It is expected that :term:`routing_policy` is
    specified such that only restricted parties can issue fedmsg
    announcements.

    This command expects its message to come from stdin.
    """
    # Sign with the special "announce" certificate.  Sysadmins must ensure:
    #   1) only appropriate persons can read the announce cert, and
    #   2) routing_policy accepts "announce.announcement" messages only when
    #      signed by that cert.
    kwargs['cert_prefix'] = "announce"

    # Publish through the fedmsg-relay.
    kwargs['active'] = True
    fedmsg.init(name='relay_inbound', **kwargs)

    # Slurp stdin; include the --link, even if it is None.
    body = "\n".join([line.strip() for line in sys.stdin.readlines()])

    # Fire!
    fedmsg.publish(
        modname="announce",
        topic="announcement",
        msg=dict(message=body, link=kwargs['link']),
    )
def main(): # Prepare our context and publisher fedmsg.init(name="bodhi.marat") # Probabilities of us emitting an event on each topic. probs = { 'bodhi': 0.015 * FACTOR, 'fedoratagger': 0.001 * FACTOR, 'pkgdb': 0.001 * FACTOR, 'fas': 0.005 * FACTOR, 'mediawiki': 0.01 * FACTOR, 'git': 0.01 * FACTOR, } # Main loop i = 0 while True: for service, thresh in probs.iteritems(): if random.random() < thresh: print service, thresh fedmsg.send_message( topic='fake_data', msg={'test': "Test data." + str(i)}, modname=service, ) i = i + 1 time.sleep(random.random())
def watch(self):
    """ Tail the fedmsg bus and run hook scripts for each matching topic. """
    config = fedmsg.config.load_config()
    # mute=True: we only consume here, never publish.
    fedmsg.init(mute=True, **config)
    fedmsg.meta.make_processors(**config)

    for name, endpoint, topic, msg in fedmsg.tail_messages():
        log.debug("received topic: {topic}".format(topic=topic))
        if not topic in self.topics:
            continue
        log.debug("match topic {topic}=>{data}".format(topic=topic, data=msg['msg']))
        # Build the positional argument list handed to each hook script.
        pargs = [topic]
        for parg in self.topics[topic]['args']:
            if hasattr(parg, '__call__'):
                # run this as fedmsg.meta function
                pargs.append(parg(msg, **config))
            elif '/' in parg:
                # this is a dpath expression
                try:
                    # Python 2 iterator protocol; take only the first match.
                    path, val = dpath.util.search(msg, parg, yielded=True).next()
                    pargs.append(val)
                except StopIteration:
                    log.warning("Path {parg} does not exist in {topic}. Substituting empty string"
                                .format(parg=parg, topic=topic))
                    pargs.append('')
            elif parg in msg:
                # Plain top-level key lookup.
                pargs.append(msg[parg])
            else:
                log.warning("Path {parg} does not exist in {topic}. Substituting empty string"
                            .format(parg=parg, topic=topic))
                pargs.append('')
        self.__run_scripts(self.script_dir, pargs)
def logger(**kwargs):
    """ Emit log messages to the FI bus.

    If the fedmsg-relay service is not running at the address specified in
    the config, then this command will *hang* until that service becomes
    available.

    If --message is not specified, this command accepts messages from stdin.

    Some examples::

        $ echo '{"a": 1}' | fedmsg-logger --json-input
        $ echo "Hai there." | fedmsg-logger --modname=git --topic=repo.update
        $ fedmsg-logger --message="This is a message."
        $ fedmsg-logger --message='{"a": 1}' --json-input

    """
    # Publish through the fedmsg-relay rather than our own endpoints.
    kwargs['active'] = True
    fedmsg.init(name='relay_inbound', **kwargs)
    if kwargs.get('logger_message'):
        # One-shot: the message came in on the command line.
        _log_message(kwargs, kwargs.get('logger_message'))
    else:
        # Stream: relay each stdin line until EOF.
        line = sys.stdin.readline()
        while line:
            _log_message(kwargs, line.strip())
            line = sys.stdin.readline()
def init(active=None, cert_prefix=None):
    """ Initialize fedmsg for publishing.

    Args:
        active (bool or None): If True, publish messages to a relay. If
            False, publish messages to connected consumers.
        cert_prefix (basestring): Configures the ``cert_prefix`` setting in
            the fedmsg_config.
    """
    if not bodhi.server.config.config.get('fedmsg_enabled'):
        bodhi.server.log.warning("fedmsg disabled. not initializing.")
        return

    settings = fedmsg.config.load_config()

    # Only override config from disk if explicitly argued.
    if active is None:
        # Publishing directly to consumers: name the endpoint after the host.
        short_host = socket.gethostname().split('.', 1)[0]
        settings['name'] = 'bodhi.%s' % short_host
    else:
        settings['active'] = active
        settings['name'] = 'relay_inbound'

    if cert_prefix is not None:
        settings['cert_prefix'] = cert_prefix

    fedmsg.init(**settings)
    bodhi.server.log.info("fedmsg initialized")
def run(shmelf):
    """ Thread target: publish ``secret`` once over a fresh fedmsg context.

    NOTE(review): ``self``, ``secret``, and ``copy`` are resolved from the
    enclosing scope via closure, not from ``shmelf`` -- presumably
    deliberate in this test harness; confirm against the caller.
    """
    # Deep-copy so this thread's fedmsg context can't mutate shared config.
    config = copy.deepcopy(self.config)
    import fedmsg
    fedmsg.init(**config)
    fedmsg.publish(topic=self.topic, msg=secret, modname="threadtest")
def __init__(self, *args, **kwargs): super(FedmsgPlugin, self).__init__(*args, **kwargs) # If fedmsg was already initialized, let's not re-do that. if not getattr(getattr(fedmsg, '__local', None), '__context', None): config = fedmsg.config.load_config() config['active'] = True fedmsg.init(name='relay_inbound', cert_prefix='trac', **config)
def __init__(self, topic, feed):
    """ Remember the topic/feed pair and ensure fedmsg is initialized. """
    self.topic = topic
    self.feed = feed
    # fedmsg.init() raises ValueError when a context already exists; that
    # just means someone beat us to it, so swallow it.
    try:
        fedmsg.init()
    except ValueError:
        pass
def __init__(self):
    """ Load the fedmsg config and initialize publishing through the relay. """
    settings = fedmsg.config.load_config()
    # Publish via the fedmsg-relay, signing with the 'shell' certificate.
    settings['name'] = 'relay_inbound'
    settings['cert_prefix'] = 'shell'
    settings['active'] = True
    fedmsg.init(**settings)
def __init__(self, log=None):
    """ Fedmsg-backed message bus publisher.

    :param log: optional logger handed through to the parent bus class.
    """
    # Hack to not require opts argument for now.
    opts = type('', (), {})
    opts.headers = {}

    super(MsgBusFedmsg, self).__init__(opts, log)

    # Publish through the fedmsg-relay, signing with the 'copr' certificate.
    fedmsg.init(name='relay_inbound', cert_prefix='copr', active=True)
def tail(**kw): """ Watch all endpoints on the bus and print each message to stdout. """ # Disable sending kw['publish_endpoint'] = None # Disable timeouts. We want to tail forever! kw['timeout'] = 0 # Even though fedmsg-tail won't be sending any messages, give it a name to # conform with the other commands. kw['name'] = 'relay_inbound' # Tail is never going to send any messages, so we suppress warnings about # having no publishing sockets established. kw['mute'] = True fedmsg.init(**kw) fedmsg.text.make_processors(**kw) # Build a message formatter formatter = lambda d: d if kw['pretty']: def formatter(d): d['timestamp'] = time.ctime(d['timestamp']) d = fedmsg.crypto.strip_credentials(d) return "\n" + pprint.pformat(d) if kw['really_pretty']: def formatter(d): d = fedmsg.crypto.strip_credentials(d) fancy = pygments.highlight( fedmsg.encoding.pretty_dumps(d), pygments.lexers.JavascriptLexer(), pygments.formatters.TerminalFormatter()).strip() return "\n" + fancy if kw['terse']: formatter = lambda d: "\n" + fedmsg.text.msg2repr(d, **kw) exclusive_regexp = re.compile(kw['exclusive_regexp']) inclusive_regexp = re.compile(kw['inclusive_regexp']) # The "proper" fedmsg way to do this would be to spin up or connect to an # existing Moksha Hub and register a consumer on the "*" topic that simply # prints out each message it consumes. That seems like overkill, so we're # just going to directly access the endpoints ourself. for name, ep, topic, message in fedmsg.tail_messages(**kw): if exclusive_regexp.search(topic): continue if not inclusive_regexp.search(topic): continue print name, ep, topic, formatter(message)
def tail(**kw): """ Watch all endpoints on the bus and print each message to stdout. """ # Disable sending kw['publish_endpoint'] = None # Disable timeouts. We want to tail forever! kw['timeout'] = 0 # Even though fedmsg-tail won't be sending any messages, give it a name to # conform with the other commands. kw['name'] = 'relay_inbound' # Tail is never going to send any messages, so we suppress warnings about # having no publishing sockets established. kw['mute'] = True fedmsg.init(**kw) fedmsg.text.make_processors(**kw) # Build a message formatter formatter = lambda d: d if kw['pretty']: def formatter(d): d['timestamp'] = time.ctime(d['timestamp']) d = fedmsg.crypto.strip_credentials(d) return "\n" + pprint.pformat(d) if kw['really_pretty']: def formatter(d): d = fedmsg.crypto.strip_credentials(d) fancy = pygments.highlight( fedmsg.encoding.pretty_dumps(d), pygments.lexers.JavascriptLexer(), pygments.formatters.TerminalFormatter() ).strip() return "\n" + fancy if kw['terse']: formatter = lambda d: "\n" + fedmsg.text.msg2repr(d, **kw) exclusive_regexp = re.compile(kw['exclusive_regexp']) inclusive_regexp = re.compile(kw['inclusive_regexp']) # The "proper" fedmsg way to do this would be to spin up or connect to an # existing Moksha Hub and register a consumer on the "*" topic that simply # prints out each message it consumes. That seems like overkill, so we're # just going to directly access the endpoints ourself. for name, ep, topic, message in fedmsg.tail_messages(**kw): if exclusive_regexp.search(topic): continue if not inclusive_regexp.search(topic): continue print name, ep, topic, formatter(message)
def status(**kwargs): """ Check the status of nodes on the bus. """ # Disable sending fedmsg.init(**kwargs) status = fedmsg.have_pulses(**kwargs) for endpoint, success in status.iteritems(): print "[%s] %s" % (_colorize(success), endpoint)
def fedmsg_init():
    """ Initialize fedmsg publishing, warning and bailing if it is absent. """
    try:
        import fedmsg
        import fedmsg.config
    except ImportError:
        # fedmsg support is optional; degrade gracefully.
        warnings.warn("fedmsg ImportError")
        return

    settings = fedmsg.config.load_config()
    settings['active'] = True
    fedmsg.init(**settings)
def init_fedmsg(self):
    """
    Initialize Fedmsg
    (this assumes there are certs and a fedmsg config on disk).
    """
    enabled = self.opts.fedmsg_enabled and fedmsg
    if not enabled:
        return

    try:
        # Publish via the relay, signing with the 'copr' certificate.
        fedmsg.init(name="relay_inbound", cert_prefix="copr", active=True)
    except Exception as e:
        # Publishing is best-effort: log and carry on.
        self.log.exception("Failed to initialize fedmsg: {}".format(e))
def run(self):
    """ Publish --message if given, otherwise relay stdin line by line. """
    # Talk to the fedmsg-relay.
    self.config['active'] = True
    self.config['name'] = 'relay_inbound'
    fedmsg.init(**self.config)

    message = self.config.get('logger_message')
    if message:
        self._log_message(self.config, message)
    else:
        # Stream stdin until EOF.
        for line in iter(sys.stdin.readline, ''):
            self._log_message(self.config, line.strip())
def fedmsg_init():
    """ Initialize fedmsg for fedocal, warning instead of failing if absent. """
    try:
        import fedmsg
        import fedmsg.config
    except ImportError:
        warnings.warn("fedmsg ImportError")
        return

    settings = fedmsg.config.load_config()
    # Publish via the relay, signing with fedocal's certificate.
    settings.update({
        'active': True,
        'name': 'relay_inbound',
        'cert_prefix': 'fedocal',
    })
    fedmsg.init(**settings)
def init_fedmsg(self):
    """
    Initialize Fedmsg
    (this assumes there are certs and a fedmsg config on disk)
    """
    # Skip entirely when disabled by config or when the fedmsg import failed.
    if not (self.opts.fedmsg_enabled and fedmsg):
        return

    try:
        # Publish via the relay, signing with the 'copr' certificate.
        fedmsg.init(name="relay_inbound", cert_prefix="copr", active=True)
    except Exception as e:
        # Publishing is best-effort: log the failure and keep running.
        self.log.exception("Failed to initialize fedmsg: {}".format(e))
def __init__(self):
    """ Point fedmsg at the relay, tolerating repeat initialization. """
    settings = fedmsg.config.load_config()
    settings['name'] = 'relay_inbound'
    settings['cert_prefix'] = 'shell'
    settings['active'] = True

    # Recursive playbooks appear to call this repeatedly, and fedmsg
    # refuses to be initialized more than once -- so just swallow the
    # "already initialized" ValueError.
    try:
        fedmsg.init(**settings)
    except ValueError:
        pass
def fedmsg_init():
    """ Instantiate fedmsg """
    try:
        import fedmsg
        import fedmsg.config
    except ImportError:
        # fedmsg is an optional dependency; warn and carry on without it.
        warnings.warn("fedmsg ImportError")
        return

    config = fedmsg.config.load_config()
    # Publish through the fedmsg-relay, signed with the fedocal certificate.
    config['active'] = True
    config['name'] = 'relay_inbound'
    config['cert_prefix'] = 'fedocal'
    fedmsg.init(**config)
def main(self):
    """ Consume bus messages matching our topic prefix until the connection
    drops, then reset the fedmsg context.
    """
    try:
        for _, _, topic, msg in fedmsg.tail_messages():
            # Let the service watchdog know we're still alive.
            self.notify_watchdog()
            try:
                if topic.startswith(get_config('fedmsg.topic') + '.'):
                    self.consume(topic, msg)
                plugin.dispatch_event('fedmsg_event', topic, msg,
                                      db=self.db,
                                      koji_sessions=self.koji_sessions)
            finally:
                # Never leave a transaction open between messages.
                self.db.rollback()
    except requests.exceptions.ConnectionError:
        self.log.exception("Fedmsg watcher exception.")
        # Tear down and re-create the fedmsg context.
        # NOTE(review): presumably the caller re-invokes main() after this
        # returns -- confirm at the call site.
        fedmsg.destroy()
        fedmsg.init()
def __init__(self, irc): super(Fedmsg, self).__init__(irc) # If fedmsg was already initialized, let's not re-do that. if getattr(getattr(fedmsg, '__local', None), '__context', None): print "Not reinitializing fedmsg." else: # Initialize fedmsg resources. hostname = socket.gethostname().split('.', 1)[0] fedmsg.init(name="supybot." + hostname) # Launch in a thread to duckpunch *after* the other plugins # have been set up. thread = Injector() thread.start()
def init(active=None, cert_prefix=None):
    """ Initialize fedmsg for publishing, honoring the 'fedmsg_enabled' flag.

    :param active: when not None, overrides the on-disk 'active' setting and
        routes publishing through the relay.
    :param cert_prefix: when not None, overrides the signing cert prefix.
    """
    if not bodhi.config.config.get('fedmsg_enabled'):
        bodhi.log.warn("fedmsg disabled. not initializing.")
        return

    fedmsg_config = fedmsg.config.load_config()

    # Only override config from disk if explicitly argued.
    if active is not None:
        fedmsg_config['active'] = active
        fedmsg_config['name'] = 'relay_inbound'

    if cert_prefix is not None:
        fedmsg_config['cert_prefix'] = cert_prefix

    fedmsg.init(**fedmsg_config)
    bodhi.log.info("fedmsg initialized")
def fedmsg_publish(msg, modname=C.DEFAULT_FEDMSG_MODNAME, topic=C.DEFAULT_FEDMSG_TOPIC):
    '''
    publish message to fedmsg bus

    :param msg: JSON message
    :param modname: Message topic name
    :param topic: Message topic
    '''
    # Silently no-op when the fedmsg import failed at module load time.
    if not HAVE_FEDMSG:
        return

    hostname = socket.gethostname().split('.', 1)[0]
    # NOTE(review): the literal "modname" below does not interpolate the
    # ``modname`` argument -- the endpoint name was presumably meant to be
    # "<modname>.<hostname>".  Confirm against the fedmsg endpoint config
    # before changing.
    fedmsg.init(name="modname.{0}".format(hostname))
    fedmsg.publish(
        modname=modname,
        topic=topic,
        msg=msg,
    )
def send_fedmsg_notifications(project, topic, msg):
    ''' If the user asked for fedmsg notifications on commit, this will
    do it.
    '''
    import fedmsg

    # Point fedmsg at the relay's inbound endpoint and publish through it.
    config = fedmsg.config.load_config([], None)
    config['active'] = True
    config['endpoints']['relay_inbound'] = config['relay_inbound']
    fedmsg.init(name='relay_inbound', **config)

    pagure.lib.notify.log(
        project=project,
        topic=topic,
        msg=msg,
        redis=None,  # web-hook notification are handled separately
    )
def __init__(self, hub):
    """ Set up the consumer: fedmsg context, Bugzilla and Koji helpers,
    pkgdb/anitya settings, the metadata cache, and yum repo options.
    """
    super(BugzillaTicketFiler, self).__init__(hub)

    if not self._initialized:
        return

    # This is just convenient.
    self.config = self.hub.config

    # First, initialize fedmsg and bugzilla in this thread's context.
    hostname = socket.gethostname().split('.', 1)[0]
    fedmsg.init(name='hotness.%s' % hostname)
    fedmsg.meta.make_processors(**self.hub.config)

    self.bugzilla = hotness.bz.Bugzilla(
        consumer=self, config=self.config['hotness.bugzilla'])
    self.buildsys = hotness.buildsys.Koji(
        consumer=self, config=self.config['hotness.koji'])

    default = 'https://admin.fedoraproject.org/pkgdb/api'
    self.pkgdb_url = self.config.get('hotness.pkgdb_url', default)

    anitya_config = self.config.get('hotness.anitya', {})
    default = 'https://release-monitoring.org'
    self.anitya_url = anitya_config.get('url', default)
    # BUGFIX: username/password previously defaulted to the anitya *URL*,
    # which would silently send the URL string as credentials when they
    # were unconfigured.  Default to None so missing creds are detectable.
    self.anitya_username = anitya_config.get('username', None)
    self.anitya_password = anitya_config.get('password', None)

    # Also, set up our global cache object.
    self.log.info("Configuring cache.")
    with hotness.cache.cache_lock:
        if not hasattr(hotness.cache.cache, 'backend'):
            hotness.cache.cache.configure(**self.config['hotness.cache'])

    self.yumconfig = self.config.get('hotness.yumconfig')
    self.log.info("Using hotness.yumconfig=%r" % self.yumconfig)
    self.repoid = self.config.get('hotness.repoid', 'rawhide')
    self.log.info("Using hotness.repoid=%r" % self.repoid)
    self.distro = self.config.get('hotness.distro', 'Fedora')
    self.log.info("Using hotness.distro=%r" % self.distro)

    # Build a little store where we'll keep track of what koji scratch
    # builds we have kicked off.  We'll look later for messages indicating
    # that they have completed.
    self.triggered_task_ids = {}

    self.log.info("That new hotness ticket filer is all initialized")
def send_fedmsg_notifications(project, topic, msg):
    """ If the user asked for fedmsg notifications on commit, this will
    do it.
    """
    import fedmsg

    # Route publishing through the fedmsg-relay's inbound endpoint.
    settings = fedmsg.config.load_config([], None)
    settings["active"] = True
    settings["endpoints"]["relay_inbound"] = settings["relay_inbound"]
    fedmsg.init(name="relay_inbound", **settings)

    pagure.lib.notify.log(
        project=project,
        topic=topic,
        msg=msg,
        redis=None,  # web-hook notification are handled separately
    )
def main(self):
    """ Pump bus messages into consume() and plugins until the connection
    dies, then recycle the fedmsg context.
    """
    try:
        for _, _, topic, msg in fedmsg.tail_messages():
            # Keep the service watchdog fed.
            self.notify_watchdog()
            try:
                if topic.startswith(self.topic_name + '.'):
                    self.consume(topic, msg)
                plugin.dispatch_event(
                    'fedmsg_event', topic, msg,
                    db=self.db, koji_session=self.koji_session,
                )
            finally:
                # Always roll back between messages.
                self.db.rollback()
    except requests.exceptions.ConnectionError:
        self.log.exception("Fedmsg watcher exception.")
        # Rebuild the fedmsg context so a subsequent call starts clean.
        fedmsg.destroy()
        fedmsg.init()
def run(self):
    """ Read an announcement from stdin and publish it with the announce cert. """
    # Sign with the special "announce" certificate.  Sysadmins are
    # responsible for ensuring that:
    #   1) the announce cert is readable only by appropriate persons, and
    #   2) routing_policy treats "announce.announcement" messages as valid
    #      only when signed by that certificate.
    self.config['cert_prefix'] = "announce"

    # Talk to the fedmsg-relay.
    self.config['active'] = True
    self.config['name'] = 'relay_inbound'
    fedmsg.init(**self.config)

    # Slurp stdin; include --link, even if it is None.
    body = "\n".join([chunk.strip() for chunk in sys.stdin.readlines()])

    # Fire!
    fedmsg.publish(
        modname="announce",
        topic="announcement",
        msg=dict(message=body, link=self.config['link']),
    )
def send_fedmsg_notifications(project, topic, msg):
    """ If the user or admin asked for fedmsg notifications on commit, this
    will do it.
    """
    fedmsg_hook = pagure.lib.plugins.get_plugin("Fedmsg")
    fedmsg_hook.db_object()

    # Site-wide override: notify on every commit regardless of project opt-in.
    always_fedmsg = _config.get("ALWAYS_FEDMSG_ON_COMMITS") or None

    # Send fedmsg and fedora-messaging notification
    # (if fedmsg and fedora-messaging are there and set-up)
    if always_fedmsg or (project.fedmsg_hook and project.fedmsg_hook.active):
        if _config.get("FEDMSG_NOTIFICATIONS", True):
            try:
                global FEDMSG_INIT
                print(" - to fedmsg")
                import fedmsg

                config = fedmsg.config.load_config([], None)
                config["active"] = True
                config["endpoints"]["relay_inbound"] = config["relay_inbound"]
                # Initialize fedmsg at most once per process.
                if not FEDMSG_INIT:
                    fedmsg.init(name="relay_inbound", **config)
                    FEDMSG_INIT = True

                pagure.lib.notify.fedmsg_publish(topic=topic, msg=msg)
            except Exception:
                _log.exception(
                    "Error sending fedmsg notifications on commit push"
                )
        if _config.get("FEDORA_MESSAGING_NOTIFICATIONS", False):
            try:
                print(" - to fedora-message")
                pagure.lib.notify.fedora_messaging_publish(topic, msg)
            except Exception:
                _log.exception(
                    "Error sending fedora-messaging notifications on "
                    "commit push"
                )
def send_fedmsg_notifications(project, topic, msg):
    """ If the user or admin asked for fedmsg notifications on commit, this
    will do it.
    """
    fedmsg_hook = pagure.lib.plugins.get_plugin("Fedmsg")
    fedmsg_hook.db_object()

    # Global override that enables commit notifications for every project.
    always_fedmsg = _config.get("ALWAYS_FEDMSG_ON_COMMITS") or None

    # Send fedmsg and fedora-messaging notification
    # (if fedmsg and fedora-messaging are there and set-up)
    if always_fedmsg or (project.fedmsg_hook and project.fedmsg_hook.active):
        if _config.get("FEDMSG_NOTIFICATIONS", True):
            try:
                global FEDMSG_INIT
                print(" - to fedmsg")
                import fedmsg

                config = fedmsg.config.load_config([], None)
                config["active"] = True
                config["endpoints"]["relay_inbound"] = config["relay_inbound"]
                # One-time, process-wide fedmsg initialization.
                if not FEDMSG_INIT:
                    fedmsg.init(name="relay_inbound", **config)
                    FEDMSG_INIT = True

                pagure.lib.notify.fedmsg_publish(topic=topic, msg=msg)
            except Exception:
                _log.exception(
                    "Error sending fedmsg notifications on commit push")
        if _config.get("FEDORA_MESSAGING_NOTIFICATIONS", False):
            try:
                print(" - to fedora-message")
                pagure.lib.notify.fedora_messaging_publish(topic, msg)
            except Exception:
                _log.exception(
                    "Error sending fedora-messaging notifications on "
                    "commit push")
def watch(self):
    """ Consume the fedmsg bus and invoke hook scripts for watched topics. """
    config = fedmsg.config.load_config()
    # Consume-only: mute publishing warnings.
    fedmsg.init(mute=True, **config)
    fedmsg.meta.make_processors(**config)

    for name, endpoint, topic, msg in fedmsg.tail_messages():
        log.debug("received topic: {topic}".format(topic=topic))
        if not topic in self.topics:
            continue
        log.debug("match topic {topic}=>{data}".format(topic=topic, data=msg['msg']))
        # Assemble the positional args passed to the hook scripts.
        pargs = [topic]
        for parg in self.topics[topic]['args']:
            if hasattr(parg, '__call__'):
                # run this as fedmsg.meta function
                pargs.append(parg(msg, **config))
            elif '/' in parg:
                # this is a dpath expression
                try:
                    # Python 2 generator protocol; only the first hit is used.
                    path, val = dpath.util.search(msg, parg, yielded=True).next()
                    pargs.append(val)
                except StopIteration:
                    log.warning(
                        "Path {parg} does not exist in {topic}. Substituting empty string"
                        .format(parg=parg, topic=topic))
                    pargs.append('')
            elif parg in msg:
                # Simple top-level key lookup.
                pargs.append(msg[parg])
            else:
                log.warning(
                    "Path {parg} does not exist in {topic}. Substituting empty string"
                    .format(parg=parg, topic=topic))
                pargs.append('')
        self.__run_scripts(self.script_dir, pargs)
def run(self):
    """ Tail the bus and run --command on every message whose topic passes
    the inclusive/exclusive regexp filters.
    """
    # This is a "required" option... :P
    if not self.config['command']:
        self.log.error("You must provide a --command to run.")
        sys.exit(1)

    # Consume-only setup: no publishing endpoint, no timeout (tail
    # forever), a conventional name, and muted "no publish socket"
    # warnings.
    self.config['publish_endpoint'] = None
    self.config['timeout'] = 0
    self.config['name'] = 'relay_inbound'
    self.config['mute'] = True

    fedmsg.init(**self.config)

    skip = re.compile(self.config['exclusive_regexp']).search
    keep = re.compile(self.config['inclusive_regexp']).search

    for name, ep, topic, message in fedmsg.tail_messages(**self.config):
        if skip(topic) or not keep(topic):
            continue
        result = self.run_command(self.config['command'], message)
        if result != 0:
            self.log.info("Command returned error code %r" % result)
def configure(self, config):
    """ Validate the fedmsg.* keys from the CKAN config and initialize fedmsg.

    The converted settings are stashed in the module-global ``fedmsg_config``.
    """
    hostname = socket.gethostname().split('.')[0]
    global fedmsg_config
    # Strip the "fedmsg." prefix off the relevant config keys and validate
    # each field, supplying defaults where allowed.
    fedmsg_config = conv.check(conv.struct(dict(
        environment = conv.pipe(
            conv.empty_to_none,
            conv.test_in(['dev', 'prod', 'stg']),
            conv.default('dev'),
            ),
        modname = conv.pipe(
            conv.empty_to_none,
            conv.test(lambda value: value == value.strip('.'),
                      error = 'Value must not begin or end with a "."'),
            conv.default('ckan'),
            ),
        name = conv.pipe(
            conv.empty_to_none,
            conv.default('ckan.{}'.format(hostname)),
            ),
        topic_prefix = conv.pipe(
            conv.empty_to_none,
            conv.test(lambda value: value == value.strip('.'),
                      error = 'Value must not begin or end with a "."'),
            conv.not_none,
            ),
        )))(dict(
            (key[len('fedmsg.'):], value)
            for key, value in config.iteritems()
            if key.startswith('fedmsg.')
        ))
    #fedmsg.init(**fedmsg_config)
    # Publish through the relay; 'name' is overridden and None values dropped.
    fedmsg.init(active = True, name = 'relay_inbound', **dict(
        (key, value)
        for key, value in fedmsg_config.iteritems()
        if key != 'name' and value is not None
        ))
def logger(**kwargs):
    """ Emit log messages to the FI bus.

    If the fedmsg-relay service is not running at the address specified in
    the config, then this command will *hang* until that service becomes
    available.

    If --message is not specified, this command accepts messages from stdin.

    Some examples::

        $ echo '{"a": 1}' | fedmsg-logger --json-input
        $ echo "Hai there." | fedmsg-logger --modname=git --topic=repo.update
        $ fedmsg-logger --message="This is a message."
        $ fedmsg-logger --message='{"a": 1}' --json-input

    Note that the python JSON parser is picky about the format of messages if
    you're using the --json-input option.  Double-quotes must be on the
    "inside" of the string and single quotes must be on the outside::

        '{"a": 1}' is good.
        "{'a': 1}" is bad.

    """
    # Publish through the fedmsg-relay.
    kwargs['active'] = True
    fedmsg.init(name='relay_inbound', **kwargs)
    if kwargs.get('logger_message'):
        # Single message supplied via --message.
        _log_message(kwargs, kwargs.get('logger_message'))
    else:
        # Relay stdin line-by-line until EOF.
        line = sys.stdin.readline()
        while line:
            _log_message(kwargs, line.strip())
            line = sys.stdin.readline()
def tail(**kw): """ Watch the bus. """ # Disable sending kw['publish_endpoint'] = None # Disable timeouts. We want to tail forever! kw['timeout'] = 0 fedmsg.init(**kw) # Build a message formatter formatter = lambda d: d if kw['pretty']: def formatter(d): d['timestamp'] = time.ctime(d['timestamp']) return "\n" + pprint.pformat(d) if kw['really_pretty']: def formatter(d): fancy = pygments.highlight( fedmsg.json.pretty_dumps(d), pygments.lexers.JavascriptLexer(), pygments.formatters.TerminalFormatter() ).strip() return "\n" + fancy # The "proper" fedmsg way to do this would be to spin up or connect to an # existing Moksha Hub and register a consumer on the "*" topic that simply # prints out each message it consumes. That seems like overkill, so we're # just going to directly access the endpoints ourself. # TODO -- colors? # TODO -- tabular layout? for name, ep, topic, message in fedmsg.__context._tail_messages(**kw): if '_heartbeat' in topic: continue print name, ep, topic, formatter(message)
def tail(**kw): """ Watch the bus. """ # Disable sending kw["publish_endpoint"] = None # Disable timeouts. We want to tail forever! kw["timeout"] = 0 fedmsg.init(**kw) # Build a message formatter formatter = lambda d: d if kw["pretty"]: def formatter(d): d["timestamp"] = time.ctime(d["timestamp"]) return "\n" + pprint.pformat(d) if kw["really_pretty"]: def formatter(d): fancy = pygments.highlight( fedmsg.json.pretty_dumps(d), pygments.lexers.JavascriptLexer(), pygments.formatters.TerminalFormatter() ).strip() return "\n" + fancy # The "proper" fedmsg way to do this would be to spin up or connect to an # existing Moksha Hub and register a consumer on the "*" topic that simply # prints out each message it consumes. That seems like overkill, so we're # just going to directly access the endpoints ourself. # TODO -- colors? # TODO -- tabular layout? for name, ep, topic, message in fedmsg.__context._tail_messages(**kw): if "_heartbeat" in topic: continue print name, ep, topic, formatter(message)
def logger(**kwargs):
    """ Emit log messages to the FI bus.

    If the fedmsg-relay service is not running at the address specified in
    the config, then this command will *hang* until that service becomes
    available.

    If --message is not specified, this command accepts messages from stdin.

    Some examples::

        $ echo '{"a": 1}' | fedmsg-logger --json-input
        $ echo "Hai there." | fedmsg-logger --modname=git --topic=repo.update
        $ fedmsg-logger --message="This is a message."
        $ fedmsg-logger --message='{"a": 1}' --json-input

    Note that the python JSON parser is picky about the format of messages if
    you're using the --json-input option.  Double-quotes must be on the
    "inside" of the string and single quotes must be on the outside::

        '{"a": 1}' is good.
        "{'a': 1}" is bad.

    """
    # Publish through the fedmsg-relay.
    kwargs["active"] = True
    fedmsg.init(name="relay_inbound", **kwargs)
    if kwargs.get("logger_message"):
        # Single message supplied via --message.
        _log_message(kwargs, kwargs.get("logger_message"))
    else:
        # Relay stdin line-by-line until EOF.
        line = sys.stdin.readline()
        while line:
            _log_message(kwargs, line.strip())
            line = sys.stdin.readline()
def run(shmelf):
    """ Init/destroy fedmsg three times in a row to prove re-init works. """
    # ``self`` is resolved from the enclosing scope, not from ``shmelf``.
    config = copy.deepcopy(self.config)
    import fedmsg
    for _ in range(3):
        fedmsg.init(**config)
        fedmsg.destroy()
    self.test_reinit_success = True
def run(self):
    """ Tail the bus and batch matching messages through --command.

    Messages are queued; the command runs after --wait-for seconds of
    quiet, or immediately once the queue exceeds --max-queue-size.
    """
    # This is a "required" option... :P
    if not self.config['command']:
        self.log.error("You must provide a --command to run.")
        sys.exit(1)

    # Disable sending
    self.config['publish_endpoint'] = None

    # Disable timeouts. We want to tail forever!
    self.config['timeout'] = 0

    # Even though fedmsg-trigger won't be sending any messages, give it a
    # name to conform with the other commands.
    self.config['name'] = 'relay_inbound'

    # Tail is never going to send any messages, so we suppress warnings
    # about having no publishing sockets established.
    self.config['mute'] = True

    fedmsg.init(**self.config)

    exclusive_regexp = re.compile(self.config['exclusive_regexp'])
    inclusive_regexp = re.compile(self.config['inclusive_regexp'])

    # Debounce window and queue-size pressure valve.
    wait_for = int(self.config['wait_for'])
    max_queue_size = int(self.config['max_queue_size'])

    timer = None
    que = queue.Queue()

    def execute_queue():
        # Drain the queue, running the command once per message.
        while not que.empty():
            message = que.get()
            result = self.run_command(self.config['command'], message)
            if result != 0:
                self.log.info("Command returned error code %r" % result)

    try:
        for name, ep, topic, message in fedmsg.tail_messages(**self.config):
            if exclusive_regexp.search(topic):
                continue
            if not inclusive_regexp.search(topic):
                continue

            que.put(message)

            if timer is not None:
                # Try to cancel it
                timer.cancel()
            if timer is None or not timer.is_alive():
                # Either there was no timer yet, or it was still waiting
                # -> Let's start a new one
                if (max_queue_size > 0 and que.qsize() > max_queue_size):
                    # If the que is too big, let's just run it NOW
                    timer = threading.Timer(0, execute_queue)
                else:
                    timer = threading.Timer(wait_for, execute_queue)
                timer.start()
    except KeyboardInterrupt:
        if timer is not None:
            timer.cancel()
            # Let's wait for commands to finish
            timer.join()
def run(self, cmdline, db):
    """ Publish fedmsg notifications for reports/problems whose cumulative
    occurrence count crossed a power-of-ten threshold on ``cmdline.date``.

    For each report (with ``--reports``) and each problem (with
    ``--problems``), compare the count summed up to yesterday against the
    count summed up to today; for every threshold level newly reached,
    publish a ``report.threshold<N>`` / ``problem.threshold<N>`` message.
    """
    # Threshold levels: 1, 10, 100, ... 1,000,000.
    levels = tuple(10**n for n in range(7))

    fedmsg.init(name=self.fedmsg_name, environment=self.fedmsg_environment)

    if cmdline.reports:
        # Sum of counts until yesterday
        q_yesterday = (
            db.session
            .query(Report.id.label("y_report_id"),
                   func.sum(ReportHistoryDaily.count).label("sum_yesterday"))
            .outerjoin(ReportHistoryDaily)
            .filter(ReportHistoryDaily.day < cmdline.date)
            .group_by(Report.id)
            .subquery()
        )

        # Sum of counts until today
        q_today = (
            db.session
            .query(Report.id.label("t_report_id"),
                   func.sum(ReportHistoryDaily.count).label("sum_today"))
            .outerjoin(ReportHistoryDaily)
            .filter(ReportHistoryDaily.day <= cmdline.date)
            .group_by(Report.id)
            .subquery()
        )

        q = (db.session.query(Report,
                              q_today.c.sum_today,
                              q_yesterday.c.sum_yesterday)
             .outerjoin(q_today, Report.id == q_today.c.t_report_id)
             .outerjoin(q_yesterday, Report.id == q_yesterday.c.y_report_id)
             # NOTE(review): ``isnot(None) OR != 100`` is satisfied by every
             # non-NULL certainty, making the filter nearly a no-op;
             # presumably ``.is_(None)`` was intended ("no certainty yet, or
             # not fully certain") — TODO confirm.
             .filter(or_(Report.max_certainty.isnot(None),
                         Report.max_certainty != 100))
             # Keep only reports whose cumulative count changed today.
             # (``== None`` / ``!= None`` are intentional SQLAlchemy
             # NULL-comparison idioms, not Python identity checks.)
             .filter(or_(and_(q_yesterday.c.sum_yesterday == None,
                              q_today.c.sum_today != None),
                         q_today.c.sum_today != q_yesterday.c.sum_yesterday))
             )

        for db_report, sum_today, sum_yesterday in q.yield_per(100):
            # avoid None
            sum_yesterday = sum_yesterday or 0

            for level in levels:
                # Notify only on the day a level is first crossed.
                if sum_yesterday < level and sum_today >= level:
                    self.log_info("Notifying about report #{0} level {1}"
                                  .format(db_report.id, level))
                    msg = {
                        "report_id": db_report.id,
                        "function": db_report.crash_function,
                        "components": [db_report.component.name],
                        "first_occurrence": db_report.first_occurrence
                                            .strftime("%Y-%m-%d"),
                        "count": sum_today,
                        "type": db_report.type,
                        "level": level,
                    }
                    if web.webfaf_installed():
                        msg["url"] = web.reverse("reports.item",
                                                 report_id=db_report.id)
                    if db_report.problem_id:
                        msg["problem_id"] = db_report.problem_id
                    fedmsg.publish(
                        topic="report.threshold{0}".format(level),
                        modname='faf',
                        msg=msg)

    if cmdline.problems:
        # Same as above, but aggregated per Problem (via its Reports).
        # Sum of counts until yesterday
        q_yesterday = (
            db.session
            .query(Problem.id.label("y_problem_id"),
                   func.sum(ReportHistoryDaily.count).label("sum_yesterday"))
            .join(Report)
            .outerjoin(ReportHistoryDaily)
            .filter(ReportHistoryDaily.day < cmdline.date)
            .group_by(Problem.id)
            .subquery()
        )

        # Sum of counts until today
        q_today = (
            db.session
            .query(Problem.id.label("t_problem_id"),
                   func.sum(ReportHistoryDaily.count).label("sum_today"))
            .join(Report)
            .outerjoin(ReportHistoryDaily)
            .filter(ReportHistoryDaily.day <= cmdline.date)
            .group_by(Problem.id)
            .subquery()
        )

        q = (db.session
             .query(Problem,
                    q_today.c.sum_today,
                    q_yesterday.c.sum_yesterday)
             .outerjoin(q_today, Problem.id == q_today.c.t_problem_id)
             .outerjoin(q_yesterday, Problem.id == q_yesterday.c.y_problem_id)
             # Keep only problems whose cumulative count changed today.
             .filter(or_(and_(q_yesterday.c.sum_yesterday == None,
                              q_today.c.sum_today != None),
                         q_today.c.sum_today != q_yesterday.c.sum_yesterday))
             )

        for db_problem, sum_today, sum_yesterday in q.yield_per(100):
            # avoid None
            sum_yesterday = sum_yesterday or 0

            for level in levels:
                # Notify only on the day a level is first crossed.
                if sum_yesterday < level and sum_today >= level:
                    self.log_info("Notifying about problem #{0} level {1}"
                                  .format(db_problem.id, level))
                    msg = {
                        "problem_id": db_problem.id,
                        "function": db_problem.crash_function,
                        "components": db_problem.unique_component_names,
                        "first_occurrence": db_problem.first_occurrence
                                            .strftime("%Y-%m-%d"),
                        "count": sum_today,
                        "type": db_problem.type,
                        "level": level,
                    }
                    if web.webfaf_installed():
                        msg["url"] = web.reverse("problems.item",
                                                 problem_id=db_problem.id)
                    fedmsg.publish(
                        topic="problem.threshold{0}".format(level),
                        modname='faf',
                        msg=msg)
# Git hook entry point: point pagure at the system config if the caller
# did not specify one explicitly.
if 'PAGURE_CONFIG' not in os.environ \
        and os.path.exists('/etc/pagure/pagure.cfg'):
    os.environ['PAGURE_CONFIG'] = '/etc/pagure/pagure.cfg'

# Imported here (after PAGURE_CONFIG is set) so pagure picks up the right
# configuration at import time.
import pagure  # noqa: E402
import pagure.lib.git  # noqa: E402

abspath = os.path.abspath(os.environ['GIT_DIR'])

print("Emitting a message to the fedmsg bus.")

# Load the fedmsg config and wire the relay_inbound endpoint so that
# messages are routed through the fedmsg-relay service.
config = fedmsg.config.load_config([], None)
config['active'] = True
config['endpoints']['relay_inbound'] = config['relay_inbound']
fedmsg.init(name='relay_inbound', **config)

seen = []

# Read in all the rev information git-receive-pack hands us.
# Each line is "<oldrev> <newrev> <refname>".
for line in sys.stdin.readlines():
    (oldrev, newrev, refname) = line.strip().split(' ', 2)

    forced = False
    # An all-zero newrev means the ref is being deleted.
    if set(newrev) == set(['0']):
        print("Deleting a reference/branch, so we won't run the "
              "pagure hook")
        break
    # An all-zero oldrev means a brand-new ref; '^' negates the rev so the
    # rev-list range covers the new commits only.
    # NOTE(review): the loop body appears to continue past this chunk of
    # the file.
    elif set(oldrev) == set(['0']):
        print("New reference/branch")
        oldrev = '^%s' % oldrev