def handle(self, *args, **options):
    logger.info('Listening for package updates...')
    for name, endpoint, topic, msg in fedmsg.tail_messages():
        try:
            if topic.endswith('package.modified') or \
                    topic.endswith('package.removed') or \
                    topic.endswith('package.added'):
                pkg = msg['msg']['package']
                repoinfo_id = msg['msg'].get('upstream')
                for package in Package.objects.filter(
                        name=pkg['name'], arch=pkg['arch']):
                    if package.version != pkg['version'] or \
                            package.release != pkg['release']:
                        logger.info('Package changed: %s %s' % (pkg, package))
                        images_qs = package.images.all()
                        if repoinfo_id is not None:
                            images_qs = images_qs.filter(
                                repoinfo__id=repoinfo_id)
                        images_qs.update(to_build=True)
                        logger.info('Images marked for build: %s' % images_qs)
        except Exception as e:
            logger.critical('pkgupdatelistener errored out: %s' % e,
                            exc_info=True)
            sys.exit(1)

def listen(config):
    """
    Listens to activity on upstream repos on pagure and github
    via fedmsg, and syncs new issues there to the JIRA instance
    defined in 'fedmsg.d/sync2jira.py'

    :param Dict config: Config dict
    :returns: Nothing
    """
    if not config['sync2jira'].get('listen'):
        log.info("`listen` is disabled. Exiting.")
        return

    log.info("Waiting for a relevant fedmsg message to arrive...")
    for _, _, topic, msg in fedmsg.tail_messages(**config):
        idx = msg['msg_id']
        suffix = ".".join(topic.split('.')[3:])
        log.debug("Encountered %r %r %r", suffix, topic, idx)

        if suffix not in issue_handlers and suffix not in pr_handlers:
            continue

        log.debug("Handling %r %r %r", suffix, topic, idx)
        handle_msg(msg, suffix, config)

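# A minimal sketch of the 'fedmsg.d/sync2jira.py' configuration the docstring
# above refers to. fedmsg.d files conventionally define a module-level
# `config` dict that fedmsg.config.load_config() merges in; the 'listen' key
# is the one listen() checks. Everything beyond that key is an illustrative
# assumption, not the project's authoritative schema.
config = {
    'sync2jira': {
        'listen': True,  # listen() returns immediately when this is falsy
    },
}
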
def main():
    fedmsg_config = fedmsg.config.load_config()
    dictConfig(fedmsg_config.get('logging', {'version': 1}))
    log.info("Listening to the bus via fedmsg.tail_messages()")
    for _, _, topic, msg in fedmsg.tail_messages():
        # XXX - if you want to debug whether or not this is receiving fedmsg
        # messages, you can put a print statement here, before the 'continue'
        # statement.
        if not topic.endswith(target):
            continue

        log.info("A meeting just ended! Sleeping 2s. %r" % msg.get('msg_id'))
        time.sleep(2)

        teams_cmd = "/usr/local/bin/meetings_by_team.sh"
        log.info("Running %r" % teams_cmd)
        proc = sp.Popen(teams_cmd.split(), stdout=sp.PIPE, stderr=sp.PIPE)
        stdout, stderr = proc.communicate()

        if proc.returncode:
            # Calling log.error in fedora infrastructure with fedmsg logging
            # configured, should send an email to the sysadmin-datanommer
            # group.
            log.error("Error %r running %r.\n STDOUT: %s\n STDERR: %s" % (
                proc.returncode, teams_cmd, stdout, stderr))

        log.info("Running soke.run()...")
        soke.run()
        log.info("Done.")

def watch(self):
    config = fedmsg.config.load_config()
    fedmsg.init(mute=True, **config)
    fedmsg.meta.make_processors(**config)
    for name, endpoint, topic, msg in fedmsg.tail_messages():
        log.debug("received topic: {topic}".format(topic=topic))
        if topic not in self.topics:
            continue

        log.debug("match topic {topic}=>{data}".format(topic=topic,
                                                       data=msg['msg']))
        pargs = [topic]
        for parg in self.topics[topic]['args']:
            if hasattr(parg, '__call__'):
                # run this as fedmsg.meta function
                pargs.append(parg(msg, **config))
            elif '/' in parg:
                # this is a dpath expression
                try:
                    path, val = next(dpath.util.search(msg, parg, yielded=True))
                    pargs.append(val)
                except StopIteration:
                    log.warning("Path {parg} does not exist in {topic}. "
                                "Substituting empty string"
                                .format(parg=parg, topic=topic))
                    pargs.append('')
            elif parg in msg:
                pargs.append(msg[parg])
            else:
                log.warning("Path {parg} does not exist in {topic}. "
                            "Substituting empty string"
                            .format(parg=parg, topic=topic))
                pargs.append('')
        self.__run_scripts(self.script_dir, pargs)

def tail(**kw):
    """ Watch all endpoints on the bus and print each message to stdout. """
    # Disable sending
    kw['publish_endpoint'] = None

    # Disable timeouts.  We want to tail forever!
    kw['timeout'] = 0

    # Even though fedmsg-tail won't be sending any messages, give it a name to
    # conform with the other commands.
    kw['name'] = 'relay_inbound'

    # Tail is never going to send any messages, so we suppress warnings about
    # having no publishing sockets established.
    kw['mute'] = True

    fedmsg.init(**kw)
    fedmsg.text.make_processors(**kw)

    # Build a message formatter
    formatter = lambda d: d
    if kw['pretty']:
        def formatter(d):
            d['timestamp'] = time.ctime(d['timestamp'])
            d = fedmsg.crypto.strip_credentials(d)
            return "\n" + pprint.pformat(d)

    if kw['really_pretty']:
        def formatter(d):
            d = fedmsg.crypto.strip_credentials(d)
            fancy = pygments.highlight(
                fedmsg.encoding.pretty_dumps(d),
                pygments.lexers.JavascriptLexer(),
                pygments.formatters.TerminalFormatter()).strip()
            return "\n" + fancy

    if kw['terse']:
        formatter = lambda d: "\n" + fedmsg.text.msg2repr(d, **kw)

    exclusive_regexp = re.compile(kw['exclusive_regexp'])
    inclusive_regexp = re.compile(kw['inclusive_regexp'])

    # The "proper" fedmsg way to do this would be to spin up or connect to an
    # existing Moksha Hub and register a consumer on the "*" topic that simply
    # prints out each message it consumes.  That seems like overkill, so we're
    # just going to directly access the endpoints ourself.
    for name, ep, topic, message in fedmsg.tail_messages(**kw):
        if exclusive_regexp.search(topic):
            continue

        if not inclusive_regexp.search(topic):
            continue

        print name, ep, topic, formatter(message)

def main(self):
    for _, _, topic, msg in fedmsg.tail_messages():
        self.notify_watchdog()
        try:
            if topic.startswith(get_config('fedmsg.topic') + '.'):
                self.consume(topic, msg)
            plugin.dispatch_event('fedmsg_event', self.session, topic, msg)
        finally:
            self.db.rollback()
        self.memory_check()

def main():
    tweet = twitter_utils.Tweet()
    config = fedmsg.config.load_config([], None)
    fedmsg.meta.make_processors(**config)
    for name, endpoint, topic, msg in fedmsg.tail_messages():
        # currently only a few topics are frequented,
        # so sending tweets without filtering on topics
        print "Sending Tweet"
        title = fedmsg.meta.msg2title(msg, **config)
        tweet.send_tweet("Msg received on bus is titled: %s" % title)

def listen(config):
    """
    Listens to activity on upstream repos on pagure and github
    via fedmsg, and syncs new issues there to the JIRA instance
    defined in 'fedmsg.d/sync2jira.py'

    :param Dict config: Config dict
    :returns: Nothing
    """
    if not config['sync2jira'].get('listen'):
        log.info("`listen` is disabled. Exiting.")
        return

    log.info("Waiting for a relevant fedmsg message to arrive...")
    for _, _, topic, msg in fedmsg.tail_messages(**config):
        idx = msg['msg_id']
        suffix = ".".join(topic.split('.')[3:])
        log.debug("Encountered %r %r %r", suffix, topic, idx)

        if suffix not in issue_handlers and suffix not in pr_handlers:
            continue

        log.debug("Handling %r %r %r", suffix, topic, idx)

        issue = None
        pr = None
        # Github '.issue.' is used for both PR and Issue
        # Check for that edge case
        if suffix == 'github.issue.comment':
            if 'pull_request' in msg['msg']['issue'] and \
                    msg['msg']['action'] != 'deleted':
                # pr_filter turns on/off the filtering of PRs
                pr = issue_handlers[suffix](msg, config, pr_filter=False)
                if not pr:
                    continue
                # Issues do not have suffix and reporter needs to be reformatted
                pr.suffix = suffix
                pr.reporter = pr.reporter.get('fullname')
                setattr(pr, 'match', matcher(pr.content, pr.comments))
            else:
                issue = issue_handlers[suffix](msg, config)
        elif suffix in issue_handlers:
            issue = issue_handlers[suffix](msg, config)
        elif suffix in pr_handlers:
            pr = pr_handlers[suffix](msg, config, suffix)

        if not issue and not pr:
            continue

        if issue:
            d_issue.sync_with_jira(issue, config)
        elif pr:
            d_pr.sync_with_jira(pr, config)

def iterate_dg_pr_flags() -> Iterable[Tuple[str, dict]]:
    """
    Provide messages when a flag is added to a pull request in dist-git

    :return: tuple, (full topic name, dict with the message)
    """
    # we can watch for runs directly:
    # "org.centos.prod.ci.pipeline.allpackages.complete"
    topic = "org.fedoraproject.prod.pagure.pull-request.flag.added"
    logger.info("listening on fedmsg, topic=%s", topic)
    for name, endpoint, topic, msg in fedmsg.tail_messages(topic=topic):
        yield topic, msg

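# Hedged usage sketch for the generator above: iterate_dg_pr_flags() yields
# (topic, message) pairs forever, so a caller just loops over it. The
# handle_flag() callback is a hypothetical stand-in, not part of the
# original code.
def handle_flag(body):
    logger.info("new PR flag: %s", body)

for topic, msg in iterate_dg_pr_flags():
    handle_flag(msg['msg'])  # the inner payload, as in the other snippets here
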
def main(self):
    try:
        for _, _, topic, msg in fedmsg.tail_messages():
            self.notify_watchdog()
            try:
                if topic.startswith(get_config('fedmsg.topic') + '.'):
                    self.consume(topic, msg)
                plugin.dispatch_event('fedmsg_event', topic, msg, db=self.db,
                                      koji_sessions=self.koji_sessions)
            finally:
                self.db.rollback()
    except requests.exceptions.ConnectionError:
        self.log.exception("Fedmsg watcher exception.")
        fedmsg.destroy()
        fedmsg.init()

def main():
    # Fedmsg and Elasticsearch clients
    fedmsg_config = fedmsg.config.load_config()
    es = Elasticsearch(hosts=[{'host': config_es_url, 'port': config_es_port}])

    # Reading Fedmsg messages
    for name, endpoint, topic, msg in fedmsg.tail_messages(**fedmsg_config):
        log("\n\n==============================================\n")
        log("Fedmsg received!\n")
        log("Time: {}".format(datetime.now()))
        log("Topic: {}".format(topic))
        log("Message:\n{}\n".format(msg))

        if topic == config_topic_module_state_change:
            log("Module state change: {}\n".format(topic))
            action_module_state_change(msg["msg"], es)

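# The snippet above calls a bare log() helper rather than a logging.Logger;
# its definition is not shown in the source. A minimal stand-in, assuming the
# helper only needs to echo to stdout, could be:
def log(message):
    print(message)
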
def iterate_pull_requests() -> Iterable[Tuple[str, str, dict]]:
    """
    Provide messages for all github pull-request-related events

    Actions:
    https://developer.github.com/v3/activity/events/types/#events-api-payload-28

    :return: tuple, (full topic name, pull request action, dict with the message)
    """
    # https://github.com/fedora-infra/github2fedmsg/blob/a9c178b93aa6890e6b050e5f1c5e3297ceca463c/github2fedmsg/views/webhooks.py#L120
    topic_pre = "org.fedoraproject.prod.github.pull_request."
    for name, endpoint, topic, msg in fedmsg.tail_messages():
        # logger.debug("new message: %s", topic)
        # average load is about 5 messages a second
        if topic.startswith(topic_pre):
            logger.info("process message: %s", topic)
            action = topic.rsplit(".", 1)[1]
            yield topic, action, msg

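# Hedged sketch of consuming the generator above: branch on the action string
# parsed from the topic suffix. "opened" and "synchronize" are real GitHub
# pull-request actions; the print is a placeholder for real handling logic.
for topic, action, msg in iterate_pull_requests():
    if action in ("opened", "synchronize"):
        print("pull request event:", action, msg.get("msg_id"))
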
def run(self):
    # fedmsg configuration
    config = fedmsg.config.load_config([], None)
    config['mute'] = True
    config['timeout'] = 0

    # parse the koji-shadow configuration to ignore packages we know
    # we don't care about.
    ignorelist = (ks_config.get('rules', 'excludelist').split() +
                  ks_config.get('rules', 'ignorelist').split())

    logger.debug('Monitoring fedmsg.')
    for name, endpoint, topic, msg in fedmsg.tail_messages(**config):
        if (msg['topic'] == 'org.fedoraproject.prod.buildsys.build.state.change'
                and msg['msg']['new'] == 1
                and msg['msg']['name'] not in ignorelist):
            buildqueue.append(msg['msg']['name'] + '-' +
                              msg['msg']['version'] + '-' +
                              msg['msg']['release'])

def main(self):
    try:
        for _, _, topic, msg in fedmsg.tail_messages():
            self.notify_watchdog()
            try:
                if topic.startswith(self.topic_name + '.'):
                    self.consume(topic, msg)
                plugin.dispatch_event('fedmsg_event', topic, msg, db=self.db,
                                      koji_session=self.koji_session)
            finally:
                self.db.rollback()
    except requests.exceptions.ConnectionError:
        self.log.exception("Fedmsg watcher exception.")
        fedmsg.destroy()
        fedmsg.init()

def run(self):
    # This is a "required" option... :P
    if not self.config['command']:
        self.log.error("You must provide a --command to run.")
        sys.exit(1)

    # Disable sending
    self.config['publish_endpoint'] = None

    # Disable timeouts.  We want to tail forever!
    self.config['timeout'] = 0

    # Even though fedmsg-trigger won't be sending any messages, give it a
    # name to conform with the other commands.
    self.config['name'] = 'relay_inbound'

    # Tail is never going to send any messages, so we suppress warnings
    # about having no publishing sockets established.
    self.config['mute'] = True

    fedmsg.init(**self.config)

    exclusive_regexp = re.compile(self.config['exclusive_regexp'])
    inclusive_regexp = re.compile(self.config['inclusive_regexp'])

    for name, ep, topic, message in fedmsg.tail_messages(**self.config):
        if exclusive_regexp.search(topic):
            continue

        if not inclusive_regexp.search(topic):
            continue

        result = self.run_command(self.config['command'], message)

        if result != 0:
            self.log.info("Command returned error code %r" % result)

def run(self):
    self.sanity_check()

    ack_topic = '.'.join([
        self.c['topic_prefix'],
        self.c['environment'],
        'gilmsg.ack',
    ])

    # Go into a loop, receiving gilmsg ACK messages from the fedmsg bus
    for n, e, t, msg in fedmsg.tail_messages(**self.c):
        # Did we run out of time?
        if self.time_is_up:
            return

        # Throw away anything that's not an ACK produced by a consumer.
        if t != ack_topic:
            continue

        # We should only know about ACKs.  Is this an ACK for *ours*?
        if not msg['msg']['ack_msg_id'] == self.msg_id:
            continue

        # We can declare that multiple other systems *must* receive our
        # message.  Write down who is acking this one.
        for signer in self.expectations:
            if not fedmsg.crypto.validate_signed_by(msg, signer, **self.c):
                continue
            self.results.append(signer)

        # Check to see if we collected all the pokemon.
        # If all the people that we wanted to get the message have sent
        # us ACKs for this message_id, then hooray!  If not, go back into
        # tail_messages to wait for more ACKs.  The clock is ticking....
        if set(self.results) == set(self.expectations):
            return

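# For orientation, a hedged sketch of the ACK message shape the loop above
# matches against. The field names are inferred from the code; the concrete
# values are hypothetical.
ack_example = {
    'topic': 'org.fedoraproject.prod.gilmsg.ack',  # topic_prefix.environment.gilmsg.ack
    'msg': {
        'ack_msg_id': '2015-0d38f',  # hypothetical msg_id being acknowledged
    },
}
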
def looper(self):
    """ Implementation of the generator to feed the event loop """
    for name, endpoint, topic, msg in fedmsg.tail_messages(mute=True):
        yield (topic, dict(msg))

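# Hedged sketch of an event loop draining looper(): each (topic, body) pair
# is handed to a dispatch table. `watcher` (an instance of the class above)
# and the handlers dict are hypothetical names, not from the original code.
handlers = {}  # topic -> callable(body)
for topic, body in watcher.looper():
    handler = handlers.get(topic)
    if handler is not None:
        handler(body)
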
def run(self):
    # First, sanity checking.
    if not self.config.get('tweet_endpoints', None):
        raise ValueError("Not configured to tweet.")

    # Boilerplate..
    self.config['publish_endpoint'] = None
    self.config['name'] = 'relay_inbound'
    self.config['mute'] = True

    # Set up fedmsg
    fedmsg.init(**self.config)
    fedmsg.meta.make_processors(**self.config)

    # Set up twitter and statusnet.. multiple accounts if configured
    settings = self.config.get('tweet_endpoints', [])
    apis = [twitter_api.Api(**endpoint) for endpoint in settings]

    # Set up bitly
    settings = self.config['bitly_settings']
    bitly = bitlyapi.BitLy(
        settings['api_user'],
        settings['api_key'],
    )

    # How long to sleep if we spew too fast.
    hibernate_duration = self.config['tweet_hibernate_duration']
    # Sleep a second or two inbetween messages to try and avoid the hibernate
    intermessage_pause = self.config['tweet_intermessage_pause']

    def _post_to_api(api, message):
        try:
            api.PostUpdate(message)
        except Exception as e:
            if 'Too many notices too fast;' in str(e):
                # Cool our heels then try again.
                self.log.info("Sleeping for %i" % hibernate_duration)
                time.sleep(hibernate_duration)
                _post_to_api(api, message)
            elif 'json decoding' in str(e):
                # Let it slide ... no idea what this one is.
                pass
            elif 'duplicate' in str(e):
                # Let it slide ...
                pass
            else:
                raise

    for name, ep, topic, msg in fedmsg.tail_messages(**self.config):
        message = fedmsg.meta.msg2subtitle(msg, **self.config)
        link = fedmsg.meta.msg2link(msg, **self.config)

        if link:
            link = bitly.shorten(longUrl=link)['url']
            message = message[:138 - len(link)] + " " + link
        else:
            message = message[:140]

        if not message:
            self.log.info("Not tweeting blank message.")
            continue

        self.log.info("Tweeting %r" % message)
        for api in apis:
            _post_to_api(api, message)

        time.sleep(intermessage_pause)

def main():
    parser = argparse.ArgumentParser(description = __doc__)
    parser.add_argument('config', help = 'path of configuration file')
    parser.add_argument('-f', '--fedmsg', action = 'store_true',
        help = 'poll fedmsg events')
    parser.add_argument('-v', '--verbose', action = 'store_true',
        help = 'increase output verbosity')
    global args
    args = parser.parse_args()
    logging.basicConfig(level = logging.DEBUG if args.verbose else logging.WARNING,
        stream = sys.stdout)

    config_parser = ConfigParser.SafeConfigParser(dict(
        here = os.path.dirname(os.path.abspath(os.path.normpath(args.config))),
        ))
    config_parser.read(args.config)
    global conf
    conf = check(pipe(
        test_isinstance(dict),
        struct(
            {
                'ckan_of_worms.api_key': pipe(
                    cleanup_line,
                    not_none,
                    ),
                'ckan_of_worms.site_url': pipe(
                    make_input_to_url(error_if_fragment = True, error_if_path = True,
                        error_if_query = True, full = True),
                    not_none,
                    ),
                'user_agent': pipe(
                    cleanup_line,
                    not_none,
                    ),
                },
            default = 'drop',
            ),
        not_none,
        ))(dict(config_parser.items('CowBots-Check-Datasets')), default_state)

    global headers
    headers = {
        'User-Agent': conf['user_agent'],
        }

    if args.fedmsg:
        import fedmsg

        fedmsg_conf = check(struct(
            dict(
                environment = pipe(
                    empty_to_none,
                    test_in(['dev', 'prod', 'stg']),
                    ),
                modname = pipe(
                    empty_to_none,
                    test(lambda value: value == value.strip('.'),
                        error = 'Value must not begin or end with a "."'),
                    default('ckan_of_worms'),
                    ),
                # name = pipe(
                #     empty_to_none,
                #     default('ckan_of_worms.{}'.format(hostname)),
                #     ),
                topic_prefix = pipe(
                    empty_to_none,
                    test(lambda value: value == value.strip('.'),
                        error = 'Value must not begin or end with a "."'),
                    ),
                ),
            default = 'drop',
            ))(dict(config_parser.items('fedmsg')))

        # Read in the config from /etc/fedmsg.d/.
        fedmsg_config = fedmsg.config.load_config([], None)
        # Disable a warning about not sending.  We know.  We only want to tail.
        fedmsg_config['mute'] = True
        # Disable timing out so that we can tail forever.  This is deprecated
        # and will disappear in future versions.
        fedmsg_config['timeout'] = 0
        # For the time being, don't require message to be signed.
        fedmsg_config['validate_signatures'] = False
        for key, value in fedmsg_conf.iteritems():
            if value is not None:
                fedmsg_config[key] = value

        expected_topic_prefix = '{}.{}.ckan_of_worms.'.format(
            fedmsg_config['topic_prefix'], fedmsg_config['environment'])
        for name, endpoint, topic, message in fedmsg.tail_messages(**fedmsg_config):
            if not topic.startswith(expected_topic_prefix):
                log.debug(u'Ignoring message: {}, {}'.format(topic, name))
                continue
            kind, action = topic[len(expected_topic_prefix):].split('.')
            if kind == 'dataset':
                if action in ('create', 'update'):
                    dataset = check(pipe(
                        cow_json_to_dataset,
                        not_none,
                        ))(message['msg'], state = default_state)
                    check_dataset(dataset)
                else:
                    log.debug(u'TODO: Handle {}, {} for {}'.format(kind, action, message))
            else:
                log.debug(u'TODO: Handle {}, {} for {}'.format(kind, action, message))
    else:
        request = urllib2.Request(urlparse.urljoin(conf['ckan_of_worms.site_url'],
            'api/1/datasets'), headers = headers)
        response = urllib2.urlopen(request)
        datasets_id = check(pipe(
            cow_response_to_value,
            cow_json_to_ids,
            not_none,
            ))(response.read(), state = default_state)
        for dataset_id in datasets_id:
            request = urllib2.Request(urlparse.urljoin(conf['ckan_of_worms.site_url'],
                'api/1/datasets/{}'.format(dataset_id)), headers = headers)
            response = urllib2.urlopen(request)
            dataset = check(pipe(
                cow_response_to_value,
                cow_json_to_dataset,
                not_none,
                ))(response.read(), state = default_state)
            check_dataset(dataset)
    return 0

def main():
    parser = argparse.ArgumentParser(description = __doc__)
    parser.add_argument('config', help = 'path of configuration file')
    parser.add_argument('-v', '--verbose', action = 'store_true',
        help = 'increase output verbosity')
    global args
    args = parser.parse_args()
    logging.basicConfig(level = logging.DEBUG if args.verbose else logging.WARNING,
        stream = sys.stdout)

    config_parser = ConfigParser.SafeConfigParser(dict(
        here = os.path.dirname(os.path.abspath(os.path.normpath(args.config))),
        ))
    config_parser.read(args.config)
    global conf
    conf = conv.check(conv.pipe(
        conv.test_isinstance(dict),
        conv.struct(
            {
                'dactylo.api_key': conv.pipe(
                    conv.cleanup_line,
                    conv.not_none,
                    ),
                'dactylo.site_url': conv.pipe(
                    conv.make_input_to_url(error_if_fragment = True,
                        error_if_path = True, error_if_query = True, full = True),
                    conv.not_none,
                    ),
                'user_agent': conv.pipe(
                    conv.cleanup_line,
                    conv.not_none,
                    ),
                },
            default = 'drop',
            ),
        conv.not_none,
        ))(dict(config_parser.items('CowBots-Report-Activity')), conv.default_state)

    global headers
    headers = {
        'User-Agent': conf['user_agent'],
        }
    global request_headers
    request_headers = headers.copy()
    request_headers['Content-Type'] = 'application/json'

    fedmsg_conf = conv.check(conv.struct(
        dict(
            environment = conv.pipe(
                conv.empty_to_none,
                conv.test_in(['dev', 'prod', 'stg']),
                ),
            modname = conv.pipe(
                conv.empty_to_none,
                conv.test(lambda value: value == value.strip('.'),
                    error = 'Value must not begin or end with a "."'),
                conv.default('ckan_of_worms'),
                ),
            # name = conv.pipe(
            #     conv.empty_to_none,
            #     conv.default('ckan_of_worms.{}'.format(hostname)),
            #     ),
            topic_prefix = conv.pipe(
                conv.empty_to_none,
                conv.test(lambda value: value == value.strip('.'),
                    error = 'Value must not begin or end with a "."'),
                ),
            ),
        default = 'drop',
        ))(dict(config_parser.items('fedmsg')))

    # Read in the config from /etc/fedmsg.d/.
    fedmsg_config = fedmsg.config.load_config([], None)
    # Disable a warning about not sending.  We know.  We only want to tail.
    fedmsg_config['mute'] = True
    # Disable timing out so that we can tail forever.  This is deprecated
    # and will disappear in future versions.
    fedmsg_config['timeout'] = 0
    # For the time being, don't require message to be signed.
    fedmsg_config['validate_signatures'] = False
    for key, value in fedmsg_conf.iteritems():
        if value is not None:
            fedmsg_config[key] = value

    expected_topic_prefix = '{}.{}.ckan_of_worms.'.format(
        fedmsg_config['topic_prefix'], fedmsg_config['environment'])
    for name, endpoint, topic, message in fedmsg.tail_messages(**fedmsg_config):
        if not topic.startswith(expected_topic_prefix):
            log.debug(u'Ignoring message: {}, {}'.format(topic, name))
            continue
        kind, action = topic[len(expected_topic_prefix):].split('.')
        if kind == 'related':
            activity = message['msg']
            dataset = activity['target']
            log.info(u'Adding activity for {} related in dataset "{}"'.format(
                action, dataset['name']))
            request = urllib2.Request(urlparse.urljoin(conf['dactylo.site_url'],
                'api/1/activities'), headers = request_headers)
            request_data = dict(
                api_key = conf['dactylo.api_key'],
                value = activity,
                )
            try:
                response = urllib2.urlopen(request, json.dumps(request_data))
            except urllib2.HTTPError as response:
                log.error(u'An error occurred while adding activity: {}'.format(activity))
                response_text = response.read()
                try:
                    response_dict = json.loads(response_text)
                except ValueError:
                    log.error(response_text)
                    raise
                for key, value in response_dict.iteritems():
                    print '{} = {}'.format(key, value)
                raise
            else:
                assert response.code == 200
                conv.check(cow_response_to_value)(response.read(),
                    state = conv.default_state)
        else:
            log.debug(u'TODO: Handle {}, {} for {}'.format(kind, action, message))
    return 0

    time.sleep(30)
    dom.create()
    print("Domain %s started" % (domain))


if __name__ == '__main__':
    pidfile = open('/var/run/harness.pid', 'w')
    pid = str(os.getpid())
    pidfile.write(pid)
    pidfile.close()
    config = fedmsg.config.load_config([], None)
    config['mute'] = True
    config['timeout'] = 0
    fedmsg.meta.make_processors(**config)
    for name, endpoint, topic, msg in fedmsg.tail_messages(**config):
        if "buildsys.build.state.change" in topic:
            matchedmsg = fedmsg.meta.msg2repr(msg, **config)
            if "completed" in matchedmsg:
                if "kernel" in matchedmsg:
                    objectmsg = fedmsg.meta.msg2subtitle(msg, legacy=False, **config)
                    package = string.split(objectmsg, ' ')
                    fcrelease = string.split(package[1], '.')
                    domain = domainmap(fcrelease[-1])
                    logfile = open('/var/log/harness.log', 'a')
                    logfile.write('Testing ' + package[1] + '\n')
                    logfile.close()
                    writelatest(domain, package[1])
                    dom32 = domain + '32'

def main():
    parser = argparse.ArgumentParser(description = __doc__)
    parser.add_argument('config', help = 'path of configuration file')
    parser.add_argument('-v', '--verbose', action = 'store_true',
        help = 'increase output verbosity')
    global args
    args = parser.parse_args()
    logging.basicConfig(level = logging.DEBUG if args.verbose else logging.WARNING,
        stream = sys.stdout)

    config_parser = ConfigParser.SafeConfigParser(dict(
        here = os.path.dirname(os.path.abspath(os.path.normpath(args.config))),
        ))
    config_parser.read(args.config)
    conf = conv.check(conv.pipe(
        conv.test_isinstance(dict),
        conv.struct(
            {
                'ckan.api_key': conv.pipe(
                    conv.cleanup_line,
                    conv.not_none,
                    ),
                'ckan.site_url': conv.pipe(
                    conv.make_input_to_url(error_if_fragment = True,
                        error_if_path = True, error_if_query = True, full = True),
                    conv.not_none,
                    ),
                'ckan_of_worms.api_key': conv.pipe(
                    conv.cleanup_line,
                    conv.not_none,
                    ),
                'ckan_of_worms.site_url': conv.pipe(
                    conv.make_input_to_url(error_if_fragment = True,
                        error_if_path = True, error_if_query = True, full = True),
                    conv.not_none,
                    ),
                'user_agent': conv.pipe(
                    conv.cleanup_line,
                    conv.not_none,
                    ),
                },
            default = 'drop',
            ),
        conv.not_none,
        ))(dict(config_parser.items('CKAN-of-Worms-Harvesters')), conv.default_state)

    fedmsg_conf = conv.check(conv.struct(
        dict(
            environment = conv.pipe(
                conv.empty_to_none,
                conv.test_in(['dev', 'prod', 'stg']),
                ),
            modname = conv.pipe(
                conv.empty_to_none,
                conv.test(lambda value: value == value.strip('.'),
                    error = 'Value must not begin or end with a "."'),
                conv.default('ckan_of_worms'),
                ),
            # name = conv.pipe(
            #     conv.empty_to_none,
            #     conv.default('ckan_of_worms.{}'.format(hostname)),
            #     ),
            topic_prefix = conv.pipe(
                conv.empty_to_none,
                conv.test(lambda value: value == value.strip('.'),
                    error = 'Value must not begin or end with a "."'),
                ),
            ),
        default = 'drop',
        ))(dict(config_parser.items('fedmsg')))

    source_headers = {
        'Authorization': conf['ckan.api_key'],  # API key is required to get full user profile.
        'User-Agent': conf['user_agent'],
        }
    source_site_url = conf['ckan.site_url']
    target_api_key = conf['ckan_of_worms.api_key']
    target_headers = {
        'User-Agent': conf['user_agent'],
        }
    target_site_url = conf['ckan_of_worms.site_url']

    # Read in the config from /etc/fedmsg.d/.
    fedmsg_config = fedmsg.config.load_config([], None)
    # Disable a warning about not sending.  We know.  We only want to tail.
    fedmsg_config['mute'] = True
    # Disable timing out so that we can tail forever.  This is deprecated
    # and will disappear in future versions.
    fedmsg_config['timeout'] = 0
    # For the time being, don't require message to be signed.
    fedmsg_config['validate_signatures'] = False
    for key, value in fedmsg_conf.iteritems():
        if value is not None:
            fedmsg_config[key] = value

    expected_topic_prefix = '{}.{}.ckan.'.format(fedmsg_config['topic_prefix'],
        fedmsg_config['environment'])
    for name, endpoint, topic, message in fedmsg.tail_messages(**fedmsg_config):
        if not topic.startswith(expected_topic_prefix):
            log.debug(u'Ignoring message: {}, {}'.format(topic, name))
            continue
        kind, action = topic[len(expected_topic_prefix):].split('.')
        if kind == 'group':
            if action in ('create', 'update'):
                group = message['msg']
                log.info(u'Upserting group: {}'.format(group['name']))
                request_headers = target_headers.copy()
                request_headers['Content-Type'] = 'application/json'
                request = urllib2.Request(urlparse.urljoin(target_site_url,
                    'api/1/groups/ckan'), headers = request_headers)
                try:
                    response = urllib2.urlopen(request, json.dumps(dict(
                        api_key = target_api_key,
                        value = group,
                        )))
                except urllib2.HTTPError as response:
                    log.error(u'An error occurred while upserting group: {}'.format(group))
                    response_text = response.read()
                    try:
                        response_dict = json.loads(response_text)
                    except ValueError:
                        log.error(response_text)
                        raise
                    for key, value in response_dict.iteritems():
                        print '{} = {}'.format(key, value)
                    raise
                else:
                    assert response.code == 200
                    response_dict = json.loads(response.read())
                    # group = response_dict['value']
                    # print group
            elif action == 'delete':
                group = message['msg']
                log.info(u'Deleting group: {}'.format(group['id']))
                request_headers = target_headers.copy()
                request_headers['Content-Type'] = 'application/json'
                request = urllib2.Request(urlparse.urljoin(target_site_url,
                    'api/1/groups/{}'.format(group['id'])), headers = request_headers)
                request.get_method = lambda: 'DELETE'
                try:
                    response = urllib2.urlopen(request, json.dumps(dict(
                        api_key = target_api_key,
                        )))
                except urllib2.HTTPError as response:
                    if response.code == 404:
                        log.warning(u"Deleted group doesn't exist: {}".format(group['id']))
                        response_dict = json.loads(response.read())
                    else:
                        log.error(u'An error occurred while deleting group: {}'.format(group))
                        response_text = response.read()
                        try:
                            response_dict = json.loads(response_text)
                        except ValueError:
                            log.error(response_text)
                            raise
                        for key, value in response_dict.iteritems():
                            print '{} = {}'.format(key, value)
                        raise
                else:
                    assert response.code == 200
                    response_dict = json.loads(response.read())
                    # group = response_dict['value']
                    # print group
            else:
                log.warning(u'TODO: Handle {}, {} for {}'.format(kind, action, message))
        elif kind == 'organization':
            if action in ('create', 'update'):
                organization = message['msg']
                log.info(u'Upserting organization: {}'.format(organization['name']))
                request_headers = target_headers.copy()
                request_headers['Content-Type'] = 'application/json'
                request = urllib2.Request(urlparse.urljoin(target_site_url,
                    'api/1/organizations/ckan'), headers = request_headers)
                try:
                    response = urllib2.urlopen(request, json.dumps(dict(
                        api_key = target_api_key,
                        value = organization,
                        )))
                except urllib2.HTTPError as response:
                    log.error(u'An error occurred while upserting organization: {}'.format(
                        organization))
                    response_text = response.read()
                    try:
                        response_dict = json.loads(response_text)
                    except ValueError:
                        log.error(response_text)
                        raise
                    for key, value in response_dict.iteritems():
                        print '{} = {}'.format(key, value)
                    raise
                else:
                    assert response.code == 200
                    response_dict = json.loads(response.read())
                    # organization = response_dict['value']
                    # print organization
            elif action == 'delete':
                organization = message['msg']
                log.info(u'Deleting organization: {}'.format(organization['id']))
                request_headers = target_headers.copy()
                request_headers['Content-Type'] = 'application/json'
                request = urllib2.Request(urlparse.urljoin(target_site_url,
                    'api/1/organizations/{}'.format(organization['id'])),
                    headers = request_headers)
                request.get_method = lambda: 'DELETE'
                try:
                    response = urllib2.urlopen(request, json.dumps(dict(
                        api_key = target_api_key,
                        )))
                except urllib2.HTTPError as response:
                    if response.code == 404:
                        log.warning(u"Deleted organization doesn't exist: {}".format(
                            organization['id']))
                        response_dict = json.loads(response.read())
                    else:
                        log.error(u'An error occurred while deleting organization: {}'.format(
                            organization))
                        response_text = response.read()
                        try:
                            response_dict = json.loads(response_text)
                        except ValueError:
                            log.error(response_text)
                            raise
                        for key, value in response_dict.iteritems():
                            print '{} = {}'.format(key, value)
                        raise
                else:
                    assert response.code == 200
                    response_dict = json.loads(response.read())
                    # organization = response_dict['value']
                    # print organization
            else:
                log.warning(u'TODO: Handle {}, {} for {}'.format(kind, action, message))
        elif kind == 'package':
            if action in ('create', 'update'):
                package = message['msg']
                log.info(u'Upserting package: {}'.format(package['name']))
                request_headers = target_headers.copy()
                request_headers['Content-Type'] = 'application/json'
                request = urllib2.Request(urlparse.urljoin(target_site_url,
                    'api/1/datasets/ckan'), headers = request_headers)
                try:
                    response = urllib2.urlopen(request, json.dumps(dict(
                        api_key = target_api_key,
                        value = package,
                        )))
                except urllib2.HTTPError as response:
                    log.error(u'An error occurred while upserting package: {}'.format(package))
                    response_text = response.read()
                    try:
                        response_dict = json.loads(response_text)
                    except ValueError:
                        log.error(response_text)
                        raise
                    for key, value in response_dict.iteritems():
                        print '{} = {}'.format(key, value)
                    raise
                else:
                    assert response.code == 200
                    response_dict = json.loads(response.read())
                    # dataset = response_dict['value']
                    # print dataset
            elif action == 'delete':
                package = message['msg']
                log.info(u'Deleting package: {}'.format(package['id']))
                request_headers = target_headers.copy()
                request_headers['Content-Type'] = 'application/json'
                request = urllib2.Request(urlparse.urljoin(target_site_url,
                    'api/1/datasets/{}'.format(package['id'])), headers = request_headers)
                request.get_method = lambda: 'DELETE'
                try:
                    response = urllib2.urlopen(request, json.dumps(dict(
                        api_key = target_api_key,
                        )))
                except urllib2.HTTPError as response:
                    if response.code == 404:
                        log.warning(u"Deleted package doesn't exist: {}".format(package['id']))
                        response_dict = json.loads(response.read())
                    else:
                        log.error(u'An error occurred while deleting package: {}'.format(package))
                        response_text = response.read()
                        try:
                            response_dict = json.loads(response_text)
                        except ValueError:
                            log.error(response_text)
                            raise
                        for key, value in response_dict.iteritems():
                            print '{} = {}'.format(key, value)
                        raise
                else:
                    assert response.code == 200
                    response_dict = json.loads(response.read())
                    # dataset = response_dict['value']
                    # print dataset
            else:
                log.warning(u'TODO: Handle {}, {} for {}'.format(kind, action, message))
        elif kind == 'related':
            if action in ('create', 'update'):
                related = message['msg']
                log.info(u'Upserting related: {}'.format(related['title']))
                request_headers = target_headers.copy()
                request_headers['Content-Type'] = 'application/json'
                request = urllib2.Request(urlparse.urljoin(target_site_url,
                    'api/1/datasets/ckan/related'), headers = request_headers)
                try:
                    response = urllib2.urlopen(request, json.dumps(dict(
                        api_key = target_api_key,
                        value = related,
                        )))
                except urllib2.HTTPError as response:
                    log.error(u'An error occurred while upserting related: {}'.format(related))
                    response_text = response.read()
                    try:
                        response_dict = json.loads(response_text)
                    except ValueError:
                        log.error(response_text)
                        raise
                    for key, value in response_dict.iteritems():
                        print '{} = {}'.format(key, value)
                    raise
                else:
                    assert response.code == 200
                    response_dict = json.loads(response.read())
                    # related = response_dict['value']
                    # print related
            elif action == 'delete':
                related = message['msg']
                log.info(u'Deleting related: {}'.format(related['id']))
                request_headers = target_headers.copy()
                request_headers['Content-Type'] = 'application/json'
                request = urllib2.Request(urlparse.urljoin(target_site_url,
                    'api/1/datasets/related/{}'.format(related['id'])),
                    headers = request_headers)
                request.get_method = lambda: 'DELETE'
                try:
                    response = urllib2.urlopen(request, json.dumps(dict(
                        api_key = target_api_key,
                        )))
                except urllib2.HTTPError as response:
                    if response.code == 404:
                        log.warning(u"Deleted related doesn't exist: {}".format(related['id']))
                        response_dict = json.loads(response.read())
                    else:
                        log.error(u'An error occurred while deleting related: {}'.format(related))
                        response_text = response.read()
                        try:
                            response_dict = json.loads(response_text)
                        except ValueError:
                            log.error(response_text)
                            raise
                        for key, value in response_dict.iteritems():
                            print '{} = {}'.format(key, value)
                        raise
                else:
                    assert response.code == 200
                    response_dict = json.loads(response.read())
                    # related = response_dict['value']
                    # print related
            else:
                log.warning(u'TODO: Handle {}, {} for {}'.format(kind, action, message))
        elif kind == 'user':
            if action in ('create', 'update'):
                user = message['msg']
                # Retrieve full user profile (with email, etc).
                request = urllib2.Request(urlparse.urljoin(source_site_url,
                    'api/3/action/user_show'), headers = source_headers)
                try:
                    response = urllib2.urlopen(request, urllib.quote(json.dumps(dict(
                        id = user['id'],
                        ))))  # CKAN < 2.0 requires a POST.
                except urllib2.HTTPError as response:
                    if response.code == 403:
                        # Private user => Keep incomplete user profile.
                        pass
                    else:
                        raise
                else:
                    response_text = response.read()
                    try:
                        response_dict = json.loads(response_text)
                    except ValueError:
                        log.error(response_text)
                        raise
                    user = response_dict['result']
                log.info(u'Upserting user: {}'.format(user['name']))
                request_headers = target_headers.copy()
                request_headers['Content-Type'] = 'application/json'
                request = urllib2.Request(urlparse.urljoin(target_site_url,
                    'api/1/accounts/ckan'), headers = request_headers)
                try:
                    response = urllib2.urlopen(request, json.dumps(dict(
                        api_key = target_api_key,
                        value = user,
                        )))
                except urllib2.HTTPError as response:
                    log.error(u'An error occurred while upserting user: {}'.format(user))
                    response_text = response.read()
                    try:
                        response_dict = json.loads(response_text)
                    except ValueError:
                        log.error(response_text)
                        raise
                    for key, value in response_dict.iteritems():
                        print '{} = {}'.format(key, value)
                    raise
                else:
                    assert response.code == 200
                    response_dict = json.loads(response.read())
                    # account = response_dict['value']
                    # print account
            elif action == 'delete':
                user = message['msg']
                log.info(u'Deleting user: {}'.format(user['id']))
                request_headers = target_headers.copy()
                request_headers['Content-Type'] = 'application/json'
                request = urllib2.Request(urlparse.urljoin(target_site_url,
                    'api/1/accounts/{}'.format(user['id'])), headers = request_headers)
                request.get_method = lambda: 'DELETE'
                try:
                    response = urllib2.urlopen(request, json.dumps(dict(
                        api_key = target_api_key,
                        )))
                except urllib2.HTTPError as response:
                    if response.code == 404:
                        log.warning(u"Deleted user doesn't exist: {}".format(user['id']))
                        response_dict = json.loads(response.read())
                    else:
                        log.error(u'An error occurred while deleting user: {}'.format(user))
                        response_text = response.read()
                        try:
                            response_dict = json.loads(response_text)
                        except ValueError:
                            log.error(response_text)
                            raise
                        for key, value in response_dict.iteritems():
                            print '{} = {}'.format(key, value)
                        raise
                else:
                    assert response.code == 200
                    response_dict = json.loads(response.read())
                    # account = response_dict['value']
                    # print account
            else:
                log.warning(u'TODO: Handle {}, {} for {}'.format(kind, action, message))
        else:
            log.debug(u'TODO: Handle {}, {} for {}'.format(kind, action, message))
    return 0

    if fullname != 'releng/fedora-scm-requests':
        print("Dropping %r. Not scm request." % fullname)
        return False

    if 'close_status' not in fields:
        print("Dropping %r %r. Not closed." % (fullname, fields))
        return False

    handle(content)


if __name__ == '__main__':
    config = fedmsg.config.load_config()
    logging.config.dictConfig(config['logging'])
    fedmsg.meta.make_processors(**config)

    topic = 'io.pagure.prod.pagure.issue.edit'
    for _, _, topic, msg in fedmsg.tail_messages(topic=topic):
        # Extract some useful information for debugging
        title, subtitle, link, idx = [None] * 4
        try:
            title = fedmsg.meta.msg2title(msg, **config)
            subtitle = fedmsg.meta.msg2subtitle(msg, **config)
            link = fedmsg.meta.msg2link(msg, **config)
            idx = msg.get('msg_id')
        except Exception as e:
            print("!! Failed to determine title, subtitle, link")
        print("Inspecting {title}, {subtitle}, {link}, {idx}".format(
            title=title, subtitle=subtitle, link=link, idx=idx))

        # Extract values we need to actually process the message
        fullname = msg['msg']['project']['fullname']
        fields = msg['msg']['fields']

def run(self):
    # Disable sending
    self.config['publish_endpoint'] = None

    # Disable timeouts.  We want to tail forever!
    self.config['timeout'] = 0

    # Even though fedmsg-tail won't be sending any messages, give it a
    # name to conform with the other commands.
    self.config['name'] = 'relay_inbound'

    # Tail is never going to send any messages, so we suppress warnings
    # about having no publishing sockets established.
    self.config['mute'] = True

    fedmsg.init(**self.config)

    # Build a message formatter
    formatter = lambda d: d
    if self.config['pretty']:
        def formatter(d):
            d['timestamp'] = time.ctime(d['timestamp'])
            d = fedmsg.crypto.strip_credentials(d)
            return "\n" + pprint.pformat(d)

    if self.config['really_pretty']:
        def formatter(d):
            d = fedmsg.crypto.strip_credentials(d)
            fancy = pygments.highlight(
                fedmsg.encoding.pretty_dumps(d),
                pygments.lexers.JavascriptLexer(),
                pygments.formatters.TerminalFormatter()
            ).strip()
            return "\n" + fancy

    if self.config['query']:
        def formatter(d):
            result = fedmsg.utils.dict_query(d, self.config['query'])
            return ", ".join([six.text_type(value)
                              for value in result.values()])

    if self.config['terse']:
        formatter = lambda d: "\n" + fedmsg.meta.msg2repr(d, **self.config)

    if self.config['cowsay']:
        def formatter(d):
            result, error = cowsay_output(
                fedmsg.meta.msg2subtitle(d, **self.config))
            if not error:
                return "\n" + result
            else:
                return "\n" + error

    # Build regular expressions for use in our loop.
    exclusive_regexp = re.compile(self.config['exclusive_regexp'])
    inclusive_regexp = re.compile(self.config['inclusive_regexp'])

    # Build username and package filter sets for use in our loop.
    users, packages = set(), set()
    if self.config['users']:
        users = set(map(str.strip, self.config['users'].split(',')))
    if self.config['packages']:
        packages = set(map(str.strip, self.config['packages'].split(',')))

    # Only initialize this if we have to
    if users or packages or self.config['terse'] or self.config['cowsay']:
        fedmsg.meta.make_processors(**self.config)

    # Spin up a zmq.Poller and yield messages
    for name, ep, topic, message in fedmsg.tail_messages(**self.config):
        if exclusive_regexp.search(topic):
            continue

        if not inclusive_regexp.search(topic):
            continue

        if users:
            actual = fedmsg.meta.msg2usernames(message, **self.config)
            if not users.intersection(actual):
                continue

        if packages:
            actual = fedmsg.meta.msg2packages(message, **self.config)
            if not packages.intersection(actual):
                continue

        output = formatter(message)
        if output:
            self.log.info(output)

def run(self):
    # Disable sending
    self.config['publish_endpoint'] = None

    # Disable timeouts.  We want to tail forever!
    self.config['timeout'] = 0

    # Even though fedmsg-tail won't be sending any messages, give it a
    # name to conform with the other commands.
    self.config['name'] = 'relay_inbound'

    # Tail is never going to send any messages, so we suppress warnings
    # about having no publishing sockets established.
    self.config['mute'] = True

    fedmsg.init(**self.config)
    fedmsg.meta.make_processors(**self.config)

    # Build a message formatter
    formatter = lambda d: d
    if self.config['pretty']:
        def formatter(d):
            d['timestamp'] = time.ctime(d['timestamp'])
            d = fedmsg.crypto.strip_credentials(d)
            return "\n" + pprint.pformat(d)

    if self.config['really_pretty']:
        def formatter(d):
            d = fedmsg.crypto.strip_credentials(d)
            fancy = pygments.highlight(
                fedmsg.encoding.pretty_dumps(d),
                pygments.lexers.JavascriptLexer(),
                pygments.formatters.TerminalFormatter()
            ).strip()
            return "\n" + fancy

    if self.config['terse']:
        formatter = lambda d: "\n" + fedmsg.meta.msg2repr(d, **self.config)

    if self.config['gource']:
        # Output strings suitable for consumption by the "gource" tool.

        # We have 8 colors here and an unknown number of message types.
        # (There were 14 message types at the time this code was written).
        # Here we build a dict that maps message type names (a.k.a modnames
        # or services) to hex colors for usage in the gource graph.  We
        # wrap-around that dict if there are more message types than
        # there are colors (which there almost certainly are).
        procs = [proc.__name__.lower() for proc in fedmsg.meta.processors]
        colors = ["FFFFFF", "008F37", "FF680A", "CC4E00",
                  "8F0058", "8F7E00", "37008F", "7E008F"]
        n_wraps = 1 + int(math.ceil(len(colors) / float(len(procs))))
        colors = colors * n_wraps
        color_lookup = dict(zip(procs, colors))

        cache_directory = self.config['gource_user_image_dir']

        # After all that color trickiness, here is our formatter we'll use.
        def formatter(message):
            """ Use this like::

                $ fedmsg-tail --gource | gource \
                        -i 0 \
                        --user-image-dir ~/.cache/gravatar/ \
                        --log-format custom -
            """
            proc = fedmsg.meta.msg2processor(message, **self.config)
            users = fedmsg.meta.msg2usernames(message, **self.config)
            objs = fedmsg.meta.msg2objects(message, **self.config)
            name = proc.__name__.lower()

            if not users:
                users = [name]

            lines = []
            for user, obj in itertools.product(users, objs):
                _grab_and_cache_avatar(user, cache_directory)
                lines.append("%i|%s|A|%s|%s" % (
                    message['timestamp'],
                    user,
                    name + "/" + obj,
                    color_lookup[name],
                ))
            return "\n".join(lines)

    exclusive_regexp = re.compile(self.config['exclusive_regexp'])
    inclusive_regexp = re.compile(self.config['inclusive_regexp'])

    # The "proper" fedmsg way to do this would be to spin up or connect to
    # an existing Moksha Hub and register a consumer on the "*" topic that
    # simply prints out each message it consumes.  That seems like
    # overkill, so we're just going to directly access the endpoints
    # ourself.
    for name, ep, topic, message in fedmsg.tail_messages(**self.config):
        if exclusive_regexp.search(topic):
            continue

        if not inclusive_regexp.search(topic):
            continue

        self.log.info(formatter(message))

def main(): parser = argparse.ArgumentParser(description = __doc__) parser.add_argument('config', help = 'path of configuration file') parser.add_argument('-d', '--delete', action = 'store_true', help = 'force datastore re-creation') parser.add_argument('-f', '--fedmsg', action = 'store_true', help = 'poll fedmsg events') parser.add_argument('-v', '--verbose', action = 'store_true', help = 'increase output verbosity') global args args = parser.parse_args() logging.basicConfig(level = logging.DEBUG if args.verbose else logging.WARNING, stream = sys.stdout) config_parser = ConfigParser.SafeConfigParser(dict( here = os.path.dirname(os.path.abspath(os.path.normpath(args.config))), )) config_parser.read(args.config) conf = conv.check(conv.pipe( conv.test_isinstance(dict), conv.struct( { 'ckan.api_key': conv.pipe( conv.cleanup_line, conv.not_none, ), 'ckan.site_url': conv.pipe( conv.make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True, full = True), conv.not_none, ), 'user_agent': conv.pipe( conv.cleanup_line, conv.not_none, ), 'weckan.site_url': conv.pipe( conv.make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True, full = True), conv.not_none, ), }, default = 'drop', ), conv.not_none, ))(dict(config_parser.items('CowBots-Datasets-to-Dataset')), conv.default_state) ckan_headers = { 'Authorization': conf['ckan.api_key'], 'User-Agent': conf['user_agent'], } organization_name = u'premier-ministre' request = urllib2.Request(urlparse.urljoin(conf['ckan.site_url'], '/api/3/action/organization_show?id={}'.format(organization_name)), headers = ckan_headers) response = urllib2.urlopen(request) response_dict = json.loads(response.read()) organization = conv.check(conv.pipe( conv.make_ckan_json_to_organization(), conv.not_none, ))(response_dict['result'], state = conv.default_state) package_title = u'Jeux de données de data.gouv.fr' package_name = strings.slugify(package_title) # Try to retrieve exising package, to ensure that its resources will not be destroyed by package_update. 
request = urllib2.Request(urlparse.urljoin(conf['ckan.site_url'], '/api/3/action/package_show?id={}'.format(package_name)), headers = ckan_headers) try: response = urllib2.urlopen(request) except urllib2.HTTPError as response: if response.code == 404: package = {} else: raise else: assert response.code == 200 response_dict = json.loads(response.read()) package = conv.check(conv.pipe( conv.make_ckan_json_to_package(drop_none_values = True), conv.not_none, ))(response_dict['result'], state = conv.default_state) package.update(dict( author = u'Secrétariat général du Gouvernement', author_email = u'*****@*****.**', extras = [ dict( key = u"Date de production des données", value = datetime.date.today().isoformat(), ), ], frequency = u'temps réel', groups = [ dict(id = strings.slugify(u'État et collectivités')), ], license_id = u'fr-lo', name = package_name, notes = u"""Les jeux de données ouvertes collectés par la mission Etalab""", owner_org = organization['id'], # relationships_as_object (list of relationship dictionaries) – see package_relationship_create() for the format of relationship dictionaries (optional) # relationships_as_subject (list of relationship dictionaries) – see package_relationship_create() for the format of relationship dictionaries (optional) # resources = [ # dict( # created = entry.get(u'Date de publication'), # format = data.get('Format'), # last_modified = entry.get(u'Date de dernière modification'), # name = data.get('Titre'), # # package_id (string) – id of package that the resource needs should be added to. # url = data['URL'], ## revision_id – (optional) ## description (string) – (optional) ## hash (string) – (optional) ## resource_type (string) – (optional) ## mimetype (string) – (optional) ## mimetype_inner (string) – (optional) ## webstore_url (string) – (optional) ## cache_url (string) – (optional) ## size (int) – (optional) ## cache_last_updated (iso date string) – (optional) ## webstore_last_updated (iso date string) – (optional) # ) # for data in entry.get(u'Données', []) + entry.get(u'Documents annexes', []) # ], state = 'active', # Undelete package if it was deleted. # tags = [ # dict( # name = tag_name, ## vocabulary_id (string) – the name or id of the vocabulary that the new tag should be added to, e.g. 
'Genre' # ) # for tag_name in ( # strings.slugify(keyword)[:100] # for keyword in entry.get(u'Mots-clés', []) # if keyword is not None # ) # if len(tag_name) >= 2 # ], territorial_coverage = u'Country/FR', territorial_coverage_granularity = u'commune', title = package_title, # type (string) – the type of the dataset (optional), IDatasetForm plugins associate themselves with different dataset types and provide custom dataset handling behaviour for these types # url (string) – a URL for the dataset’s source (optional) # version (string, no longer than 100 characters) – (optional) )) if package.get('id') is None: request = urllib2.Request(urlparse.urljoin(conf['ckan.site_url'], '/api/3/action/package_create'), headers = ckan_headers) try: response = urllib2.urlopen(request, urllib.quote(json.dumps(package))) except urllib2.HTTPError as response: response_text = response.read() try: response_dict = json.loads(response_text) except ValueError: log.error(u'An exception occured while creating package: {0}'.format(package)) log.error(response_text) raise for key, value in response_dict.iteritems(): print '{} = {}'.format(key, value) else: assert response.code == 200 response_dict = json.loads(response.read()) else: request = urllib2.Request(urlparse.urljoin(conf['ckan.site_url'], '/api/3/action/package_update?id={}'.format(package_name)), headers = ckan_headers) try: response = urllib2.urlopen(request, urllib.quote(json.dumps(package))) except urllib2.HTTPError as response: response_text = response.read() try: response_dict = json.loads(response_text) except ValueError: log.error(u'An exception occured while updating package: {0}'.format(package)) log.error(response_text) raise for key, value in response_dict.iteritems(): print '{} = {}'.format(key, value) else: assert response.code == 200 response_dict = json.loads(response.read()) package = conv.check(conv.pipe( conv.make_ckan_json_to_package(), conv.not_none, ))(response_dict['result'], state = conv.default_state) package_id = package['id'] resources = package['resources'] if resources: assert len(resources) == 1, package existing_resource = resources[0].copy() existing_resource['package_id'] = package_id else: existing_resource = {} resource = existing_resource.copy() resource.update(dict( package_id = package_id, # format = data.get('Format'), name = u"Base de données", url = urlparse.urljoin(conf['weckan.site_url'], '/dataset/{}'.format(package_name)), # revision_id – (optional) description = u"""\ Base de données générée automatiquement à partir du contenu de data.gouv.fr\ """, # format (string) – (optional) # hash (string) – (optional) # resource_type (string) – (optional) # mimetype (string) – (optional) # mimetype_inner (string) – (optional) # webstore_url (string) – (optional) # cache_url (string) – (optional) # size (int) – (optional) # created (iso date string) – (optional) # last_modified (iso date string) – (optional) # cache_last_updated (iso date string) – (optional) # webstore_last_updated (iso date string) – (optional) )) if resource != existing_resource: if resource.get('id') is None: request = urllib2.Request(urlparse.urljoin(conf['ckan.site_url'], '/api/3/action/resource_create'), headers = ckan_headers) try: response = urllib2.urlopen(request, urllib.quote(json.dumps(resource))) except urllib2.HTTPError as response: response_dict = json.loads(response.read()) for key, value in response_dict.iteritems(): print '{} = {}'.format(key, value) raise else: assert response.code == 200 response_dict = json.loads(response.read()) 
else: request = urllib2.Request(urlparse.urljoin(conf['ckan.site_url'], '/api/3/action/resource_update?id={}'.format(resource['id'])), headers = ckan_headers) try: response = urllib2.urlopen(request, urllib.quote(json.dumps(resource))) except urllib2.HTTPError as response: response_dict = json.loads(response.read()) for key, value in response_dict.iteritems(): print '{} = {}'.format(key, value) raise else: assert response.code == 200 response_dict = json.loads(response.read()) resource = conv.check(conv.pipe( conv.make_ckan_json_to_resource(), conv.not_none, ))(response_dict['result'], state = conv.default_state) if args.delete: request = urllib2.Request(urlparse.urljoin(conf['ckan.site_url'], '/api/3/action/datastore_delete'), headers = ckan_headers) try: response = urllib2.urlopen(request, urllib.quote(json.dumps(dict( resource_id = resource['id'], )))) except urllib2.HTTPError as response: response_text = response.read() try: response_dict = json.loads(response_text) except ValueError: log.error(u'An exception occured while deleting datastore: {0}'.format(resource['id'])) log.error(response_text) raise for key, value in response_dict.iteritems(): print '{} = {}'.format(key, value) raise for index in range(2): datastore = dict( fields = [ dict(id = 'author', type = 'text'), dict(id = 'author_email', type = 'text'), dict(id = 'ckan_url', type = 'text'), dict(id = 'extras', type = 'json'), dict(id = 'frequency', type = 'text'), dict(id = 'groups', type = 'json'), dict(id = 'id', type = 'text'), # dict(id = 'isopen', type = 'bool'), dict(id = 'license_id', type = 'text'), # dict(id = 'license_title', type = 'text'), # dict(id = 'license_url', type = 'text'), dict(id = 'maintainer', type = 'text'), dict(id = 'maintainer_email', type = 'text'), dict(id = 'metadata_created', type = 'date'), dict(id = 'metadata_modified', type = 'date'), dict(id = 'name', type = 'text'), dict(id = 'notes', type = 'text'), dict(id = 'organization', type = 'json'), dict(id = 'owner_org', type = 'text'), # dict(id = 'private', type = 'bool'), dict(id = 'relationships_as_object', type = 'json'), dict(id = 'relationships_as_subject', type = 'json'), dict(id = 'resources', type = 'json'), # dict(id = 'revision_id', type = 'text'), dict(id = 'revision_timestamp', type = 'timestamp'), # dict(id = 'state', type = 'text'), dict(id = 'supplier', type = 'json'), dict(id = 'supplier_id', type = 'text'), dict(id = 'tags', type = 'json'), dict(id = 'temporal_coverage_from', type = 'text'), # not a "date", because it can be just a year dict(id = 'temporal_coverage_to', type = 'text'), # not a "date", because it can be just a year dict(id = 'territorial_coverage', type = 'text'), dict(id = 'territorial_coverage_granularity', type = 'text'), dict(id = 'title', type = 'text'), # dict(id = 'tracking_summary', type = 'json'), dict(id = 'type', type = 'text'), dict(id = 'url', type = 'text'), dict(id = 'version', type = 'text'), ], primary_key = 'id', resource_id = resource['id'], ) request = urllib2.Request(urlparse.urljoin(conf['ckan.site_url'], '/api/3/action/datastore_create'), headers = ckan_headers) try: response = urllib2.urlopen(request, urllib.quote(json.dumps(datastore))) except urllib2.HTTPError as response: response_text = response.read() try: response_dict = json.loads(response_text) except ValueError: log.error(u'An exception occured while creating datastore: {0}'.format(datastore)) log.error(response_text) raise if response.code == 409 and index == 0: # Conflict: The fields may have changed. 
request = urllib2.Request(urlparse.urljoin(conf['ckan.site_url'], '/api/3/action/datastore_delete'), headers = ckan_headers) try: response = urllib2.urlopen(request, urllib.quote(json.dumps(dict( resource_id = resource['id'], )))) except urllib2.HTTPError as response: response_text = response.read() try: response_dict = json.loads(response_text) except ValueError: log.error(u'An exception occurred while deleting datastore: {0}'.format(resource['id'])) log.error(response_text) raise for key, value in response_dict.iteritems(): print '{} = {}'.format(key, value) raise continue for key, value in response_dict.iteritems(): print '{} = {}'.format(key, value) raise assert response.code == 200 response_dict = json.loads(response.read()) datastore = conv.check(conv.pipe( conv.make_ckan_json_to_datastore(), conv.not_none, ))(response_dict['result'], state = conv.default_state) break if args.fedmsg: import fedmsg fedmsg_conf = conv.check(conv.struct( dict( environment = conv.pipe( conv.empty_to_none, conv.test_in(['dev', 'prod', 'stg']), ), modname = conv.pipe( conv.empty_to_none, conv.test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'), conv.default('ckan'), ), # name = conv.pipe( # conv.empty_to_none, # conv.default('ckan.{}'.format(hostname)), # ), topic_prefix = conv.pipe( conv.empty_to_none, conv.test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'), ), ), default = 'drop', ))(dict(config_parser.items('fedmsg')), state = conv.default_state) # Read in the config from /etc/fedmsg.d/. fedmsg_config = fedmsg.config.load_config([], None) # Disable a warning about not sending. We know. We only want to tail. fedmsg_config['mute'] = True # Disable timing out so that we can tail forever. This is deprecated # and will disappear in future versions. fedmsg_config['timeout'] = 0 # For the time being, don't require message to be signed. fedmsg_config['validate_signatures'] = False for key, value in fedmsg_conf.iteritems(): if value is not None: fedmsg_config[key] = value expected_topic_prefix = '{}.{}.ckan.'.format(fedmsg_config['topic_prefix'], fedmsg_config['environment']) for name, endpoint, topic, message in fedmsg.tail_messages(**fedmsg_config): if not topic.startswith(expected_topic_prefix): log.debug(u'Ignoring message: {}, {}'.format(topic, name)) continue kind, action = topic[len(expected_topic_prefix):].split('.') if kind == 'package': if action in ('create', 'update'): package = conv.check(conv.pipe( conv.make_ckan_json_to_package(drop_none_values = True), conv.not_none, conv.ckan_input_package_to_output_package, ))(message['msg'], state = conv.default_state) if package['id'] == package_id: # Avoid infinite loop.
continue log.info(u'Upserting package "{}".'.format(package['name'])) assert package.get('ckan_url') is None, package package['ckan_url'] = urlparse.urljoin(conf['weckan.site_url'], '/dataset/{}'.format(package['name'])) request = urllib2.Request(urlparse.urljoin(conf['ckan.site_url'], '/api/3/action/datastore_upsert'), headers = ckan_headers) try: response = urllib2.urlopen(request, urllib.quote(json.dumps(dict( method = 'upsert', records = [package], resource_id = resource['id'], )))) except urllib2.HTTPError as response: response_text = response.read() log.error(u'An exception occurred while upserting package in datastore: {0}'.format(package)) try: response_dict = json.loads(response_text) except ValueError: log.error(response_text) raise for key, value in response_dict.iteritems(): print '{} = {}'.format(key, value) raise assert response.code == 200 response_dict = json.loads(response.read()) assert response_dict['success'] is True # upsert = response_dict['result'] elif action == 'delete': package = message['msg'] # contains only "id". if package['id'] == package_id: # Avoid infinite loop. continue log.info(u'Deleting package "{}".'.format(package)) request = urllib2.Request(urlparse.urljoin(conf['ckan.site_url'], '/api/3/action/datastore_delete'), headers = ckan_headers) try: response = urllib2.urlopen(request, urllib.quote(json.dumps(dict( filters = dict( id = package['id'], ), resource_id = resource['id'], )))) except urllib2.HTTPError as response: if response.code != 404: response_text = response.read() log.error(u'An exception occurred while deleting package from datastore: {0}'.format(package)) try: response_dict = json.loads(response_text) except ValueError: log.error(response_text) raise for key, value in response_dict.iteritems(): print '{} = {}'.format(key, value) raise else: assert response.code == 200 response_dict = json.loads(response.read()) assert response_dict['success'] is True # delete = response_dict['result'] else: log.debug(u'TODO: Handle {}, {} for {}'.format(kind, action, message)) else: log.debug(u'TODO: Handle {}, {} for {}'.format(kind, action, message)) else: # Retrieve names of packages already existing in CKAN.
request = urllib2.Request(urlparse.urljoin(conf['ckan.site_url'], '/api/3/action/package_list'), headers = ckan_headers) response = urllib2.urlopen(request) response_dict = json.loads(response.read()) packages_name = conv.check(conv.pipe( conv.ckan_json_to_name_list, conv.not_none, ))(response_dict['result'], state = conv.default_state) for package_name in packages_name: request = urllib2.Request(urlparse.urljoin(conf['ckan.site_url'], '/api/3/action/package_show?id={}'.format(package_name)), headers = ckan_headers) response = urllib2.urlopen(request) response_dict = json.loads(response.read()) package = conv.check(conv.pipe( conv.make_ckan_json_to_package(drop_none_values = True), conv.not_none, conv.ckan_input_package_to_output_package, ))(response_dict['result'], state = conv.default_state) assert package.get('ckan_url') is None, package package['ckan_url'] = urlparse.urljoin(conf['weckan.site_url'], '/dataset/{}'.format(package['name'])) request = urllib2.Request(urlparse.urljoin(conf['ckan.site_url'], '/api/3/action/datastore_upsert'), headers = ckan_headers) try: response = urllib2.urlopen(request, urllib.quote(json.dumps(dict( method = 'upsert', records = [package], resource_id = resource['id'], )))) except urllib2.HTTPError as response: response_text = response.read() log.error(u'An exception occurred while upserting package in datastore: {0}'.format(package)) try: response_dict = json.loads(response_text) except ValueError: log.error(response_text) raise for key, value in response_dict.iteritems(): print '{} = {}'.format(key, value) raise assert response.code == 200 response_dict = json.loads(response.read()) assert response_dict['success'] is True # upsert = response_dict['result'] return 0
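The script above repeats the same urllib2 boilerplate for every CKAN action call (package_create, package_update, resource_create, datastore_create, datastore_upsert, datastore_delete). A minimal sketch of that pattern factored into one helper, not a drop-in refactor: `site_url` and `headers` stand in for the script's conf['ckan.site_url'] and ckan_headers, and the percent-encoded JSON body mirrors the urllib.quote(json.dumps(...)) convention used throughout.

    import json
    import urllib
    import urllib2
    import urlparse

    def ckan_action(site_url, action, payload, headers):
        # POST a payload to CKAN's action API and return the decoded 'result'.
        request = urllib2.Request(
            urlparse.urljoin(site_url, '/api/3/action/{}'.format(action)),
            headers = headers,
            )
        try:
            response = urllib2.urlopen(request, urllib.quote(json.dumps(payload)))
        except urllib2.HTTPError as error_response:
            # CKAN wraps errors in JSON as well; surface them before re-raising.
            print error_response.read()
            raise
        response_dict = json.loads(response.read())
        assert response_dict['success'] is True
        return response_dict['result']

With a helper like this, the create-or-update branch collapses to choosing between 'package_create' and 'package_update' and passing the package dict as the payload.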
def main(): parser = argparse.ArgumentParser(description = __doc__) parser.add_argument('config', help = 'path of configuration file') parser.add_argument('-f', '--fedmsg', action = 'store_true', help = 'poll fedmsg events') parser.add_argument('-v', '--verbose', action = 'store_true', help = 'increase output verbosity') global args args = parser.parse_args() logging.basicConfig(level = logging.DEBUG if args.verbose else logging.WARNING, stream = sys.stdout) config_parser = ConfigParser.SafeConfigParser(dict( here = os.path.dirname(os.path.abspath(os.path.normpath(args.config))), )) config_parser.read(args.config) global conf conf = conv.check(conv.pipe( conv.test_isinstance(dict), conv.struct( { 'ckan_of_worms.site_url': conv.pipe( conv.make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True, full = True), conv.not_none, ), 'tumblr.access_token_key': conv.pipe( conv.cleanup_line, conv.not_none, ), 'tumblr.access_token_secret': conv.pipe( conv.cleanup_line, conv.not_none, ), 'tumblr.client_key': conv.pipe( conv.cleanup_line, conv.not_none, ), 'tumblr.client_secret': conv.pipe( conv.cleanup_line, conv.not_none, ), 'tumblr.hostname': conv.pipe( conv.cleanup_line, conv.not_none, ), 'user_agent': conv.pipe( conv.cleanup_line, conv.not_none, ), }, default = 'drop', ), conv.not_none, ))(dict(config_parser.items('CowBots-Tumblr-Related')), conv.default_state) cache_dir = os.path.join(app_dir, 'cache') if not os.path.exists(cache_dir): os.makedirs(cache_dir) data_dir = os.path.join(app_dir, 'data') if not os.path.exists(data_dir): os.makedirs(data_dir) global db db = anydbm.open(os.path.join(data_dir, 'tumblr-posts'), 'c') global headers headers = { 'User-Agent': conf['user_agent'], } global templates_lookup templates_lookup = mako.lookup.TemplateLookup( default_filters = ['h'], directories = [os.path.join(app_dir, 'tumblr-related-templates')], input_encoding = 'utf-8', module_directory = os.path.join(cache_dir, 'tumblr-related-templates'), strict_undefined = True, ) # # To obtain access token, uncomment the following code, run it and put the results in configuration file. 
# oauth = requests_oauthlib.OAuth1Session(conf['tumblr.client_key'], client_secret = conf['tumblr.client_secret']) # fetch_response = oauth.fetch_request_token('http://www.tumblr.com/oauth/request_token') # request_token_key = fetch_response.get('oauth_token') # request_token_secret = fetch_response.get('oauth_token_secret') # authorization_url = oauth.authorization_url('http://www.tumblr.com/oauth/authorize') # print 'Please go here and authorize,', authorization_url # redirect_response = raw_input('Paste the full redirect URL here: ') # oauth_response = oauth.parse_authorization_response(redirect_response) # verifier = oauth_response.get('oauth_verifier') # access_token_url = 'http://www.tumblr.com/oauth/access_token' # oauth = requests_oauthlib.OAuth1Session(conf['tumblr.client_key'], # client_secret = conf['tumblr.client_secret'], # resource_owner_key = request_token_key, # resource_owner_secret = request_token_secret, # verifier = verifier, # ) # oauth_tokens = oauth.fetch_access_token(access_token_url) # access_token_key = oauth_tokens.get('oauth_token') # print 'access_token_key =', access_token_key # access_token_secret = oauth_tokens.get('oauth_token_secret') # print 'access_token_secret =', access_token_secret # return 0 global oauth oauth = requests_oauthlib.OAuth1(conf['tumblr.client_key'], conf['tumblr.client_secret'], conf['tumblr.access_token_key'], conf['tumblr.access_token_secret']) if args.fedmsg: import fedmsg fedmsg_conf = conv.check(conv.struct( dict( environment = conv.pipe( conv.empty_to_none, conv.test_in(['dev', 'prod', 'stg']), ), modname = conv.pipe( conv.empty_to_none, conv.test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'), conv.default('ckan_of_worms'), ), # name = conv.pipe( # conv.empty_to_none, # conv.default('ckan_of_worms.{}'.format(hostname)), # ), topic_prefix = conv.pipe( conv.empty_to_none, conv.test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'), ), ), default = 'drop', ))(dict(config_parser.items('fedmsg'))) # Read in the config from /etc/fedmsg.d/. fedmsg_config = fedmsg.config.load_config([], None) # Disable a warning about not sending. We know. We only want to tail. fedmsg_config['mute'] = True # Disable timing out so that we can tail forever. This is deprecated # and will disappear in future versions. fedmsg_config['timeout'] = 0 # For the time being, don't require message to be signed. 
fedmsg_config['validate_signatures'] = False for key, value in fedmsg_conf.iteritems(): if value is not None: fedmsg_config[key] = value expected_topic_prefix = '{}.{}.ckan_of_worms.'.format(fedmsg_config['topic_prefix'], fedmsg_config['environment']) for name, endpoint, topic, message in fedmsg.tail_messages(**fedmsg_config): if not topic.startswith(expected_topic_prefix): log.debug(u'Ignoring message: {}, {}'.format(topic, name)) continue kind, action = topic[len(expected_topic_prefix):].split('.') if kind == 'dataset': if action in ('create', 'update'): dataset_upserted(message['msg']) else: request = requests.get(urlparse.urljoin(conf['ckan_of_worms.site_url'], 'api/1/datasets'), params = dict( related = 1, ), headers = headers, ) datasets_id = conv.check(conv.pipe( cow_response_to_value, conv.not_none, ))(request.text, state = conv.default_state) for dataset_id in datasets_id: response = requests.get(urlparse.urljoin(conf['ckan_of_worms.site_url'], 'api/1/datasets/{}'.format(dataset_id)), headers = headers) dataset = conv.check(conv.pipe( cow_response_to_value, conv.not_none, ))(response.text, state = conv.default_state) dataset_upserted(dataset) return 0
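The same fedmsg bootstrap recurs verbatim in each of these scripts: load /etc/fedmsg.d/, mute publishing, disable the (deprecated) timeout, skip signature validation, then overlay the INI-file values. A sketch of that block factored into a helper, assuming fedmsg_conf has already been validated by the conv pipeline shown above:

    import fedmsg.config

    def load_tail_config(fedmsg_conf):
        # Merge validated INI overrides into the system-wide fedmsg config,
        # tuned for tailing only (never publishing).
        config = fedmsg.config.load_config([], None)
        config['mute'] = True  # We only tail; silence the "not sending" warning.
        config['timeout'] = 0  # Tail forever.
        config['validate_signatures'] = False
        for key, value in fedmsg_conf.iteritems():
            if value is not None:
                config[key] = value
        return config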
def run(self): # This is a "required" option... :P if not self.config['command']: self.log.error("You must provide a --command to run.") sys.exit(1) # Disable sending self.config['publish_endpoint'] = None # Disable timeouts. We want to tail forever! self.config['timeout'] = 0 # Even though fedmsg-trigger won't be sending any messages, give it a # name to conform with the other commands. self.config['name'] = 'relay_inbound' # Tail is never going to send any messages, so we suppress warnings # about having no publishing sockets established. self.config['mute'] = True fedmsg.init(**self.config) exclusive_regexp = re.compile(self.config['exclusive_regexp']) inclusive_regexp = re.compile(self.config['inclusive_regexp']) wait_for = int(self.config['wait_for']) max_queue_size = int(self.config['max_queue_size']) timer = None que = queue.Queue() def execute_queue(): while not que.empty(): message = que.get() result = self.run_command(self.config['command'], message) if result != 0: self.log.info("Command returned error code %r" % result) try: for name, ep, topic, message in fedmsg.tail_messages( **self.config): if exclusive_regexp.search(topic): continue if not inclusive_regexp.search(topic): continue que.put(message) if timer is not None: # Try to cancel it timer.cancel() if timer is None or not timer.is_alive(): # Either there was no timer yet, or it was still waiting # -> Let's start a new one if (max_queue_size > 0 and que.qsize() > max_queue_size): # If the que is too big, let's just run it NOW timer = threading.Timer(0, execute_queue) else: timer = threading.Timer(wait_for, execute_queue) timer.start() except KeyboardInterrupt: if timer is not None: timer.cancel() # Let's wait for commands to finish timer.join()
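The loop above implements a debounce: each message lands in a queue and restarts a timer, and the queue is flushed either when the timer fires or immediately once it outgrows max_queue_size. The same scheduling logic isolated into a self-contained sketch, with a plain callable standing in for run_command:

    import threading
    import Queue as queue  # the original reaches this via six.moves

    def make_debouncer(handle, wait_for = 5, max_queue_size = 100):
        que = queue.Queue()
        state = {'timer': None}

        def execute_queue():
            while not que.empty():
                handle(que.get())

        def push(message):
            que.put(message)
            timer = state['timer']
            if timer is not None:
                timer.cancel()  # A flush is pending; postpone it.
            if timer is None or not timer.is_alive():
                # Flush immediately once the queue is oversized, else wait.
                delay = 0 if max_queue_size > 0 and que.qsize() > max_queue_size else wait_for
                state['timer'] = threading.Timer(delay, execute_queue)
                state['timer'].start()

        return push

Wiring it up is then just push = make_debouncer(handle) followed by a push(message) call inside the tail_messages() loop.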
def run(self): # Disable sending self.config['publish_endpoint'] = None # Disable timeouts. We want to tail forever! self.config['timeout'] = 0 # Even though fedmsg-tail won't be sending any messages, give it a # name to conform with the other commands. self.config['name'] = 'relay_inbound' # Tail is never going to send any messages, so we suppress warnings # about having no publishing sockets established. self.config['mute'] = True fedmsg.init(**self.config) fedmsg.meta.make_processors(**self.config) # Build a message formatter formatter = lambda d: d if self.config['pretty']: def formatter(d): d['timestamp'] = time.ctime(d['timestamp']) d = fedmsg.crypto.strip_credentials(d) return "\n" + pprint.pformat(d) if self.config['really_pretty']: def formatter(d): d = fedmsg.crypto.strip_credentials(d) fancy = pygments.highlight( fedmsg.encoding.pretty_dumps(d), pygments.lexers.JavascriptLexer(), pygments.formatters.TerminalFormatter() ).strip() return "\n" + fancy if self.config['terse']: formatter = lambda d: "\n" + fedmsg.meta.msg2repr(d, **self.config) if self.config['gource']: # Output strings suitable for consumption by the "gource" tool. # We have 8 colors here and an unknown number of message types. # (There were 14 message types at the time this code was written). # Here we build a dict that maps message type names (a.k.a modnames # or services) to hex colors for usage in the gource graph. We # wrap-around that dict if there are more message types than # there are colors (which there almost certainly are). procs = [proc.__name__.lower() for proc in fedmsg.meta.processors] colors = ["FFFFFF", "008F37", "FF680A", "CC4E00", "8F0058", "8F7E00", "37008F", "7E008F"] n_wraps = 1 + int(math.ceil(len(colors) / float(len(procs)))) colors = colors * n_wraps color_lookup = dict(zip(procs, colors)) cache_directory = self.config['gource_user_image_dir'] # After all that color trickiness, here is our formatter we'll use. def formatter(message): """ Use this like:: $ fedmsg-tail --gource | gource \ -i 0 \ --user-image-dir ~/.cache/avatar/ \ --log-format custom - """ proc = fedmsg.meta.msg2processor(message, **self.config) avatars = fedmsg.meta.msg2avatars(message, **self.config) objs = fedmsg.meta.msg2objects(message, **self.config) name = proc.__name__.lower() lines = [] for user, obj in itertools.product(avatars.keys(), objs): _cache_avatar(user, avatars[user], cache_directory) lines.append("%i|%s|A|%s|%s" % ( message['timestamp'], user, name + "/" + obj, color_lookup[name], )) return "\n".join(lines) # Build regular expressions for use in our loop. exclusive_regexp = re.compile(self.config['exclusive_regexp']) inclusive_regexp = re.compile(self.config['inclusive_regexp']) # Build username and package filter sets for use in our loop. users, packages = set(), set() if self.config['users']: users = set(map(str.strip, self.config['users'].split(','))) if self.config['packages']: packages = set(map(str.strip, self.config['packages'].split(','))) # Spin up a zmq.Poller and yield messages for name, ep, topic, message in fedmsg.tail_messages(**self.config): if exclusive_regexp.search(topic): continue if not inclusive_regexp.search(topic): continue actual_users = fedmsg.meta.msg2usernames(message, **self.config) if users and not users.intersection(actual_users): continue actual_packages = fedmsg.meta.msg2packages(message, **self.config) if packages and not packages.intersection(actual_packages): continue self.log.info(formatter(message))
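The wrap-around color table in the gource formatter is worth a second look. The intent is to repeat the eight-color palette until zip() can pair every processor with a color; note, though, that the snippet divides len(colors) by len(procs), which appears to under-cover once processors outnumber colors. A tiny sketch of the computation with the ratio flipped to the direction that guarantees coverage:

    import math

    def build_color_lookup(procs, colors):
        # Repeat the palette until it is at least as long as procs,
        # then pair processor names with colors.
        n_wraps = 1 + int(math.ceil(len(procs) / float(len(colors))))
        return dict(zip(procs, colors * n_wraps))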
def main(): parser = argparse.ArgumentParser(description = __doc__) parser.add_argument('config', help = 'path of configuration file') parser.add_argument('-f', '--fedmsg', action = 'store_true', help = 'poll fedmsg events') parser.add_argument('-v', '--verbose', action = 'store_true', help = 'increase output verbosity') global args args = parser.parse_args() logging.basicConfig(level = logging.DEBUG if args.verbose else logging.WARNING, stream = sys.stdout) config_parser = ConfigParser.SafeConfigParser(dict( here = os.path.dirname(os.path.abspath(os.path.normpath(args.config))), )) config_parser.read(args.config) global conf conf = conv.check(conv.pipe( conv.test_isinstance(dict), conv.struct( { 'admin_email': conv.pipe( conv.function(lambda emails: set(emails.split())), conv.uniform_sequence( conv.pipe( conv.input_to_email, conv.test_email(), ), constructor = lambda emails: sorted(set(emails)), drop_none_items = True, ), conv.empty_to_none, conv.not_none, ), 'ckan_of_worms.site_url': conv.pipe( conv.make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True, full = True), conv.not_none, ), 'from_email': conv.pipe( conv.input_to_email, conv.test_email(), conv.not_none, ), 'user_agent': conv.pipe( conv.cleanup_line, conv.not_none, ), 'weckan.site_url': conv.pipe( conv.make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True, full = True), conv.not_none, ), 'wiki.site_url': conv.pipe( conv.make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True, full = True), conv.not_none, ), 'youckan.site_url': conv.pipe( conv.make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True, full = True), conv.not_none, ), }, default = 'drop', ), conv.not_none, ))(dict(config_parser.items('CowBots-Email-Changes')), conv.default_state) cache_dir = os.path.join(app_dir, 'cache') if not os.path.exists(cache_dir): os.makedirs(cache_dir) global headers headers = { 'User-Agent': conf['user_agent'], } global templates_lookup templates_lookup = mako.lookup.TemplateLookup( directories = [os.path.join(app_dir, 'email-changes-templates')], input_encoding = 'utf-8', module_directory = os.path.join(cache_dir, 'email-changes-templates'), ) if args.fedmsg: import fedmsg fedmsg_conf = conv.check(conv.struct( dict( environment = conv.pipe( conv.empty_to_none, conv.test_in(['dev', 'prod', 'stg']), ), modname = conv.pipe( conv.empty_to_none, conv.test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'), conv.default('ckan_of_worms'), ), # name = conv.pipe( # conv.empty_to_none, # conv.default('ckan_of_worms.{}'.format(hostname)), # ), topic_prefix = conv.pipe( conv.empty_to_none, conv.test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'), ), ), default = 'drop', ))(dict(config_parser.items('fedmsg'))) # Read in the config from /etc/fedmsg.d/. fedmsg_config = fedmsg.config.load_config([], None) # Disable a warning about not sending. We know. We only want to tail. fedmsg_config['mute'] = True # Disable timing out so that we can tail forever. This is deprecated # and will disappear in future versions. fedmsg_config['timeout'] = 0 # For the time being, don't require message to be signed. 
fedmsg_config['validate_signatures'] = False for key, value in fedmsg_conf.iteritems(): if value is not None: fedmsg_config[key] = value ckan_of_worms_topic_prefix = '{}.{}.ckan_of_worms.'.format(fedmsg_config['topic_prefix'], fedmsg_config['environment']) wiki_topic_prefix = '{}.{}.wiki.'.format(fedmsg_config['topic_prefix'], fedmsg_config['environment']) for name, endpoint, topic, message in fedmsg.tail_messages(**fedmsg_config): if topic.startswith(ckan_of_worms_topic_prefix): kind, action = topic[len(ckan_of_worms_topic_prefix):].split('.') if kind == 'account': if action == 'create': account_created(message['msg']) elif kind == 'dataset': if action == 'create': dataset_created(message['msg']) elif kind == 'group': if action == 'create': group_created(message['msg']) elif kind == 'organization': if action == 'create': organization_created(message['msg']) elif kind == 'related': if action == 'create': related_created(message['msg']) elif topic.startswith(wiki_topic_prefix): kind, action = topic[len(wiki_topic_prefix):].split('.') if kind == 'article': if action == 'edit': article_edited(message['msg']) elif kind == 'upload': if action == 'complete': upload_completed(message['msg']) else: log.debug(u'Ignoring message: {}, {}'.format(topic, name)) else: pass # TODO return 0
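The if/elif ladder above grows a branch per (kind, action) pair. A dict keyed on that pair keeps the routing declarative; a sketch under the assumption that the handlers (account_created and friends) and log are the ones defined elsewhere in this script:

    HANDLERS = {
        ('account', 'create'): account_created,
        ('dataset', 'create'): dataset_created,
        ('group', 'create'): group_created,
        ('organization', 'create'): organization_created,
        ('related', 'create'): related_created,
        ('article', 'edit'): article_edited,
        ('upload', 'complete'): upload_completed,
        }

    def dispatch(kind, action, msg):
        handler = HANDLERS.get((kind, action))
        if handler is None:
            log.debug(u'No handler for {}.{}'.format(kind, action))
        else:
            handler(msg)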
def run(self): for name, endpoint, topic, msg in fedmsg.tail_messages(): for observer in self._observers.get(topic, []): observer(name, endpoint, topic, msg)
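The three-line run() above presupposes a self._observers mapping from topic to a list of callables. A usage sketch of how such a registry might look end to end; the class and method names here are illustrative, not taken from the original:

    import fedmsg

    class FedmsgDispatcher(object):
        def __init__(self):
            self._observers = {}

        def subscribe(self, topic, observer):
            # Several observers may watch the same topic.
            self._observers.setdefault(topic, []).append(observer)

        def run(self):
            for name, endpoint, topic, msg in fedmsg.tail_messages():
                for observer in self._observers.get(topic, []):
                    observer(name, endpoint, topic, msg)

    def announce(name, endpoint, topic, msg):
        print 'build state changed: {}'.format(msg.get('msg_id'))

    dispatcher = FedmsgDispatcher()
    dispatcher.subscribe('org.fedoraproject.prod.buildsys.build.state.change', announce)
    dispatcher.run()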
def main(): parser = argparse.ArgumentParser(description = __doc__) parser.add_argument('config', help = 'path of configuration file') parser.add_argument('-f', '--fedmsg', action = 'store_true', help = 'poll fedmsg events') parser.add_argument('-v', '--verbose', action = 'store_true', help = 'increase output verbosity') global args args = parser.parse_args() logging.basicConfig(level = logging.DEBUG if args.verbose else logging.WARNING, stream = sys.stdout) config_parser = ConfigParser.SafeConfigParser(dict( here = os.path.dirname(os.path.abspath(os.path.normpath(args.config))), )) config_parser.read(args.config) global conf conf = conv.check(conv.pipe( conv.test_isinstance(dict), conv.struct( { 'ckan_of_worms.site_url': conv.pipe( conv.make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True, full = True), conv.not_none, ), 'twitter.access_token_key': conv.pipe( conv.cleanup_line, conv.not_none, ), 'twitter.access_token_secret': conv.pipe( conv.cleanup_line, conv.not_none, ), 'twitter.consumer_key': conv.pipe( conv.cleanup_line, conv.not_none, ), 'twitter.consumer_secret': conv.pipe( conv.cleanup_line, conv.not_none, ), 'user_agent': conv.pipe( conv.cleanup_line, conv.not_none, ), 'weckan.site_url': conv.pipe( conv.make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True, full = True), conv.not_none, ), }, default = 'drop', ), conv.not_none, ))(dict(config_parser.items('CowBots-Tweet-Changes')), conv.default_state) global headers headers = { 'User-Agent': conf['user_agent'], } global twitter_api twitter_api = twitter.Api( consumer_key = conf['twitter.consumer_key'], consumer_secret = conf['twitter.consumer_secret'], access_token_key = conf['twitter.access_token_key'], access_token_secret = conf['twitter.access_token_secret'], ) # print twitter_api.VerifyCredentials() thread.start_new_thread(tweet_messages, ()) if args.fedmsg: import fedmsg fedmsg_conf = conv.check(conv.struct( dict( environment = conv.pipe( conv.empty_to_none, conv.test_in(['dev', 'prod', 'stg']), ), modname = conv.pipe( conv.empty_to_none, conv.test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'), conv.default('ckan_of_worms'), ), # name = conv.pipe( # conv.empty_to_none, # conv.default('ckan_of_worms.{}'.format(hostname)), # ), topic_prefix = conv.pipe( conv.empty_to_none, conv.test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'), ), ), default = 'drop', ))(dict(config_parser.items('fedmsg'))) # Read in the config from /etc/fedmsg.d/. fedmsg_config = fedmsg.config.load_config([], None) # Disable a warning about not sending. We know. We only want to tail. fedmsg_config['mute'] = True # Disable timing out so that we can tail forever. This is deprecated # and will disappear in future versions. fedmsg_config['timeout'] = 0 # For the time being, don't require message to be signed. 
fedmsg_config['validate_signatures'] = False for key, value in fedmsg_conf.iteritems(): if value is not None: fedmsg_config[key] = value expected_topic_prefix = '{}.{}.ckan_of_worms.'.format(fedmsg_config['topic_prefix'], fedmsg_config['environment']) for name, endpoint, topic, message in fedmsg.tail_messages(**fedmsg_config): if not topic.startswith(expected_topic_prefix): log.debug(u'Ignoring message: {}, {}'.format(topic, name)) continue kind, action = topic[len(expected_topic_prefix):].split('.') if kind == 'account': if action == 'create': account_created(message['msg']) elif kind == 'dataset': if action == 'create': dataset_created(message['msg']) elif kind == 'group': if action == 'create': group_created(message['msg']) elif kind == 'organization': if action == 'create': organization_created(message['msg']) else: pass # TODO return 0
break time.sleep(30) dom.create() print "Domain %s started" % (domain) if __name__ == '__main__': pidfile = open('/var/run/harness.pid', 'w') pid = str(os.getpid()) pidfile.write(pid) pidfile.close() config = fedmsg.config.load_config([], None) config['mute'] = True config['timeout'] = 0 fedmsg.meta.make_processors(**config) for name, endpoint, topic, msg in fedmsg.tail_messages(**config): if "buildsys.build.state.change" in topic: matchedmsg = fedmsg.meta.msg2repr(msg, **config) if "completed" in matchedmsg: if "kernel" in matchedmsg: objectmsg = fedmsg.meta.msg2subtitle(msg, legacy=False, **config) package = string.split(objectmsg, ' ') fcrelease = string.split(package[1], '.') domain = domainmap(fcrelease[-1]) logfile = open('/var/log/harness.log', 'a') logfile.write('Testing ' + package[1] + '\n') logfile.close() writelatest(domain, package[1]) dom32 = domain + '32' dom64 = domain + '64' print "starting domain %s" % (dom32)
# Tomas Hozza <*****@*****.**> # It requires the fedmsg package from __future__ import print_function import fedmsg import requests from rebasehelper.upstream_monitoring import UpstreamMonitoring # Testing from the command line: # echo "{'package':'wget', 'version': '1.6.13'}" | fedmsg-logger # Anitya version-update topic VERSION_UPDATE = 'anitya.project.version.update' while True: try: messages = fedmsg.tail_messages() for name, endpoint, topic, msg in messages: # TODO Use a logger instead of print, like the rebase-upstream.log file in /var/log/ #print (topic) if topic.endswith(VERSION_UPDATE): #print (endpoint) #print (msg) try: up = UpstreamMonitoring(name, endpoint, topic, msg) up.process_messsage() except: raise except requests.exceptions.ConnectionError: pass
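The monitor above restarts its tail loop whenever requests raises a ConnectionError, but it retries immediately and forever. A sketch of the same loop with a simple exponential backoff added; the backoff is my addition, not something the original does:

    import time

    import fedmsg
    import requests

    def tail_forever(handle, topic_suffix):
        delay = 1
        while True:
            try:
                for name, endpoint, topic, msg in fedmsg.tail_messages():
                    delay = 1  # A message got through; reset the backoff.
                    if topic.endswith(topic_suffix):
                        handle(name, endpoint, topic, msg)
            except requests.exceptions.ConnectionError:
                time.sleep(delay)
                delay = min(delay * 2, 60)  # Cap the wait at a minute.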
badgebot_topic_filter = config.get('badgebot_topic_filter') badgebot_username = config.get('badgebot_fas_username') # This is mandatory, otherwise we'll get banned from twitter. if not badgebot_topic_filter: print "No 'badgebot_topic_filter' found in the fedmsg config. Bailing." sys.exit(3) print "Looking for messages with %r in the topic" % badgebot_topic_filter if badgebot_username: print " Relating to the user %r" % badgebot_username else: print " Relating to any user at all!" print "Posting up to listen on the fedmsg bus. Waiting for a message..." for name, endpoint, topic, msg in fedmsg.tail_messages(): if badgebot_topic_filter not in topic: continue if badgebot_username: users = fedmsg.meta.msg2usernames(msg, **config) if badgebot_username not in users: print "%r is not among %r" % (badgebot_username, users) continue else: print "'badgebot_fas_username' not specified. Proceeding." # So, we know we have a fedbadges message and that it is for us. icon = fedmsg.meta.msg2icon(msg, **config) subtitle = fedmsg.meta.msg2subtitle(msg, **config) link = fedmsg.meta.msg2link(msg, **config)
def tail_messages(topic="", passive=False, **kw): for n, e, t, m in fedmsg.tail_messages(topic=topic, passive=passive, **kw): # Only acknowledge gilmsg messages to avoid catastrophic spam storm _acknowledge(m, **kw) yield n, e, t, m
def tweet(**kw): """ Rebroadcast messages to twitter and statusnet New values in the fedmsg configuration are needed for this to work. Lists and dicts of authentication credentials such as: - :term:`tweet_endpoints` - :term:`bitly_settings` And scalars to help with rate limiting such as: - :term:`tweet_hibernate_duration` - :term:`tweet_intermessage_pause` """ # First, sanity checking. if not kw.get('tweet_endpoints', None): raise ValueError("Not configured to tweet.") # Boilerplate.. kw['publish_endpoint'] = None kw['name'] = 'relay_inbound' kw['mute'] = True # Set up fedmsg fedmsg.init(**kw) fedmsg.text.make_processors(**kw) # Set up twitter and statusnet.. multiple accounts if configured settings = kw.get('tweet_endpoints', []) apis = [twitter_api.Api(**endpoint) for endpoint in settings] # Set up bitly settings = kw['bitly_settings'] bitly = bitlyapi.BitLy( settings['api_user'], settings['api_key'], ) # How long to sleep if we spew too fast. hibernate_duration = kw['tweet_hibernate_duration'] # Sleep a second or two in between messages to try to avoid the hibernate intermessage_pause = kw['tweet_intermessage_pause'] def _post_to_api(api, message): try: api.PostUpdate(message) except Exception as e: if 'Too many notices too fast;' in str(e): # Cool our heels then try again. print "Sleeping for", hibernate_duration time.sleep(hibernate_duration) _post_to_api(api, message) elif 'json decoding' in str(e): # Let it slide ... no idea what this one is. pass elif 'duplicate' in str(e): # Let it slide ... pass else: raise for name, ep, topic, msg in fedmsg.tail_messages(**kw): message = fedmsg.text.msg2subtitle(msg, **kw) link = fedmsg.text.msg2link(msg, **kw) if link: link = bitly.shorten(longUrl=link)['url'] message = (message[:138] + " ")[:139 - len(link)] + link else: message = message[:140] print("Tweeting %r" % message) for api in apis: _post_to_api(api, message) time.sleep(intermessage_pause)
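The truncation arithmetic in tweet() is terse: (message[:138] + " ")[:139 - len(link)] + link reserves one character for a separating space and enough room for the shortened link, keeping the total at or under 140. The same arithmetic unrolled into a small function for clarity:

    def fit_with_link(message, link, limit = 140):
        # Truncate message so that 'message link' fits within limit characters.
        if not link:
            return message[:limit]
        # Leave room for a trailing space plus the link itself.
        body = (message[:limit - 2] + " ")[:limit - 1 - len(link)]
        return body + link

For example, fit_with_link("x" * 200, "http://bit.ly/abc") yields a 139-character string: 122 characters of body (space included) plus the 17-character link.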
def yield_all_messages() -> Iterable[Tuple[str, dict]]: logger.info("listening on fedmsg") for name, endpoint, topic, msg in fedmsg.tail_messages(): yield topic, msg
def _yield_messages(topic: str) -> Iterable[Tuple[str, dict]]: logger.info("listening on fedmsg, topic=%s", topic) for name, endpoint, topic, msg in fedmsg.tail_messages(topic=topic): yield topic, msg
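A consuming loop for the typed generators above; these two are the only Python 3 snippets on this page (note the annotations), so the usage sketch follows suit. The topic string is a made-up example, and logger and _yield_messages are assumed to be the ones defined above:

    from typing import Iterable, Tuple

    def process(messages: Iterable[Tuple[str, dict]]) -> None:
        for topic, msg in messages:
            logger.info("handling %s (id=%s)", topic, msg.get("msg_id"))

    process(_yield_messages(topic="org.fedoraproject.prod.copr.build.end"))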
def run(self): # Disable sending self.config['publish_endpoint'] = None # Disable timeouts. We want to tail forever! self.config['timeout'] = 0 # Even though fedmsg-tail won't be sending any messages, give it a # name to conform with the other commands. self.config['name'] = 'relay_inbound' # Tail is never going to send any messages, so we suppress warnings # about having no publishing sockets established. self.config['mute'] = True fedmsg.init(**self.config) # Build a message formatter formatter = lambda d: d if self.config['pretty']: def formatter(d): d['timestamp'] = time.ctime(d['timestamp']) d = fedmsg.crypto.strip_credentials(d) return "\n" + pprint.pformat(d) if self.config['really_pretty']: def formatter(d): d = fedmsg.crypto.strip_credentials(d) fancy = pygments.highlight( fedmsg.encoding.pretty_dumps(d), pygments.lexers.JavascriptLexer(), pygments.formatters.TerminalFormatter()).strip() return "\n" + fancy if self.config['query']: def formatter(d): result = fedmsg.utils.dict_query(d, self.config['query']) return ", ".join( [six.text_type(value) for value in result.values()]) if self.config['terse']: formatter = lambda d: "\n" + fedmsg.meta.msg2repr(d, **self.config) if self.config['cowsay']: def formatter(d): result, error = cowsay_output( fedmsg.meta.msg2subtitle(d, **self.config)) if not error: return "\n" + result else: return "\n" + error # Build regular expressions for use in our loop. exclusive_regexp = re.compile(self.config['exclusive_regexp']) inclusive_regexp = re.compile(self.config['inclusive_regexp']) # Build username and package filter sets for use in our loop. users, packages = set(), set() if self.config['users']: users = set(map(str.strip, self.config['users'].split(','))) if self.config['packages']: packages = set(map(str.strip, self.config['packages'].split(','))) # Only initialize this if we have to if users or packages or self.config['terse'] or self.config['cowsay']: fedmsg.meta.make_processors(**self.config) # Spin up a zmq.Poller and yield messages for name, ep, topic, message in fedmsg.tail_messages(**self.config): if exclusive_regexp.search(topic): continue if not inclusive_regexp.search(topic): continue if users: actual = fedmsg.meta.msg2usernames(message, **self.config) if not users.intersection(actual): continue if packages: actual = fedmsg.meta.msg2packages(message, **self.config) if not packages.intersection(actual): continue output = formatter(message) if output: self.log.info(output)
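The --query formatter above leans on fedmsg.utils.dict_query to pull comma-separated, dot-delimited paths out of the message dict. A hand-rolled stand-in (not fedmsg's implementation) to show the shape of that lookup:

    def dict_query(dic, query):
        # Resolve comma-separated, dot-delimited paths against a nested dict.
        def walk(node, parts):
            for part in parts:
                if not isinstance(node, dict):
                    return None
                node = node.get(part)
            return node
        return dict(
            (path, walk(dic, path.split('.')))
            for path in query.split(','))

    # dict_query({'msg': {'name': 'relrod'}}, 'msg.name') == {'msg.name': 'relrod'}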
fedmsg.meta.make_processors(**config) sck = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sck.connect((irc, port)) sck.send('NICK ' + nick + '\r\n') sck.send('USER ' + nick + ' ' + nick + ' ' + nick + ' :Fedbadges bot\r\n') sck.send('JOIN ' + channel + '\r\n') data = '' while True: data = sck.recv(2040) if data.find('PING') != -1: sck.send('PONG ' + data.split()[1] + '\r\n') break print "Posting up to listen on the fedmsg bus. Waiting for a message..." for name, endpoint, topic, msg in fedmsg.tail_messages(): if topic_filter not in topic: continue title = fedmsg.meta.msg2title(msg, **config) if title != 'fedbadges.badge.award': continue subtitle = fedmsg.meta.msg2subtitle(msg, **config) link = fedmsg.meta.msg2link(msg, **config) sck.send('PRIVMSG ' + channel + '|' + nick + ' :' + subtitle + ". Congratulations. Check out the badges here: " + link + '\r\n')
def main(): # grab token and connect to pagure token = os.getenv('PAGURE_TOKEN') if token: logger.info("Using detected token to talk to pagure.") pg = Pagure(pagure_token=token) else: logger.info("No pagure token was detected.") logger.info( "This script will run but won't be able to create new issues.") pg = Pagure() # Set the repo to create new issues against pg.repo = PAGURE_REPO # Used for printing out a value when the day has changed date = datetime.date.today() # Grab messages from fedmsg and process them as we go logger.info("Starting listening for fedmsgs..") for name, endpoint, topic, msg in fedmsg.tail_messages(): logger.debug(topic) # Print out a log statement if the day has changed today = datetime.date.today() if today != date: date = today logger.info('mark') if "pungi.compose.status.change" in topic: print('.', end='') # some sort of indicator of progress print('.') # some sort of indicator of progress status = msg['msg']['status'] # If we are in good states then continue if status in ['FINISHED', 'STARTED']: continue # We have a compose that either failed or had missing artifacts # create a new issue. title = msg['msg']['compose_id'] + ' ' + status logfileurl = msg['msg'][ 'location'] + '/../logs/global/pungi.global.log' logger.info("%s\t%s" % (title, logfileurl)) # variable to hold description for issue content = "[pungi.global.log](%s)\n\n" % logfileurl # If we fail to get the log file contents then we'll just # best effort put a message in the issue. try: lines = requests.get(logfileurl).text.splitlines() except Exception: logger.info( "Failed to retrieve log contents from server.. skipping analysis" ) content += "Failed to retrieve log contents from server.. skipping analysis" lines = [] for x in range(1, len(lines)): line = lines[x - 1][20:] # trim date off log lines nextline = lines[x][20:] # trim date off log lines # If this is a [FAIL] line then we take it and the # next line and add them in markdown format. Also grab # the taskid if we can and print a hyperlink to koji if re.search(r'\[FAIL\]', line): content += get_supporting_text(nextline) content += "```\n%s\n%s\n```\n\n" % (line, nextline) # If this is the Compose run failed line, then add it # to the description too if re.search(r'.*Compose run failed.*', line): content += ("- Compose run failed because: %s\n" % get_supporting_text(line)) content += "```\n%s\n```\n" % (line) logger.debug(content) # pull only part of the compose ID for the tag to set tag = re.search(r'(.*)-\d{8}', msg['msg']['compose_id']).group(1) #TODO figure out how to set tag on an issue if token: pg.create_issue(title=title, content=content)
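The log analysis above keys off pungi's fixed-width timestamps (the first 20 characters of each line) and pairs every [FAIL] line with the line that follows it. The same scan isolated into a generator, with the 20-character trim width taken from the snippet:

    import re

    def failed_pairs(lines, stamp_width = 20):
        # Yield ([FAIL] line, following line) pairs with timestamps trimmed off.
        trimmed = [line[stamp_width:] for line in lines]
        for line, nextline in zip(trimmed, trimmed[1:]):
            if re.search(r'\[FAIL\]', line):
                yield line, nextline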
def main(): parser = argparse.ArgumentParser(description = __doc__) parser.add_argument('config', help = 'path of configuration file') parser.add_argument('-v', '--verbose', action = 'store_true', help = 'increase output verbosity') global args args = parser.parse_args() logging.basicConfig(level = logging.DEBUG if args.verbose else logging.WARNING, stream = sys.stdout) config_parser = ConfigParser.SafeConfigParser(dict( here = os.path.dirname(os.path.abspath(os.path.normpath(args.config))), )) config_parser.read(args.config) global conf conf = conv.check(conv.pipe( conv.test_isinstance(dict), conv.struct( { 'ckan_of_worms.site_url': conv.pipe( conv.make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True, full = True), conv.not_none, ), 'dactylo.api_key': conv.pipe( conv.cleanup_line, conv.not_none, ), 'dactylo.site_url': conv.pipe( conv.make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True, full = True), conv.not_none, ), 'user_agent': conv.pipe( conv.cleanup_line, conv.not_none, ), }, default = 'drop', ), conv.not_none, ))(dict(config_parser.items('CowBots-Report-State')), conv.default_state) global headers headers = { 'User-Agent': conf['user_agent'], } global request_headers request_headers = headers.copy() request_headers['Content-Type'] = 'application/json' request = urllib2.Request(urlparse.urljoin(conf['ckan_of_worms.site_url'], 'api/1/metrics'), headers = headers) response = urllib2.urlopen(request) global metrics metrics = conv.check(conv.pipe( cow_response_to_value, conv.test_isinstance(dict), conv.not_none, ))(response.read(), state = conv.default_state) send_stats() fedmsg_conf = conv.check(conv.struct( dict( environment = conv.pipe( conv.empty_to_none, conv.test_in(['dev', 'prod', 'stg']), ), modname = conv.pipe( conv.empty_to_none, conv.test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'), conv.default('ckan_of_worms'), ), # name = conv.pipe( # conv.empty_to_none, # conv.default('ckan_of_worms.{}'.format(hostname)), # ), topic_prefix = conv.pipe( conv.empty_to_none, conv.test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'), ), ), default = 'drop', ))(dict(config_parser.items('fedmsg'))) # Read in the config from /etc/fedmsg.d/. fedmsg_config = fedmsg.config.load_config([], None) # Disable a warning about not sending. We know. We only want to tail. fedmsg_config['mute'] = True # Disable timing out so that we can tail forever. This is deprecated # and will disappear in future versions. fedmsg_config['timeout'] = 0 # For the time being, don't require message to be signed. 
fedmsg_config['validate_signatures'] = False for key, value in fedmsg_conf.iteritems(): if value is not None: fedmsg_config[key] = value expected_topic_prefix = '{}.{}.ckan_of_worms.'.format(fedmsg_config['topic_prefix'], fedmsg_config['environment']) for name, endpoint, topic, message in fedmsg.tail_messages(**fedmsg_config): if not topic.startswith(expected_topic_prefix): log.debug(u'Ignoring message: {}, {}'.format(topic, name)) continue kind, action = topic[len(expected_topic_prefix):].split('.') if kind == 'account': account = message['msg'] name = account['name'] if action == 'create': metrics['accounts_count'] += 1 log.info(u'Updating stats after "{} account {}"'.format(action, name)) send_stats() elif action == 'delete': metrics['accounts_count'] -= 1 log.info(u'Updating stats after "{} account {}"'.format(action, name)) send_stats() elif action != 'update': log.warning(u'Unknown action "{}" for account "{}"'.format(action, account)) elif kind == 'dataset': dataset = message['msg'] name = dataset['name'] existing_dataset_metrics = metrics['datasets'].get(name, {}) if action in ('create', 'update'): dataset_metrics = dict( organization_name = (dataset.get('organization') or {}).get('name'), related_count = len(dataset.get('related') or []), resources = [ dict( format = resource.get('format'), ) for resource in (dataset.get('resources') or []) ], weight = dataset['weight'], ) elif action == 'delete': dataset_metrics = None else: log.warning(u'Unknown action "{}" for dataset "{}"'.format(action, dataset)) continue if dataset_metrics != existing_dataset_metrics: log.info(u'Updating stats after "{} dataset {}"'.format(action, name)) if dataset_metrics is None: del metrics['datasets'][name] else: metrics['datasets'][name] = dataset_metrics send_stats() elif kind == 'organization': organization = message['msg'] name = organization['name'] existing_organization_metrics = metrics['organizations'].get(name, {}) if action in ('create', 'update'): organization_metrics = dict( public_service = bool(organization.get('public_service')), ) elif action == 'delete': organization_metrics = None else: log.warning(u'Unknown action "{}" for organization "{}"'.format(action, organization)) continue if organization_metrics != existing_organization_metrics: log.info(u'Updating stats after "{} organization {}"'.format(action, name)) if organization_metrics is None: del metrics['organizations'][name] else: metrics['organizations'][name] = organization_metrics send_stats() else: log.debug(u'TODO: Handle {}, {} for {}'.format(kind, action, message)) return 0
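Each branch of the loop above follows the same compare-then-send pattern: build the new per-entry metrics dict, and only touch the cache and call send_stats() when something actually changed. A sketch of that pattern as one helper, assuming the global metrics dict from this script; like the original, it expects the entry to exist when deleting:

    def update_metric(collection, name, new_metrics):
        # Record new_metrics under metrics[collection][name];
        # return True when anything changed (i.e. stats should be re-sent).
        existing = metrics[collection].get(name, {})
        if new_metrics == existing:
            return False
        if new_metrics is None:
            del metrics[collection][name]
        else:
            metrics[collection][name] = new_metrics
        return True

    # if update_metric('datasets', name, dataset_metrics): send_stats()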
def run(self): # This is a "required" option... :P if not self.config['command']: self.log.error("You must provide a --command to run.") sys.exit(1) # Disable sending self.config['publish_endpoint'] = None # Disable timeouts. We want to tail forever! self.config['timeout'] = 0 # Even though fedmsg-trigger won't be sending any messages, give it a # name to conform with the other commands. self.config['name'] = 'relay_inbound' # Tail is never going to send any messages, so we suppress warnings # about having no publishing sockets established. self.config['mute'] = True fedmsg.init(**self.config) exclusive_regexp = re.compile(self.config['exclusive_regexp']) inclusive_regexp = re.compile(self.config['inclusive_regexp']) wait_for = int(self.config['wait_for']) max_queue_size = int(self.config['max_queue_size']) timer = None que = queue.Queue() def execute_queue(): while not que.empty(): message = que.get() result = self.run_command(self.config['command'], message) if result != 0: self.log.info("Command returned error code %r" % result) try: for name, ep, topic, message in fedmsg.tail_messages(**self.config): if exclusive_regexp.search(topic): continue if not inclusive_regexp.search(topic): continue que.put(message) if timer is not None: # Try to cancel it timer.cancel() if timer is None or not timer.is_alive(): # Either there was no timer yet, or it was still waiting # -> Let's start a new one if (max_queue_size > 0 and que.qsize() > max_queue_size): # If the que is too big, let's just run it NOW timer = threading.Timer(0, execute_queue) else: timer = threading.Timer(wait_for, execute_queue) timer.start() except KeyboardInterrupt: if timer is not None: timer.cancel() # Let's wait for commands to finish timer.join()
def run(self): # Disable sending self.config["publish_endpoint"] = None # Disable timeouts. We want to tail forever! self.config["timeout"] = 0 # Even though fedmsg-tail won't be sending any messages, give it a # name to conform with the other commands. self.config["name"] = "relay_inbound" # Tail is never going to send any messages, so we suppress warnings # about having no publishing sockets established. self.config["mute"] = True fedmsg.init(**self.config) fedmsg.meta.make_processors(**self.config) # Build a message formatter formatter = lambda d: d if self.config["pretty"]: def formatter(d): d["timestamp"] = time.ctime(d["timestamp"]) d = fedmsg.crypto.strip_credentials(d) return "\n" + pprint.pformat(d) if self.config["really_pretty"]: def formatter(d): d = fedmsg.crypto.strip_credentials(d) fancy = pygments.highlight( fedmsg.encoding.pretty_dumps(d), pygments.lexers.JavascriptLexer(), pygments.formatters.TerminalFormatter(), ).strip() return "\n" + fancy if self.config["terse"]: formatter = lambda d: "\n" + fedmsg.meta.msg2repr(d, **self.config) if self.config["gource"]: # Output strings suitable for consumption by the "gource" tool. # We have 8 colors here and an unknown number of message types. # (There were 14 message types at the time this code was written). # Here we build a dict that maps message type names (a.k.a modnames # or services) to hex colors for usage in the gource graph. We # wrap-around that dict if there are more message types than # there are colors (which there almost certainly are). procs = [proc.__name__.lower() for proc in fedmsg.meta.processors] colors = ["FFFFFF", "008F37", "FF680A", "CC4E00", "8F0058", "8F7E00", "37008F", "7E008F"] n_wraps = 1 + int(math.ceil(len(colors) / float(len(procs)))) colors = colors * n_wraps color_lookup = dict(zip(procs, colors)) cache_directory = self.config["gource_user_image_dir"] # After all that color trickiness, here is our formatter we'll use. def formatter(message): """ Use this like:: $ fedmsg-tail --gource | gource \ -i 0 \ --user-image-dir ~/.cache/gravatar/ \ --log-format custom - """ proc = fedmsg.meta.msg2processor(message, **self.config) users = fedmsg.meta.msg2usernames(message, **self.config) objs = fedmsg.meta.msg2objects(message, **self.config) name = proc.__name__.lower() if not users: users = [name] lines = [] for user, obj in itertools.product(users, objs): _grab_and_cache_avatar(user, cache_directory) lines.append("%i|%s|A|%s|%s" % (message["timestamp"], user, name + "/" + obj, color_lookup[name])) return "\n".join(lines) exclusive_regexp = re.compile(self.config["exclusive_regexp"]) inclusive_regexp = re.compile(self.config["inclusive_regexp"]) # The "proper" fedmsg way to do this would be to spin up or connect to # an existing Moksha Hub and register a consumer on the "*" topic that # simply prints out each message it consumes. That seems like # overkill, so we're just going to directly access the endpoints # ourself. for name, ep, topic, message in fedmsg.tail_messages(**self.config): if exclusive_regexp.search(topic): continue if not inclusive_regexp.search(topic): continue self.log.info(formatter(message))
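Several of the commands on this page share the same two-regex topic filter: drop a message when the exclusive pattern matches, or when the inclusive pattern does not. The same gate as a generator that wraps any (name, endpoint, topic, message) stream; the defaults here are mine ('$^' never matches, so nothing is excluded):

    import re

    def filter_topics(messages, inclusive = '.*', exclusive = '$^'):
        # Yield only messages whose topic passes both regex gates.
        keep = re.compile(inclusive)
        drop = re.compile(exclusive)
        for name, endpoint, topic, message in messages:
            if drop.search(topic):
                continue
            if not keep.search(topic):
                continue
            yield name, endpoint, topic, message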