def __call__(self, master_url):
    """Build alerts for a single master; runs inside a multiprocessing worker.

    Returns a 4-tuple:
      (master_alerts, latest_builder_info, stale_builder_alerts, master_url)
    or (None, None, None, master_url) when the master JSON cannot be fetched.
    """
    try:
        master_json = buildbot.fetch_master_json(master_url)
        if not master_json:
            # Master is unreachable; signal "no data" but keep the URL so the
            # caller can match the result back to its request.
            return (None, None, None, master_url)
        master_alerts, stale_master_alert = alert_builder.alerts_for_master(
            self._cache, master_url, master_json, self._old_alerts,
            self._builder_filter, self._jobs)
        # FIXME: The builder info doesn't really belong here. The builder
        # revisions tool uses this and we happen to have the builder json cached
        # at this point so it's cheap to compute, but it should be moved
        # to a different feed.
        data, stale_builder_alerts = (
            buildbot.latest_builder_info_and_alerts_for_master(
                self._cache, master_url, master_json))
        if stale_master_alert:
            stale_builder_alerts.append(stale_master_alert)
        return (master_alerts, data, stale_builder_alerts, master_url)
    except Exception:
        # Was a bare "except:", which would also intercept SystemExit and
        # KeyboardInterrupt and re-raise them as a plain Exception, breaking
        # clean shutdown of the worker pool. Catch Exception only.
        # Put all exception text into an exception and raise that so it doesn't
        # get eaten by the multiprocessing code.
        msg = '%s for master url %s' % (
            ''.join(traceback.format_exception(*sys.exc_info())),
            master_url,
        )
        raise Exception(msg)
def __call__(self, master_url):
    """Build alerts for a single master; runs inside a multiprocessing worker.

    Returns a 4-tuple:
      (master_alerts, latest_builder_info, stale_builder_alerts, master_url)
    or (None, None, None, master_url) when the master JSON cannot be fetched.
    """
    try:
        master_json = buildbot.fetch_master_json(master_url)
        if not master_json:
            # Master is unreachable; signal "no data" but keep the URL so the
            # caller can match the result back to its request.
            return (None, None, None, master_url)
        master_alerts, stale_master_alert = alert_builder.alerts_for_master(
            self._cache, master_url, master_json, self._old_alerts,
            self._builder_filter, self._jobs)
        # FIXME: The builder info doesn't really belong here. The builder
        # revisions tool uses this and we happen to have the builder json cached
        # at this point so it's cheap to compute, but it should be moved
        # to a different feed.
        data, stale_builder_alerts = (
            buildbot.latest_builder_info_and_alerts_for_master(
                self._cache, master_url, master_json))
        if stale_master_alert:
            stale_builder_alerts.append(stale_master_alert)
        return (master_alerts, data, stale_builder_alerts, master_url)
    except Exception:
        # Was a bare "except:", which would also intercept SystemExit and
        # KeyboardInterrupt and re-raise them as a plain Exception, breaking
        # clean shutdown of the worker pool. Catch Exception only.
        # Put all exception text into an exception and raise that so it doesn't
        # get eaten by the multiprocessing code. Include the master URL so a
        # failure can be attributed to the master that caused it.
        msg = '%s for master url %s' % (
            ''.join(traceback.format_exception(*sys.exc_info())),
            master_url,
        )
        raise Exception(msg)
def main(args):  # pragma: no cover
    """CLI entry point: print alerts for a single builder.

    Takes a builder URL like
    https://build.chromium.org/p/chromium.win/builders/XP%20Tests%20(1)
    and dumps the alerts JSON for that builder to stdout.
    """
    logging.basicConfig(level=logging.DEBUG)
    parser = argparse.ArgumentParser()
    parser.add_argument('builder_url', action='store')
    args = parser.parse_args(args)
    # https://build.chromium.org/p/chromium.win/builders/XP%20Tests%20(1)
    url_regexp = re.compile(r'(?P<master_url>.*)/builders/(?P<builder_name>.*)/?')
    match = url_regexp.match(args.builder_url)
    if not match:
        # Previously an unmatched URL fell through to an opaque
        # AttributeError on match.group(); fail with a clear message instead.
        parser.error('Unrecognized builder URL: %s' % args.builder_url)
    # FIXME: HACK
    CACHE_PATH = 'build_cache'
    cache = buildbot.DiskCache(CACHE_PATH)
    master_url = match.group('master_url')
    builder_name = urllib.unquote_plus(match.group('builder_name'))
    master_json = buildbot.fetch_master_json(master_url)
    # This is kinda a hack, but uses more of our existing code this way:
    alerts = alerts_for_master(cache, master_url, master_json, builder_name)
    # Parenthesized print is equivalent for a single argument in Python 2 and
    # keeps this line valid under Python 3 syntax.
    print(json.dumps(alerts[0], indent=1))