def collect(team_name, dryrun=False): """Collect and push errors.u.c related metrics.""" # check to see if its a vaild team LP team try: lp.LP.people[team_name] except KeyError: print('Team %s does not exist in LP.' % team_name) return data = [] mcp_data = team_subscribed_mcp_count(team_name) for series in mcp_data: print("%s: %s" % (series, mcp_data[series]['sum_top_ten_counts'])) if not dryrun: # metric names can not have a hyphen in them team_name = team_name.replace('-', '_') print('Pushing data...') for series in mcp_data: data.append({ 'measurement': '%s_errors_mcp_sum_top_ten' % team_name, 'fields': { 'count': mcp_data[series]['sum_top_ten_counts'] }, 'tags': { 'series': series } }) util.influxdb_insert(data)
def collect(team_name, dryrun=False): """Collect data and push to InfluxDB.""" team = lp.LP.people[team_name] counts = {i: dict.fromkeys(STATUS_LIST, 0) for i in IMPORTANCE_LIST} tasks = lp.LP.bugs.searchTasks(assignee=team, status=STATUS_LIST) for task in tasks: counts[task.importance][task.status] += 1 # Thing to note: currently private bugs are not counted. data = [] for importance, statuses in counts.items(): for status, count in statuses.items(): print('{} importance assigned bugs with {} status: {}'.format( importance, status, count)) data.append({ 'measurement': '{}_assigned_bugs'.format(team_name.replace('-', '_')), 'tags': { 'importance': importance, 'status': status, }, 'fields': { 'count': count } }) if not dryrun: print('Pushing data...') util.influxdb_insert(data)
def csv2influx(csv_filename, measurement): """ Push CSV data to InfluxDB. @param csv_filename: csv filename to load into InfluxDB @param measurement: measurement name to use for data """ data = [] with open(csv_filename) as csv_file: reader = csv.DictReader(csv_file) for row in reader: date = row.pop('date') try: fields = {k: int(v) if v else 0 for k, v in dict(row).items()} except TypeError: print('Unknown value (not an int) on this row:') print(row) sys.exit(1) entry = { "measurement": measurement, "fields": fields, "time": date } data.append(entry) util.influxdb_insert(data)
def collect(dryrun=False): """Submit data to Push Gateway.""" try: devel = distro_info.UbuntuDistroInfo().devel() except distro_info.DistroDataOutdated: devel = distro_info.UbuntuDistroInfo().stable() devel_results = get_iso_size_data(devel) print('%s: %s' % (devel, devel_results)) lts = distro_info.UbuntuDistroInfo().lts() lts_results = get_iso_size_data(lts, True) print('%s: %s' % (lts, lts_results)) if not dryrun: print('Pushing data...') data = [{ 'measurement': 'iso_size_devel', 'fields': devel_results, }, { 'measurement': 'iso_size_lts', 'fields': lts_results, }] util.influxdb_insert(data)
def collect(dryrun=False): """Submit data to Push Gateway.""" unassigned = lp.get_team_subscribed_unassigned_bugs(team='ubuntu-mir', distro='Ubuntu') incomplete = lp.get_team_subscribed_incomplete_bugs(team='ubuntu-mir', distro='Ubuntu') pending = lp.get_mirs_in_review() security = lp.get_mirs_in_security_review() approved = lp.get_approved_mirs() print('Unassigned Total: %s' % unassigned) print('Incomplete Total: %s' % incomplete) print('Pending Total: %s' % pending) print('Security Total: %s' % security) print('Approved Total: %s' % approved) if not dryrun: print('Pushing data...') data = [ { 'measurement': 'distro_mir_team_bugs', 'fields': { 'unassigned': unassigned, 'incomplete': incomplete, 'pending': pending, 'approved': approved, } } ] util.influxdb_insert(data)
def collect(queue_name, dryrun=False):
    """Collect and push autopkgtest queue depth metrics."""
    queue_details = QUEUES_JSON[queue_name]
    for release in queue_details:
        for arch in queue_details[release]:
            count = len(queue_details[release][arch])
            print('%s %s: %i' % (release.title(), arch, count))

    if not dryrun:
        print('Pushing data...')
        data = []
        for release in queue_details:
            for arch in queue_details[release]:
                count = len(queue_details[release][arch])
                data.append({
                    'measurement': 'foundations_autopkgtest_queue',
                    'fields': {
                        'queue_size': count,
                    },
                    'tags': {
                        'queue': queue_name,
                        'release': release,
                        'arch': arch,
                    }
                })
        util.influxdb_insert(data)
def collect(team_name, dryrun=False): """Submit data to Push Gateway.""" results_by_component = get_merge_data(team_name) print('%s' % (results_by_component,)) if not dryrun: print('Pushing data...') main = results_by_component['main'] data = [ { 'measurement': 'metric_merges_%s' % team_name, 'fields': { 'excluded': main['excluded'], 'local': main['local'], 'modified': main['modified'], 'needs-merge': main['needs-merge'], 'needs-sync': main['needs-sync'], 'repackaged': main['repackaged'], 'unmodified': main['unmodified'], } } ] print(data) util.influxdb_insert(data)
def collect(dryrun=False): """Submit data to Push Gateway.""" results = get_vagrant_data() print(results) if not dryrun: print('Pushing data...') data = [{ 'measurement': 'vagrant_downloads', 'fields': results, }] util.influxdb_insert(data)
def collect(environment, dryrun=False): """Collect and push retracers results metrics.""" base_errors_url = BASE_ERRORS_URL if environment == 'staging': base_errors_url = base_errors_url.replace('errors.', 'errors.staging.') retrace_results_json = get_rresults_data(base_errors_url) if len(retrace_results_json['objects']) == 0: print("No retracing has occurred") sys.exit(1) if retrace_results_json['objects'][0]['date'] != TODAY.strftime('%Y%m%d'): print("The results are not for today, quitting.") sys.exit(0) results = retrace_results_json['objects'][0]['value'] for value in results: data = [] arch = '' release = value if ':' in value: release, arch = value.split(':') if dryrun: if arch: print("%s:%s" % (release, arch)) else: print("%s" % value) for result in results[value]: count = results[value][result] if not result: result = 'unclassified' if dryrun: print("%s: %s" % (result, count)) continue data.append({ # we don't need per minute counts of results 'time': datetime(TODAY.year, TODAY.month, TODAY.day), 'measurement': 'foundations_%s_retracers_results' % environment, 'fields': { 'count': count, }, 'tags': { 'release': release, 'arch': arch, 'result': result, } }) if not dryrun: util.influxdb_insert(data)
def collect(team_name, dryrun=False): """Push upload data.""" date = datetime.now().date().strftime('%Y-%m-%d') results = generate_upload_report(date, team_name) print('%s: %s' % (date, results)) if not dryrun: print('Pushing data...') data = [{ 'measurement': 'metric_uploads_%s' % team_name, 'fields': { 'dev': results['dev'], 'sru': results['sru'], } }] util.influxdb_insert(data)
def collect(project, repo='', dryrun=False, pkg_name=None):
    """Collect bug, review, and contributor counts and push to InfluxDB."""
    print(project)
    if pkg_name is None:
        pkg_name = project
    project_new = lp.get_bug_count(project, status='New')
    project_total = lp.get_bug_count(project)
    ubuntu_new = lp.get_ubuntu_bug_count(pkg_name, status='New')
    ubuntu_total = lp.get_ubuntu_bug_count(pkg_name)
    reviews = lp.get_active_review_count(project)
    print('%s total bugs (%s new)' % (project_total, project_new))
    print('%s pkg bugs (%s new)' % (ubuntu_total, ubuntu_new))

    contrib = util.get_contributors(repo)
    contrib_internal = [x for x in contrib if x.endswith('@canonical.com')]
    contrib_external = [x for x in contrib if not x.endswith('@canonical.com')]
    print('Total Contributors: %s' % len(contrib))
    print('Total Internal Contributors: %s' % len(contrib_internal))
    print('Total External Contributors: %s' % len(contrib_external))

    if not dryrun:
        print('Pushing data...')
        pkg_str = project.replace('-', '')
        data = [{
            'measurement': 'pkg_%s' % pkg_str,
            'fields': {
                'bug_total': project_total - project_new,
                'bug_new': project_new,
                'bug_ubuntu_total': ubuntu_total - ubuntu_new,
                'bug_ubuntu_new': ubuntu_new,
                'review_total': reviews,
                'contrib_total': len(contrib),
                'contrib_external_total': len(contrib_external),
                'contrib_internal_total': len(contrib_internal),
            }
        }]
        util.influxdb_insert(data)
def csv2influx(csv_filename, measurement): """ Push CSV data to InfluxDB. @param csv_filename: csv filename to load into InfluxDB @param measurement: measurement name to use for data """ data = [] with open(csv_filename) as csv_file: reader = csv.DictReader(csv_file) for row in reader: date = row.pop('date') entry = { "measurement": measurement, "fields": dict(row), "time": date } data.append(entry) util.influxdb_insert(data)
def collect(dryrun=False): """Push published cloud image counts.""" metrics = [] interesting_images = filter_interesting_images() aws_clouds = ifilter('cloudname ~ ^aws') not_aws_clouds = -aws_clouds print('Finding serials for non-aws clouds...') metrics += collect_metrics(not_aws_clouds, interesting_images) print('Finding serials for AWS clouds...') # These virt/storage combinations were present in early xenial development # dailies, but were dropped before release. aws_deprecated = (ifilter('release = xenial') & ifilter('virt ~ ^(hvm|pv)$') & ifilter('root_store ~ ^(io1|ebs)$')) metrics += collect_metrics(aws_clouds, interesting_images & -aws_deprecated) print('Finding serials for docker-core...') docker_core_serials = get_current_download_serials(DOCKER_CORE_ROOT) for release, serial in docker_core_serials.items(): age = _determine_serial_age(serial) print('Found {} latest serial: {} ({} days old)'.format( release, serial, age)) tags = dict(image_type='daily', cloud='docker-core', release=release) metrics += [ _emit_metric('current_serial', serial, **tags), _emit_metric('current_serial_age', age, **tags) ] if not dryrun: print('Pushing data...') util.influxdb_insert(metrics) else: import pprint pprint.pprint(metrics)
def collect(environment, dryrun=False): """Collect and push retracers results metrics.""" base_errors_url = BASE_ERRORS_URL if environment == 'staging': base_errors_url = base_errors_url.replace('errors.', 'errors.staging.') retrace_time_json = get_rtime_data(base_errors_url) if len(retrace_time_json['objects']) == 0: print("No retracing has occurred") sys.exit(1) if retrace_time_json['objects'][0]['date'] != YESTERDAY.strftime('%Y%m%d'): print("The results are not for today, quitting.") sys.exit(1) results = retrace_time_json['objects'][0]['value'] for release in results: data = [] for arch in results[release]: time = results[release][arch] if dryrun: print("%s %s: %s" % (release, arch, time)) continue data.append({ # we don't need per minute counts of results 'time': datetime(YESTERDAY.year, YESTERDAY.month, YESTERDAY.day), 'measurement': 'foundations_%s_retracers_avg_time' % environment, 'fields': { 'avg_retrace_time': time, }, 'tags': { 'release': release, 'arch': arch, } }) if not dryrun: util.influxdb_insert(data)
def collect(dryrun=False): """Collect and push uploader-related metrics.""" canonical, noncanonical = per_affiliation_uploader_count() uploaders = main_universe_uploader_count() print('Active Canonical Uploaders: %s' % canonical) print('Active Non-Canonical Uploaders: %s' % noncanonical) print('Current Users with Main/Universe Upload Rights: %s' % uploaders) if not dryrun: print('Pushing data...') data = [{ 'measurement': 'foundations_active_contributors', 'fields': { 'canonical-uploaders': canonical, 'non-canonical-uploaders': noncanonical, 'main-universe-uploaders': uploaders, } }] util.influxdb_insert(data)
def collect(team_name, dryrun=False): """Submit data to Push Gateway.""" lp_team_name = util.get_launchpad_team_name(team_name) triage = lp.get_team_daily_triage_count(team=lp_team_name, distro='Ubuntu', blacklist=BLACKLIST) backlog = lp.get_team_backlog_count(team=lp_team_name, distro='Ubuntu') print('Backlog Total: %s' % backlog) print('Triage Total: %s' % triage) if not dryrun: print('Pushing data...') data = [{ 'measurement': 'metric_triage', 'fields': { 'backlog': backlog, 'triage': triage, } }] util.influxdb_insert(data)
def collect(dryrun=False): """Submit data to Push Gateway.""" latest_release_prefix = _get_latest_release_prefix() counts = {} for tag in TAGS: counts[tag] = _get_tag_counts(latest_release_prefix, tag) print(counts) if not dryrun: print('Pushing data...') data = [] for tag in TAGS: for team_name in counts[tag]: data.append({ 'measurement': 'distro_rls_bug_tasks', 'fields': { 'count': int(counts[tag][team_name]) }, 'tags': { 'team_name': team_name, 'tag': tag } }) util.influxdb_insert(data)
def csv2influx(csv_filename, measurement, use_tags=None, value_type=None):
    """
    Push CSV data to InfluxDB.

    @param csv_filename: csv filename to load into InfluxDB
    @param measurement: measurement name to use for data
    @param use_tags: use these columns as tags, not value keys
    @param value_type: cast value columns to this type
    """
    data = []
    value_type = _parse_value_type(value_type or 'int')
    with open(csv_filename) as csv_file:
        reader = csv.DictReader(csv_file)
        for row in reader:
            date = row.pop('date')
            try:
                tags = {k: row.pop(k) for k in use_tags} if use_tags else {}
                # casting a non-numeric string raises ValueError, None raises
                # TypeError
                fields = {k: value_type(v) if v else 0
                          for k, v in dict(row).items()}
            except (TypeError, ValueError):
                print('Unknown value (could not cast) on this row:')
                print(row)
                sys.exit(1)
            entry = {
                "measurement": measurement,
                "fields": fields,
                "tags": tags,
                "time": date
            }
            data.append(entry)

    util.influxdb_insert(data)
    print('wrote {} datapoints'.format(len(data)))
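def _example_csv2influx_usage():
    """Hedged usage sketch for csv2influx() above.

    Assumes a CSV whose first column is 'date' and whose remaining columns
    are either tag columns or numeric value columns, e.g.:

        date,series,count
        2019-01-01,bionic,4
        2019-01-01,cosmic,7

    The filename and measurement name here are hypothetical.
    """
    csv2influx('queue_counts.csv', 'example_queue_counts',
               use_tags=['series'], value_type='int')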
            # Median age of packages waiting in devel-proposed
            'median_age': median_age,
            # Size of devel-proposed backlog (packages x days)
            'backlog': backlog,
        },
    }

    return metric


if __name__ == '__main__':
    PARSER = argparse.ArgumentParser()
    PARSER.add_argument('--dryrun', action='store_true')
    PARSER.add_argument('--team', help='team_name')
    ARGS = PARSER.parse_args()
    logging.basicConfig(level=logging.DEBUG)

    TEAM = ARGS.team
    try:
        DATA = get_proposed_migration_queue(TEAM)
    finally:
        if not ARGS.dryrun:
            print('Pushing data...')
            util.influxdb_insert([DATA])
        else:
            print('Valid candidates: %i' % DATA['fields']['valid_candidates'])
            print('Not considered candidates: %i'
                  % DATA['fields']['not_considered'])
            print('Median age: %i' % DATA['fields']['median_age'])
            print('Backlog: %i' % DATA['fields']['backlog'])
def collect(dryrun=False):  # pylint: disable=too-many-branches
    """Collect and push SRU-related metrics."""
    data = []
    sru_queues = sru_queue_count()
    ready_srus = sru_verified_and_ready_count()
    proposed_sru_age_data = proposed_package_ages()
    unapproved_sru_age_data = unapproved_sru_ages()

    q_name = 'Proposed Uploads in the Unapproved Queue per Series'
    print('Number of %s:' % q_name)
    for series, count in sru_queues.items():
        print('%s: %s' % (series, count))

    print('Age in days of oldest %s:' % q_name.replace('Uploads', 'Upload'))
    for series in unapproved_sru_age_data:
        print('%s: %s' % (series,
                          unapproved_sru_age_data[series]
                          ['oldest_age_in_days']))

    print('Backlog age in days of %s:' % q_name)
    for series in unapproved_sru_age_data:
        print('%s: %s' % (series,
                          unapproved_sru_age_data[series]
                          ['ten_day_backlog_age']))

    print('Number of backlogged %s:' % q_name)
    for series in unapproved_sru_age_data:
        print('%s: %s' % (series,
                          unapproved_sru_age_data[series]
                          ['ten_day_backlog_count']))

    topic = 'Updates in Proposed per Series'
    print('Number of Publishable %s:' % topic)
    for series, count in ready_srus.items():
        print('%s: %s' % (series, count))

    for category in ('unverified', 'verified', 'vfailed'):
        print('Number of backlogged %s %s:' % (category, topic))
        for series in proposed_sru_age_data:
            print('%s: %s' % (series,
                              proposed_sru_age_data[series]
                              ['fourteen_day_%s_backlog_count' % category]))
        print('Backlog age in days of %s %s:' % (category, topic))
        for series in proposed_sru_age_data:
            print('%s: %s' % (series,
                              proposed_sru_age_data[series]
                              ['fourteen_day_%s_backlog_age' % category]))

    if not dryrun:
        print('Pushing data...')
        for series, count in sru_queues.items():
            data.append({
                'measurement': 'distro_sru_unapproved_proposed_count',
                'fields': {
                    'count': count
                },
                'tags': {
                    'series': series
                }
            })
        for series in unapproved_sru_age_data:
            data.append({
                'measurement': 'distro_sru_unapproved_proposed_oldest_age',
                'fields': {
                    'oldest_age_in_days':
                        unapproved_sru_age_data[series]['oldest_age_in_days']
                },
                'tags': {
                    'series': series
                }
            })
        for series in unapproved_sru_age_data:
            data.append({
                'measurement':
                    'distro_sru_unapproved_proposed_ten_day_backlog_age',
                'fields': {
                    'ten_day_backlog_age':
                        unapproved_sru_age_data[series]['ten_day_backlog_age']
                },
                'tags': {
                    'series': series
                }
            })
        for series in unapproved_sru_age_data:
            data.append({
                'measurement':
                    'distro_sru_unapproved_proposed_ten_day_backlog_count',
                'fields': {
                    'ten_day_backlog_count':
                        unapproved_sru_age_data[series]
                        ['ten_day_backlog_count']
                },
                'tags': {
                    'series': series
                }
            })
        for series, count in ready_srus.items():
            data.append({
                'measurement': 'distro_sru_verified_and_ready_count',
                'fields': {
                    'count': count
                },
                'tags': {
                    'series': series
                }
            })
        for cat in ('unverified', 'verified', 'vfailed'):
            for series in proposed_sru_age_data:
                data.append({
                    'measurement':
                        'distro_sru_proposed_fourteen_day_backlog_age',
                    'fields': {
                        'backlog_age':
                            proposed_sru_age_data[series]
                            ['fourteen_day_%s_backlog_age' % cat],
                        'backlog_count':
                            proposed_sru_age_data[series]
                            ['fourteen_day_%s_backlog_count' % cat]
                    },
                    'tags': {
                        'series': series,
                        'category': cat
                    }
                })
        util.influxdb_insert(data)
            # Number of packages waiting in devel-proposed
            'valid_candidates': valid,
            'not_considered': not_considered,
            # Median age of packages waiting in devel-proposed
            'median_age': median_age,
            # Size of devel-proposed backlog (packages x days)
            'backlog': backlog,
        },
    })


if __name__ == '__main__':
    PARSER = argparse.ArgumentParser()
    PARSER.add_argument('--dryrun', action='store_true')
    ARGS = PARSER.parse_args()
    logging.basicConfig(level=logging.DEBUG)

    DATA = []
    try:
        get_proposed_migration_queue(DATA)
    finally:
        if ARGS.dryrun:
            print('Valid candidates: %i'
                  % DATA[0]['fields']['valid_candidates'])
            print('Not considered candidates: %i'
                  % DATA[0]['fields']['not_considered'])
            print('Median age: %i' % DATA[0]['fields']['median_age'])
            print('Backlog: %i' % DATA[0]['fields']['backlog'])
        else:
            util.influxdb_insert(DATA)
def collect(dryrun=False): """Collect data and push to InfluxDB.""" data = list(_get_data_points()) if not dryrun: print('Pushing data...') util.influxdb_insert(data)