Example no. 1
0
def write_json_for_timestamp(timestamp):
    """
    Write the aggregate JSON stats file for the given timestamp.

    Fetches the aggregated data via ``get_data_for_timestamp``, dumps it to
    ``<JSON_OUTPUT_DIR>/<timestamp>.json``, and records the timestamp in
    redis so downstream consumers (mrburns) know the latest processed file.

    :param timestamp: timestamp used both to look up the data and to name
        the output file.
    """
    data = get_data_for_timestamp(timestamp)
    jsonfile = conf.JSON_OUTPUT_DIR / '{}.json'.format(timestamp)
    # json.dump emits text, so open in text mode ('w'), not binary ('wb').
    # This matches the sibling implementation and stays Python 3 safe.
    with jsonfile.open('w') as fh:
        json.dump(data, fh)

    # update the last processed timestamp for use in mrburns.
    redis.set(rkeys.LATEST_TIMESTAMP, timestamp)
    log.debug('Wrote file for {}'.format(timestamp))
    log.debug(jsonfile)
Example no. 2
0
def write_json_for_timestamp(timestamp):
    """
    Write the aggregate JSON stats file for the given timestamp.

    Dumps the data returned by ``get_data_for_timestamp`` into
    ``stats_<timestamp>.json`` under the configured output directory, then
    records the timestamp in redis for downstream consumers (mrburns).
    """
    stats = get_data_for_timestamp(timestamp)
    out_path = path.join(conf.JSON_OUTPUT_DIR,
                         'stats_{}.json'.format(timestamp))
    with open(out_path, 'w') as outfile:
        json.dump(stats, outfile)

    # update the last processed timestamp for use in mrburns.
    redis.set(rkeys.LATEST_TIMESTAMP, timestamp)
    log.debug('Wrote file for {}'.format(timestamp))
    log.debug(out_path)
Example no. 3
0
def _aggregate_share_issues(area_totals, issues_key_tmpl, area_label,
                            area_issues, issue_areas, min_count=0):
    """
    Aggregate per-area issue share percentages from redis.

    For each area (continent or country) in ``area_totals``, reads its
    per-issue counts from the hash named by ``issues_key_tmpl`` and fills:

    - ``area_issues[area][issue]`` -> percent of that area's shares
    - ``issue_areas[issue]`` -> list of ``{area_label: area, 'count': pct}``

    :param area_totals: dict of area name -> total share count (strings ok).
    :param issues_key_tmpl: redis key template, formatted with the area name.
    :param area_label: dict key used in the ``issue_areas`` entries
        ('continent' or 'country').
    :param area_issues: output dict, mutated in place.
    :param issue_areas: output dict of lists (from ``get_issue_dict``),
        mutated in place.
    :param min_count: areas with a total below this are skipped
        (0 disables filtering, matching the continent behavior).
    """
    for area, count in area_totals.iteritems():
        count = int(count)
        if count < min_count:
            continue
        issues = redis.hgetall(issues_key_tmpl.format(area))
        area_issues[area] = {}
        for issue, issue_count in issues.iteritems():
            issue_count = int(issue_count)
            # translate the raw redis issue key into its display name.
            issue = data_types.types_map[issue]
            percent = get_percent(issue_count, count)
            area_issues[area][issue] = percent
            issue_areas[issue].append({
                area_label: area,
                'count': percent,
            })


def get_data_for_timestamp(timestamp):
    """
    Return aggregate map and share data dict for a timestamp.

    Side effects: pushes the map total to statsd and snapshots it back into
    redis (MAP_TOTAL_SNAPSHOT) so the next run can report the previous total.
    """
    issue_continents = get_issue_dict()
    issue_countries = get_issue_dict()
    data = {
        'map_total': int(redis.get(rkeys.MAP_TOTAL) or 0),
        'map_previous_total': int(redis.get(rkeys.MAP_TOTAL_SNAPSHOT) or 0),
        'map_geo': [],
        'share_total': int(redis.get(rkeys.SHARE_TOTAL) or 0),
        'continent_issues': {},
        'issue_continents': issue_continents,
        'country_issues': {},
        'issue_countries': issue_countries,
    }
    statsd.gauge('milhouse.map_total', data['map_total'])
    redis.set(rkeys.MAP_TOTAL_SNAPSHOT, data['map_total'])

    # MAP POINTS # — hash fields are 'lat:lon' strings, values are counts.
    map_geo_key = rkeys.MAP_GEO.format(timestamp)
    geo_data = redis.hgetall(map_geo_key)
    for latlon, count in geo_data.iteritems():
        lat, lon = latlon.split(':')
        data['map_geo'].append({
            'lat': float(lat),
            'lon': float(lon),
            'count': int(count),
        })

    # CONTINENTS #
    _aggregate_share_issues(
        redis.hgetall(rkeys.SHARE_CONTINENTS),
        rkeys.SHARE_CONTINENT_ISSUES,
        'continent',
        data['continent_issues'],
        issue_continents,
    )

    # COUNTRIES # — low-traffic countries are excluded entirely.
    _aggregate_share_issues(
        redis.hgetall(rkeys.SHARE_COUNTRIES),
        rkeys.SHARE_COUNTRY_ISSUES,
        'country',
        data['country_issues'],
        issue_countries,
        min_count=conf.COUNTRY_MIN_SHARE,
    )

    # GLOBAL # — stored as a pseudo-country so clients read it uniformly.
    share_issues = redis.hgetall(rkeys.SHARE_ISSUES)
    share_total = data['share_total']
    global_issues = data['country_issues']['GLOBAL'] = {}
    for issue, count in share_issues.iteritems():
        count = int(count)
        issue = data_types.types_map[issue]
        global_issues[issue] = get_percent(count, share_total)

    return data