コード例 #1
0
ファイル: host.py プロジェクト: akrherz/rtstats
def handle_hourly(hostname, feedtype, since):
    """Emit JSON for rtstats for this host"""
    pgconn = util.get_dbconn()
    cursor = pgconn.cursor()
    flimit = ""
    if feedtype != "":
        flimit = " and p.feedtype_id = get_ldm_feedtype_id('%s') " % (
            feedtype,
        )
    tlimit = ""
    if since is not None:
        tlimit = (" and h.valid >= '%s' ") % (
            pd.to_datetime(since).strftime("%Y-%m-%d %H:%M+00"),
        )
    sts = datetime.datetime.utcnow()
    cursor.execute(
        """
    select
    to_char(valid at time zone 'UTC', 'YYYY-MM-DDThh24:MI:SSZ'),
    h.feedtype_path_id,
    (select hostname from ldm_hostnames where id = p.origin_host_id) as origin,
    (select hostname from ldm_hostnames where id = p.relay_host_id) as relay,
    min_latency,
    avg_latency,
    max_latency,
    nprods,
    nbytes,
    (select feedtype from ldm_feedtypes where id = p.feedtype_id) as feedtype
    from ldm_rtstats_hourly h JOIN ldm_feedtype_paths p on
    (h.feedtype_path_id = p.id) WHERE
    p.node_host_id = get_ldm_host_id(%s) """
        + flimit
        + """
    """
        + tlimit
        + """
    ORDER by h.valid ASC
    """,
        (hostname,),
    )
    utcnow = datetime.datetime.utcnow()
    res = dict()
    res["query_time[secs]"] = (utcnow - sts).total_seconds()
    res["hostname"] = hostname
    res["columns"] = [
        "valid",
        "feedtype_path_id",
        "origin",
        "relay",
        "min_latency",
        "avg_latency",
        "max_latency",
        "nprods",
        "nbytes",
        "feedtype",
    ]
    res["data"] = []
    for row in cursor:
        res["data"].append(row)
    return json.dumps(res)
コード例 #2
0
def handle_topology(feedtype, reverse=False):
    """Generate topology for this feedtype"""
    if feedtype == '':
        return json.dumps("NO_FEEDTYPE_SET_ERROR")
    pgconn = util.get_dbconn()
    cursor = pgconn.cursor()
    # compute all upstreams for this feedtype
    upstreams = {}
    sts = datetime.datetime.utcnow()
    cursor.execute(
        """
    WITH active as (
        select distinct id from
        ldm_feedtype_paths p JOIN ldm_rtstats_hourly r
        on (p.id = r.feedtype_path_id)
        WHERE p.feedtype_id = get_ldm_feedtype_id(%s)
        and r.valid > (now() - '24 hours'::interval))
    select distinct
    (select hostname from ldm_hostnames where id = p.relay_host_id) as relay,
    (select hostname from ldm_hostnames where id = p.node_host_id) as node
    from ldm_feedtype_paths p JOIN active a on (p.id = a.id)
    """, (feedtype, ))
    ets = datetime.datetime.utcnow()
    for row in cursor:
        if reverse:
            upstreams.setdefault(row[0], []).append(row[1])
        else:
            upstreams.setdefault(row[1], []).append(row[0])

    res = dict()
    res['query_time[secs]'] = (ets - sts).total_seconds()
    res['generation_time'] = ets.strftime("%Y-%m-%dT%H:%M:%SZ")
    res['feedtype'] = feedtype
    res['upstreams' if not reverse else 'downstreams'] = upstreams
    return json.dumps(res)
コード例 #3
0
def handle_rtstats(hostname, feedtype):
    """Emit JSON for rtstats for this host"""
    pgconn = util.get_dbconn()
    cursor = pgconn.cursor()
    flimit = ''
    if feedtype != '':
        flimit = " and p.feedtype_id = get_ldm_feedtype_id('%s') " % (
            feedtype, )
    cursor.execute(
        """
    select
    to_char(r.entry_added at time zone 'UTC', 'YYYY-MM-DDThh24:MI:SSZ'),
    r.feedtype_path_id,
    (select hostname from ldm_hostnames where id = p.origin_host_id) as origin,
    (select hostname from ldm_hostnames where id = p.relay_host_id) as relay,
    avg_latency,
    (select feedtype from ldm_feedtypes where id = p.feedtype_id) as feedtype
    from ldm_rtstats r JOIN ldm_feedtype_paths p on
    (r.feedtype_path_id = p.id) WHERE
    p.node_host_id = get_ldm_host_id(%s) and
    r.entry_added > now() - '36 hours'::interval """ + flimit + """
    ORDER by r.entry_added ASC
    """, (hostname, ))
    res = dict()
    res['hostname'] = hostname
    res['columns'] = [
        'entry_added', 'feedtype_path_id', 'origin', 'relay', 'avg_latency',
        'feedtype'
    ]
    res['data'] = []
    for row in cursor:
        res['data'].append(row)
    return json.dumps(res)
コード例 #4
0
def stats():
    """Overview stats"""
    pgconn = util.get_dbconn()
    cursor = pgconn.cursor()
    sts = datetime.datetime.utcnow() - datetime.timedelta(hours=1)
    if sts.minute < 20:  # Assume we have last hours stats by :20 after
        sts -= datetime.timedelta(hours=1)
    sts = sts.replace(minute=0, second=0, microsecond=0, tzinfo=pytz.utc)
    cursor.execute("""
        SELECT sum(nbytes)::bigint from ldm_rtstats_hourly
        WHERE valid = %s
    """, (sts,))
    nbytes = cursor.fetchone()[0]
    cursor.execute("""
        select count(distinct node_host_id)::bigint from
        ldm_rtstats_hourly h JOIN ldm_feedtype_paths p on
        (h.feedtype_path_id = p.id) WHERE h.valid = %s
    """, (sts,))
    hosts = cursor.fetchone()[0]
    res = dict()
    res['generation_time'] = datetime.datetime.utcnow(
        ).strftime("%Y-%m-%dT%H:%M:%SZ")
    res['data'] = [dict(valid=sts.strftime("%Y-%m-%dT%H:%M:00Z"),
                        nbytes=nbytes,
                        hosts=hosts)]

    return json.dumps(res)
コード例 #5
0
def run(feedtype):
    """Generate geojson for this feedtype"""
    pgconn = util.get_dbconn()
    cursor = pgconn.cursor()
    sts = datetime.datetime.utcnow()
    cursor.execute("""
    with data as (
        select relay_host_id, node_host_id,
        avg_latency,
        r.entry_added,
        rank() OVER
            (PARTITION by r.feedtype_path_id ORDER by r.entry_added DESC)
        from ldm_rtstats r JOIN ldm_feedtype_paths p
            on (r.feedtype_path_id = p.id)
        WHERE r.entry_added > now() - '15 minutes'::interval
        and p.feedtype_id = get_ldm_feedtype_id(%s)),
    agg1 as (
        SELECT relay_host_id, node_host_id, avg(avg_latency) as avg_latency,
        max(entry_added) as valid
        from data WHERE rank = 1 GROUP by relay_host_id, node_host_id),
    geos as (
        SELECT st_makeline(
    (SELECT geom from ldm_hostnames where id = a.relay_host_id),
    (SELECT geom from ldm_hostnames where id = a.node_host_id)) as geom,
    (SELECT hostname from ldm_hostnames where id = a.relay_host_id) as relay,
    (SELECT hostname from ldm_hostnames where id = a.node_host_id) as node,
        avg_latency,
        valid
        from agg1 a)

    SELECT ST_asGEoJSON(geom, 2), relay, node, avg_latency,
    to_char(valid at time zone 'UTC', 'YYYY-MM-DDThh24:MI:SSZ') from geos
    WHERE geom is not null and ST_Length(geom) > 0.1
    """, (feedtype, ))
    utcnow = datetime.datetime.utcnow()
    res = {'type': 'FeatureCollection',
           'crs': {'type': 'EPSG',
                   'properties': {'code': 4326, 'coordinate_order': [1, 0]}},
           'features': [],
           'generation_time': utcnow.strftime("%Y-%m-%dT%H:%M:%SZ"),
           'query_time[secs]': (utcnow - sts).total_seconds(),
           'count': cursor.rowcount}
    for i, row in enumerate(cursor):
        if row[0] is None:
            continue
        res['features'].append(dict(type="Feature",
                                    id=i,
                                    properties=dict(
                                        latency=row[3],
                                        relay=row[1],
                                        node=row[2],
                                        utc_valid=row[4]
                                        ),
                                    geometry=json.loads(row[0])
                                    ))

    return json.dumps(res)
コード例 #6
0
def handle_weekly(hostname, feedtype, since):
    """Emit JSON for rtstats for this host"""
    pgconn = util.get_dbconn()
    cursor = pgconn.cursor()
    flimit = ''
    if feedtype != '':
        flimit = " and p.feedtype_id = get_ldm_feedtype_id('%s') " % (
            feedtype, )
    tlimit = ''
    if since is not None:
        tlimit = (" and h.valid >= '%s' ") % (
            pd.to_datetime(since).strftime("%Y-%m-%d"), )
    cursor.execute(
        """
    WITH weekly as (
        SELECT
        extract(year from valid) as yr, extract(week from valid) as week,
        feedtype_path_id,
        min(min_latency) as min_latency, avg(avg_latency) as avg_latency,
        max(max_latency) as max_latency, sum(nprods) as nprods,
        sum(nbytes) as nbytes,
        max(feedtype_id) as feedtype_id from
        ldm_rtstats_daily h JOIN ldm_feedtype_paths p on
            (h.feedtype_path_id = p.id) WHERE
        p.node_host_id = get_ldm_host_id(%s) """ + flimit + """
        """ + tlimit + """ GROUP by yr, week, feedtype_path_id)
    select
    to_char(
        (yr || '-01-01')::date + (week || ' weeks')::interval, 'YYYY-mm-dd')
        as v,
    h.feedtype_path_id,
    (select hostname from ldm_hostnames where id = p.origin_host_id) as origin,
    (select hostname from ldm_hostnames where id = p.relay_host_id) as relay,
    min_latency,
    avg_latency,
    max_latency,
    nprods::bigint,
    nbytes::bigint,
    (select feedtype from ldm_feedtypes where id = p.feedtype_id) as feedtype
    from weekly h JOIN ldm_feedtype_paths p on
    (h.feedtype_path_id = p.id)
    ORDER by v ASC
    """, (hostname, ))
    res = dict()
    res['hostname'] = hostname
    res['columns'] = [
        'valid', 'feedtype_path_id', 'origin', 'relay', 'min_latency',
        'avg_latency', 'max_latency', 'nprods', 'nbytes', 'feedtype'
    ]
    res['data'] = []
    for row in cursor:
        res['data'].append(row)
    return json.dumps(res)
コード例 #7
0
def run():
    """Generate geojson for this feedtype"""
    pgconn = util.get_dbconn()
    cursor = pgconn.cursor()
    cursor.execute("""
    SELECT feedtype from ldm_feedtypes ORDER by feedtype
    """)
    res = dict(feedtypes=[])
    for row in cursor:
        res["feedtypes"].append(row[0])

    return json.dumps(res)
コード例 #8
0
ファイル: host.py プロジェクト: akrherz/rtstats
def handle_topology(hostname, feedtype):
    """Generate topology for this feedtype"""
    if feedtype == "":
        return json.dumps("NO_FEEDTYPE_SET_ERROR")
    pgconn = util.get_dbconn()
    cursor = pgconn.cursor()
    # compute all upstreams for this feedtype
    upstreams = {}
    sts = datetime.datetime.utcnow()
    cursor.execute(
        """
    WITH active as (
        select distinct id from
        ldm_feedtype_paths p JOIN ldm_rtstats_hourly r
        on (p.id = r.feedtype_path_id)
        WHERE p.feedtype_id = get_ldm_feedtype_id(%s)
        and r.valid > (now() - '24 hours'::interval))
    select distinct
    (select hostname from ldm_hostnames where id = p.relay_host_id) as relay,
    (select hostname from ldm_hostnames where id = p.node_host_id) as node
    from ldm_feedtype_paths p JOIN active a on (p.id = a.id)
    """,
        (feedtype,),
    )
    utcnow = datetime.datetime.utcnow()
    for row in cursor:
        upstreams.setdefault(row[1], []).append(row[0])

    if upstreams.get(hostname) is None:
        return json.dumps("NO_TOPOLOGY_ERROR")
    paths = [[hostname, x] for x in upstreams[hostname]]
    # print "inital upstreams are =>", paths
    depth = 2
    while depth < 10:
        newpaths = []
        for path in paths:
            if len(path) == depth:
                for up in upstreams.get(path[-1], []):
                    newpaths.append(path + [up])
        if len(newpaths) == 0:
            break
        paths = paths + newpaths
        # print("===== %s ====" % (depth,))
        # for path in paths:
        #    print(",".join(path))
        depth += 1
    res = dict()
    res["query_time[secs]"] = (utcnow - sts).total_seconds()
    res["hostname"] = hostname
    res["paths"] = paths
    return json.dumps(res)
コード例 #9
0
ファイル: hosts.py プロジェクト: akrherz/rtstats
def run(feedtype):
    """Generate geojson for this feedtype"""
    pgconn = util.get_dbconn()
    cursor = pgconn.cursor()
    sts = datetime.datetime.utcnow()
    flimiter = ""
    if feedtype is not None:
        flimiter = (" WHERE p.feedtype_id = get_ldm_feedtype_id('%s') ") % (
            feedtype,
        )
    cursor.execute(
        """
    WITH data as (
        SELECT distinct feedtype_path_id, version_id from ldm_rtstats_hourly
        WHERE valid > now() - '24 hours'::interval),
    agg1 as (
        SELECT distinct p.node_host_id, d.version_id from
        data d JOIN ldm_feedtype_paths p on (d.feedtype_path_id = p.id)
        """
        + flimiter
        + """)
    SELECT ST_asGeoJson(h.geom, 2), h.hostname, v.version
    from agg1 a1, ldm_versions v, ldm_hostnames h
    WHERE a1.node_host_id = h.id and v.id = a1.version_id
    ORDER by hostname ASC
    """
    )
    utcnow = datetime.datetime.utcnow()
    res = {
        "type": "FeatureCollection",
        "crs": {
            "type": "EPSG",
            "properties": {"code": 4326, "coordinate_order": [1, 0]},
        },
        "features": [],
        "query_time[secs]": (utcnow - sts).total_seconds(),
        "generation_time": utcnow.strftime("%Y-%m-%dT%H:%M:%SZ"),
        "count": cursor.rowcount,
    }
    for row in cursor:
        res["features"].append(
            dict(
                type="Feature",
                id=row[1],
                properties=dict(hostname=row[1], ldmversion=row[2]),
                geometry=(None if row[0] is None else json.loads(row[0])),
            )
        )

    return json.dumps(res)
コード例 #10
0
ファイル: host.py プロジェクト: akrherz/rtstats
def handle_rtstats(hostname, feedtype):
    """Emit JSON for rtstats for this host"""
    pgconn = util.get_dbconn()
    cursor = pgconn.cursor()
    flimit = ""
    if feedtype != "":
        flimit = " and p.feedtype_id = get_ldm_feedtype_id('%s') " % (
            feedtype,
        )
    sts = datetime.datetime.utcnow()
    cursor.execute(
        """
    select
    to_char(r.entry_added at time zone 'UTC', 'YYYY-MM-DDThh24:MI:SSZ'),
    r.feedtype_path_id,
    (select hostname from ldm_hostnames where id = p.origin_host_id) as origin,
    (select hostname from ldm_hostnames where id = p.relay_host_id) as relay,
    avg_latency,
    (select feedtype from ldm_feedtypes where id = p.feedtype_id) as feedtype
    from ldm_rtstats r JOIN ldm_feedtype_paths p on
    (r.feedtype_path_id = p.id) WHERE
    p.node_host_id = get_ldm_host_id(%s) and
    r.entry_added > now() - '36 hours'::interval """
        + flimit
        + """
    ORDER by r.entry_added ASC
    """,
        (hostname,),
    )
    utcnow = datetime.datetime.utcnow()
    res = dict()
    res["query_time[secs]"] = (utcnow - sts).total_seconds()
    res["hostname"] = hostname
    res["columns"] = [
        "entry_added",
        "feedtype_path_id",
        "origin",
        "relay",
        "avg_latency",
        "feedtype",
    ]
    res["data"] = []
    for row in cursor:
        res["data"].append(row)
    return json.dumps(res)
コード例 #11
0
def handle_hourly(hostname, feedtype, since):
    """Emit JSON for rtstats for this host"""
    pgconn = util.get_dbconn()
    cursor = pgconn.cursor()
    flimit = ''
    if feedtype != '':
        flimit = " and p.feedtype_id = get_ldm_feedtype_id('%s') " % (
            feedtype, )
    tlimit = ''
    if since is not None:
        tlimit = (" and h.valid >= '%s' ") % (
            pd.to_datetime(since).strftime("%Y-%m-%d %H:%M+00"), )
    cursor.execute(
        """
    select
    to_char(valid at time zone 'UTC', 'YYYY-MM-DDThh24:MI:SSZ'),
    h.feedtype_path_id,
    (select hostname from ldm_hostnames where id = p.origin_host_id) as origin,
    (select hostname from ldm_hostnames where id = p.relay_host_id) as relay,
    min_latency,
    avg_latency,
    max_latency,
    nprods,
    nbytes,
    (select feedtype from ldm_feedtypes where id = p.feedtype_id) as feedtype
    from ldm_rtstats_hourly h JOIN ldm_feedtype_paths p on
    (h.feedtype_path_id = p.id) WHERE
    p.node_host_id = get_ldm_host_id(%s) """ + flimit + """
    """ + tlimit + """
    ORDER by h.valid ASC
    """, (hostname, ))
    res = dict()
    res['hostname'] = hostname
    res['columns'] = [
        'valid', 'feedtype_path_id', 'origin', 'relay', 'min_latency',
        'avg_latency', 'max_latency', 'nprods', 'nbytes', 'feedtype'
    ]
    res['data'] = []
    for row in cursor:
        res['data'].append(row)
    return json.dumps(res)
コード例 #12
0
def handle_feedtypes(hostname):
    """Generate geojson for this feedtype"""
    pgconn = util.get_dbconn()
    cursor = pgconn.cursor()
    sts = datetime.datetime.utcnow()
    cursor.execute(
        """
    select distinct f.feedtype from ldm_feedtypes f JOIN ldm_feedtype_paths p
    on (f.id = p.feedtype_id) WHERE p.node_host_id = get_ldm_host_id(%s)
    ORDER by f.feedtype
    """, (hostname, ))
    ets = datetime.datetime.utcnow()
    res = dict()
    res['query_time[secs]'] = (ets - sts).total_seconds()
    res['generation_time'] = ets.strftime("%Y-%m-%dT%H:%M:%SZ")
    res['hostname'] = hostname
    feedtypes = []
    for row in cursor:
        feedtypes.append(row[0])
    res['feedtypes'] = feedtypes
    return json.dumps(res)
コード例 #13
0
ファイル: host.py プロジェクト: akrherz/rtstats
def handle_weekly(hostname, feedtype, since):
    """Emit JSON for rtstats for this host"""
    pgconn = util.get_dbconn()
    cursor = pgconn.cursor()
    flimit = ""
    if feedtype != "":
        flimit = " and p.feedtype_id = get_ldm_feedtype_id('%s') " % (
            feedtype,
        )
    tlimit = ""
    if since is not None:
        tlimit = (" and h.valid >= '%s' ") % (
            pd.to_datetime(since).strftime("%Y-%m-%d"),
        )
    sts = datetime.datetime.utcnow()
    cursor.execute(
        """
    WITH weekly as (
        SELECT
        extract(isoyear from valid) as yr, extract(week from valid) as week,
        feedtype_path_id,
        min(min_latency) as min_latency, avg(avg_latency) as avg_latency,
        max(max_latency) as max_latency, sum(nprods) as nprods,
        sum(nbytes) as nbytes,
        max(feedtype_id) as feedtype_id from
        ldm_rtstats_daily h JOIN ldm_feedtype_paths p on
            (h.feedtype_path_id = p.id) WHERE
        p.node_host_id = get_ldm_host_id(%s) """
        + flimit
        + """
        """
        + tlimit
        + """ GROUP by yr, week, feedtype_path_id)
    select
    to_char(
        (yr || '-01-01')::date + (week || ' weeks')::interval, 'YYYY-mm-dd')
        as v,
    h.feedtype_path_id,
    (select hostname from ldm_hostnames where id = p.origin_host_id) as origin,
    (select hostname from ldm_hostnames where id = p.relay_host_id) as relay,
    min_latency,
    avg_latency,
    max_latency,
    nprods::bigint,
    nbytes::bigint,
    (select feedtype from ldm_feedtypes where id = p.feedtype_id) as feedtype
    from weekly h JOIN ldm_feedtype_paths p on
    (h.feedtype_path_id = p.id)
    ORDER by v ASC
    """,
        (hostname,),
    )
    utcnow = datetime.datetime.utcnow()
    res = dict()
    res["query_time[secs]"] = (utcnow - sts).total_seconds()
    res["hostname"] = hostname
    res["columns"] = [
        "valid",
        "feedtype_path_id",
        "origin",
        "relay",
        "min_latency",
        "avg_latency",
        "max_latency",
        "nprods",
        "nbytes",
        "feedtype",
    ]
    res["data"] = []
    for row in cursor:
        res["data"].append(row)
    return json.dumps(res)
コード例 #14
0
ファイル: agg_rtstats.py プロジェクト: oxelson/rtstats
def main():
    """Our workflow"""
    pgconn = util.get_dbconn(rw=True)
    hourly(pgconn)
    daily(pgconn)
    cleanup(pgconn)
コード例 #15
0
def main():
    pgconn = util.get_dbconn(rw=True)
    hourly(pgconn)
    daily(pgconn)
    cleanup(pgconn)