Example #1
def getSprocDataByTags():
    sql = """select tm_tag_id , sum("yaxis") AS "yaxis_t" , sum("yaxis2") AS "yaxis_c", "xaxis"  from (
 select group_date(sp_timestamp,30) as "xaxis",
        sp_sproc_id,
        max(sp_self_time) - min(sp_self_time) as "yaxis",
        max(sp_calls) - min(sp_calls) as "yaxis2"
   from monitor_data.sproc_performance_data
  where sp_timestamp > 'now'::timestamp - '9 days'::interval
  group by sp_sproc_id , group_date(sp_timestamp,30) ) data,
  monitor_data.sprocs,
  monitor_data.tag_members
  where sprocs.sproc_id = sp_sproc_id
    and tm_sproc_name = sproc_name
    and tm_schema = get_noversion_name(sproc_schema)
  group by tm_tag_id , "xaxis" order by 4 asc;"""


    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(sql)

    data = collections.defaultdict(list)

    for r in cur:
        data[r['tm_tag_id']].append((r['xaxis'], r['yaxis_t'], r['yaxis_c']))

    cur.close()
    DataDB.closeDataConnection(conn)

    return data
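
A minimal usage sketch for the function above (hypothetical caller; it assumes the same module context, with DataDB configured, as the snippets on this page):

series_by_tag = getSprocDataByTags()
for tag_id, points in sorted(series_by_tag.items()):
    # each points entry is (xaxis_bucket, self_time_delta, calls_delta)
    print(tag_id, len(points))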
Example #2
def getSingleSprocData(name, hostId=1, interval=None, sprocNr=None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(getSingleSprocSQL(name, hostId, interval, sprocNr))

    data = {
        'calls': [],
        'self_time': [],
        'total_time': [],
        'avg_time': [],
        'avg_self_time': [],
        'name': name
    }

    for r in cur:
        data['calls'].append((r['xaxis'], r['d_calls']))
        data['total_time'].append((r['xaxis'], r['d_total_time']))
        data['self_time'].append((r['xaxis'], r['d_self_time']))
        data['avg_time'].append((r['xaxis'], r['d_avg_time']))
        data['avg_self_time'].append((r['xaxis'], r['d_avg_self_time']))

    cur.close()
    DataDB.closeDataConnection(conn)

    return data
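
A usage sketch; the procedure name is illustrative only:

series = getSingleSprocData('get_customer_orders', hostId=1)
for ts, calls in series['calls']:
    print(ts, calls)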
Example #3
def getTablePerformanceIssues(hostname, date_from, date_to):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute(
        """select * from monitor_data.get_table_threshold_sinners_for_period(%s,%s,%s)""",
        (hostname, date_from, date_to))
    data = []  # cur.fetchall()
    for r in cur:
        row = {
            'host_name': r['host_name'],
            'host_id': r['host_id'],
            'schema_name': r['schema_name'],
            'table_name': r['table_name'],
            'day': r['day'],
            'scan_change_pct': r['scan_change_pct'],
            'scans1': r['scans1'],
            'scans2': r['scans2'],
            'size1': r['size1'],
            'size2': r['size2'],
            'size_change_pct': r['size_change_pct'],
            'allowed_seq_scan_pct': r['allowed_seq_scan_pct'],
        }
        data.append(row)
    cur.close()
    conn.close()
    return data
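
A usage sketch; the hostname and dates are illustrative. Note that this variant passes the parameters as a tuple, letting psycopg2 quote them:

from datetime import date

rows = getTablePerformanceIssues('customer_db', date(2013, 1, 1), date(2013, 1, 8))
for row in rows:
    print(row['schema_name'], row['table_name'], row['scan_change_pct'])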
Example #4
def getTop10Interval(order=avgRuntimeOrder, interval=None, hostId=1, limit=10):

    sql = """select regexp_replace("name", E'(\\\\(.*\\\\))','()') AS "name",
                    round( sum(d_calls) , 0 ) AS "calls",
                    round( sum(d_total_time) , 0 ) AS "totalTime",
                    round( sum(d_total_time) / sum(d_calls) , 0 ) AS "avgTime"
               from ( """ + getSQL(interval, hostId) + """) tt
              where d_calls > 0
              group by "name"
              order by """+order+"""  limit """ + str(adapt(limit))

    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(sql)

    sprocs = []

    for record in cur:
        record['avgTime'] = makeTimeIntervalReadable(record['avgTime'])
        record['totalTime'] = makeTimeIntervalReadable(record['totalTime'])
        sprocs.append(record)

    conn.close()

    return sprocs
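
Unlike limit, which goes through adapt(), the order argument is concatenated into the SQL verbatim, so it must never come from user input. A sketch of a whitelist guard a caller could apply first (names are hypothetical, not part of the original code):

ALLOWED_ORDERS = {
    'avgTime': '"avgTime" DESC',
    'totalTime': '"totalTime" DESC',
    'calls': '"calls" DESC',
}

def safe_order(key):
    # Map a fixed set of keys to ORDER BY fragments instead of
    # concatenating caller-supplied SQL.
    try:
        return ALLOWED_ORDERS[key]
    except KeyError:
        raise ValueError('unsupported order key: %r' % (key,))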
Example #5
def getLoadReportData():
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute("""select load_host_id AS id,
                          extract(week from load_timestamp)::text AS kw,
                          round(avg(load_1min_value)/100,2) AS avg,
                          round(max(load_1min_value)/100,2) AS max,
                          to_char(min(load_timestamp::date),'dd.mm.YYYY') AS min_date,
                          to_char(max(load_timestamp::date),'dd.mm.YYYY') AS max_date,
                          min(load_timestamp::date) AS sort_date
                     from monitor_data.host_load , monitor_data.hosts
                    where host_id = load_host_id
                      and host_enabled
                      and load_timestamp > ('now'::timestamp - '9 weeks'::interval)
                      and extract(dow from load_timestamp) IN(1,2,3,4,5)
                    group by load_host_id, extract(week from load_timestamp)
                    order by 1 ASC,7 DESC""")

    data = defaultdict(list)

    lastRR = None

    for r in cur:

        rr = {'id' : r['id'],
              'avg' : r['avg'],
              'max' : r['max'],
              'min_date' : r['min_date'],
              'max_date' : r['max_date'],
              'trendAvg': 0,
              'trendMax': 0,
              'kw' : r['kw']
              }

        if lastRR != None and lastRR['id']==rr['id']:
            if lastRR['max'] < r['max']:
                lastRR['trendMax'] = -1
            elif lastRR['max'] > r['max']:
                lastRR['trendMax'] = 1

            if lastRR['avg'] < r['avg']:
                lastRR['trendAvg'] = -1
            elif lastRR['avg'] > r['avg']:
                lastRR['trendAvg'] = 1

        data[int(r['id'])].append(rr)
        lastRR = rr

    cur.close()
    conn.close()

    return sorted(data.values(), key = lambda x : hosts.hosts[x[0]['id']]['settings']['uiShortName'])
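
The query orders each host's rows by sort_date descending, so lastRR always holds the newer week and the trend flags are written onto it. The comparison in isolation (a hypothetical helper, not part of the original code):

def trend(newer, older):
    # -1: the newer week is lower than the older one (metric fell),
    #  1: it is higher (metric rose), 0: unchanged.
    if newer < older:
        return -1
    if newer > older:
        return 1
    return 0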
Example #6
def getLoadReportData():
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute("""select load_host_id AS id,
                          extract(week from load_timestamp)::text AS kw,
                          round(avg(load_1min_value)/100,2) AS avg,
                          round(max(load_1min_value)/100,2) AS max,
                          to_char(min(load_timestamp::date),'dd.mm.YYYY') AS min_date,
                          to_char(max(load_timestamp::date),'dd.mm.YYYY') AS max_date,
                          min(load_timestamp::date) AS sort_date
                     from monitor_data.host_load , monitor_data.hosts
                    where host_id = load_host_id
                      and host_enabled
                      and load_timestamp > ('now'::timestamp - '9 weeks'::interval)
                      and extract(dow from load_timestamp) IN(1,2,3,4,5)
                    group by load_host_id, extract(week from load_timestamp)
                    order by 1 ASC,7 DESC""")

    data = defaultdict(list)

    lastRR = None

    for r in cur:

        rr = {
            'id': r['id'],
            'avg': r['avg'],
            'max': r['max'],
            'min_date': r['min_date'],
            'max_date': r['max_date'],
            'trendAvg': 0,
            'trendMax': 0,
            'kw': r['kw']
        }

        if lastRR != None and lastRR['id'] == rr['id']:
            if lastRR['max'] < r['max']:
                lastRR['trendMax'] = -1
            elif lastRR['max'] > r['max']:
                lastRR['trendMax'] = 1

            if lastRR['avg'] < r['avg']:
                lastRR['trendAvg'] = -1
            elif lastRR['avg'] > r['avg']:
                lastRR['trendAvg'] = 1

        data[int(r['id'])].append(rr)
        lastRR = rr

    cur.close()
    conn.close()

    return sorted(
        data.values(),
        key=lambda x: hosts.hosts[x[0]['id']]['settings']['uiShortName'])
Example #7
def load_filter_lines(host_id, _filter = None, interval = None):

    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(get_filted_query(host_id,_filter,interval))
    l = []
    for row in cur:
        l.append((row['xaxis'], row['yaxis']))

    cur.close()
    DataDB.closeDataConnection(conn)

    return l
Example #8
def getTableData(host, name, interval = None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(getSingleTableSql(host,name,interval))

    d = { 'table_size' : [], 'index_size' : [], 'seq_scans': [], 'index_scans' : [], 'ins':[], 'upd':[], 'del':[], 'hot':[] }

    last_is = None
    last_ss = None

    last_ins = None
    last_del = None
    last_upd = None
    last_hot = None
    last_timestamp = 0

    for r in cur:
        d['table_size'].append ( ( r['tsd_timestamp'] , r['tsd_table_size'] ) )
        d['index_size'].append ( ( r['tsd_timestamp'] , r['tsd_index_size'] ) )

        if int(time.mktime(r['tsd_timestamp'].timetuple()) * 1000) - last_timestamp <= ( 15*60*1000 ):
            if last_ss != None:
                d['seq_scans'].append  ( ( r['tsd_timestamp'] , r['tsd_seq_scans']-last_ss ) )

            if last_is != None:
                d['index_scans'].append( ( r['tsd_timestamp'] , r['tsd_index_scans'] - last_is ) )

            if last_ins != None and last_ins != 0:
                d['ins'].append( ( r['tsd_timestamp'] , r['tsd_tup_ins'] - last_ins ) )

            if last_del != None and last_del != 0:
                d['del'].append( ( r['tsd_timestamp'] , r['tsd_tup_del'] - last_del ) )

            if last_upd != None and last_upd != 0:
                d['upd'].append( ( r['tsd_timestamp'] , r['tsd_tup_upd'] - last_upd ) )

            if last_hot != None and last_hot != 0:
                d['hot'].append( ( r['tsd_timestamp'] , r['tsd_tup_hot_upd'] - last_hot ) )

        last_is = r['tsd_index_scans']
        last_ss = r['tsd_seq_scans']

        last_ins = r['tsd_tup_ins']
        last_del = r['tsd_tup_del']
        last_upd = r['tsd_tup_upd']
        last_hot = r['tsd_tup_hot_upd']

        last_timestamp = int(time.mktime(r['tsd_timestamp'].timetuple()) * 1000)

    cur.close()
    DataDB.closeDataConnection(conn)

    return d
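
The tsd_* scan and tuple counters are cumulative, so the loop emits the difference between consecutive samples and suppresses points that follow a gap of more than 15 minutes. The same idea in isolation (a sketch):

def deltas(samples, max_gap_ms=15 * 60 * 1000):
    # samples: [(timestamp_ms, counter_value), ...] in ascending time order.
    out = []
    for (t0, v0), (t1, v1) in zip(samples, samples[1:]):
        if t1 - t0 <= max_gap_ms:
            out.append((t1, v1 - v0))
    return out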
Example #9
def load_filter_lines(host_id, _filter=None, interval=None):

    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(get_filted_query(host_id, _filter, interval))
    l = []
    for row in cur:
        l.append((row['xaxis'], row['yaxis']))

    cur.close()
    DataDB.closeDataConnection(conn)

    return l
Example #10
def getGroupsData():
    conn = DataDB.getDataConnection()
    groups = {}
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute("SELECT * FROM monitor_data.host_groups;")
    for g in cur:
        groups[g['group_id']] = g['group_name']

    cur.close()
    conn.close()
    return groups
Example #11
def getHostData():
    conn = DataDB.getDataConnection()
    hosts = {}

    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute("SELECT * FROM monitor_data.hosts WHERE host_enabled = true ORDER BY host_id ASC;")
    for r in cur:
        rr = dict(r)
        rr['settings'] = json.loads(rr['host_settings'])
        hosts[rr['host_id']] = rr

    cur.close()
    conn.close()
    return hosts
Example #12
def getHostData():
    conn = DataDB.getDataConnection()
    hosts = {}

    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute("SELECT * FROM monitor_data.hosts WHERE host_enabled = true ORDER BY host_id ASC;")
    for r in cur:
        rr = dict(r)
        rr['settings'] = json.loads(rr['host_settings'])
        rr['uishortname'] = rr['settings']['uiShortName'].lower().replace('-','')
        rr['uilongname'] = rr['settings']['uiLongName']
        hosts[rr['host_id']] = rr

    cur.close()
    conn.close()
    return hosts
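
host_settings is stored as JSON text, which is why both variants run it through json.loads. A minimal illustration of the shape this code expects (the values are invented):

import json

settings = json.loads('{"uiShortName": "db-01", "uiLongName": "Customer DB 01"}')
print(settings['uiShortName'].lower().replace('-', ''))  # prints 'db01'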
Example #13
def getTableIOData(host, name, interval=None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(getSingleTableIOSql(host, name, interval))

    d = {'heap_read': [], 'heap_hit': [], 'index_read': [], 'index_hit': []}

    last_hr = None
    last_hh = None
    last_ir = None
    last_ih = None
    last_timestamp = 0

    for r in cur:

        if int(time.mktime(r['tio_timestamp'].timetuple()) *
               1000) - last_timestamp <= (15 * 60 * 1000):
            if last_hr != None:
                d['heap_read'].append(
                    (r['tio_timestamp'], r['tio_heap_read'] - last_hr))

            if last_hh != None:
                d['heap_hit'].append(
                    (r['tio_timestamp'], r['tio_heap_hit'] - last_hh))

            if last_ir != None:
                d['index_read'].append(
                    (r['tio_timestamp'], r['tio_idx_read'] - last_ir))

            if last_ih != None:
                d['index_hit'].append(
                    (r['tio_timestamp'], r['tio_idx_hit'] - last_ih))

        last_hr = r['tio_heap_read']
        last_hh = r['tio_heap_hit']
        last_ir = r['tio_idx_read']
        last_ih = r['tio_idx_hit']

        last_timestamp = int(
            time.mktime(r['tio_timestamp'].timetuple()) * 1000)

    cur.close()
    DataDB.closeDataConnection(conn)

    return d
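
A caller could turn the returned read/hit deltas into a buffer-cache hit ratio. A sketch; it relies on the two series sharing timestamps, which holds here because both come from the same rows:

def hit_ratio(reads, hits):
    # reads/hits: [(timestamp, delta), ...] with matching timestamps.
    return [(t, h / float(r + h))
            for (t, r), (_, h) in zip(reads, hits) if r + h > 0]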
Example #14
def getSingleSprocData(name, hostId=1, interval=None, sprocNr = None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute( getSingleSprocSQL(name, hostId, interval, sprocNr ) )

    data = { 'calls' : [], 'self_time': [], 'total_time' : [] , 'avg_time' : [] , 'avg_self_time': [] , 'name' : name }

    for r in cur:
        data['calls'].append( ( r['xaxis'] , r['d_calls'] ) )
        data['total_time'].append ( ( r['xaxis'] , r['d_total_time'] ) )
        data['self_time'].append ( ( r['xaxis'] , r['d_self_time'] ) )
        data['avg_time'].append ( ( r['xaxis'] , r['d_avg_time'] ) )
        data['avg_self_time'].append ( ( r['xaxis'] , r['d_avg_self_time'] ) )

    cur.close()
    DataDB.closeDataConnection(conn)

    return data
Example #15
def getCpuLoad(hostId=1):
    load = { "load_15min_avg" : [] , "load_15min_max" : [] }

    sql = """ SELECT date_trunc('hour'::text, load_timestamp) + floor(date_part('minute'::text, load_timestamp) / 15::double precision) * '00:15:00'::interval AS load_timestamp,
                     AVG(load_1min_value) AS load_15min_avg,
                     MAX(load_1min_value) AS load_15min_max
                FROM monitor_data.host_load WHERE load_host_id = """ + str(adapt(hostId)) + """ AND load_timestamp > ('now'::timestamp - '9 days'::interval)
                GROUP BY date_trunc('hour'::text, load_timestamp) + floor(date_part('minute'::text, load_timestamp) / 15::double precision) * '00:15:00'::interval
                ORDER BY 1 ASC """

    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(sql)
    for record in cur:
        load['load_15min_avg'].append( (record['load_timestamp'] , round( float(record['load_15min_avg'])/100,2) ) )
        load['load_15min_max'].append( (record['load_timestamp'] , round( float(record['load_15min_max'])/100,2) ) )

    cur.close()
    conn.close()

    return load
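
The date_trunc/floor expression buckets load_timestamp into 15-minute slots; the equivalent bucketing in Python, for comparison (a sketch):

from datetime import datetime

def bucket_15min(ts):
    # Truncate to the hour, then snap the minutes to a 15-minute slot.
    return ts.replace(minute=(ts.minute // 15) * 15, second=0, microsecond=0)

print(bucket_15min(datetime(2013, 5, 7, 10, 47, 31)))  # 2013-05-07 10:45:00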
Example #16
def getWalVolumes(hostId=1):
    load = { "wal_15min_growth" : []}

    sql = """
            SELECT 
                date_trunc('hour'::text, load_timestamp) + floor(date_part('minute'::text, load_timestamp) / 15::double precision) * '00:15:00'::interval AS load_timestamp,
                coalesce(max(xlog_location_mb)-min(xlog_location_mb),0)  AS wal_15min_growth
            FROM monitor_data.host_load WHERE load_host_id = """ + str(adapt(hostId)) + """ AND load_timestamp > ('now'::timestamp - '9 days'::interval)
            GROUP BY date_trunc('hour'::text, load_timestamp) + floor(date_part('minute'::text, load_timestamp) / 15::double precision) * '00:15:00'::interval
            ORDER BY 1 ASC
            """

    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(sql)
    for record in cur:
        load['wal_15min_growth'].append( (record['load_timestamp'] , record['wal_15min_growth'] ) )

    cur.close()
    conn.close()

    return load
Example #17
def getSprocsOrderedBy(hostId, order=" ORDER BY SUM(delta_total_time) DESC"):
    sql = """SELECT sproc_name
               FROM ( """ + viewSprocs() + """ ) t JOIN monitor_data.sprocs ON sp_sproc_id = sproc_id
               WHERE sproc_host_id = """ + str(hostId) + """
               GROUP BY sproc_name
             """ + order + """;
          """

    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    list = []
    cur.execute(sql)

    for r in cur:
        list.append(r['sproc_name'])

    cur.close()
    DataDB.closeDataConnection(conn)
    return list
Example #18
def getTableIOData(host, name, interval = None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(getSingleTableIOSql(host,name,interval))

    d = { 'heap_read' : [], 'heap_hit' : [], 'index_read' : [], 'index_hit': [] }

    last_hr = None
    last_hh = None
    last_ir = None
    last_ih = None
    last_timestamp = 0

    for r in cur:

        if int(time.mktime(r['tio_timestamp'].timetuple()) * 1000) - last_timestamp <= ( 15*60*1000 ):
            if last_hr != None:
                d['heap_read'].append(( r['tio_timestamp'] , r['tio_heap_read'] - last_hr ))

            if last_hh != None:
                d['heap_hit'].append(( r['tio_timestamp'] , r['tio_heap_hit'] - last_hh ))

            if last_ir != None:
                d['index_read'].append(( r['tio_timestamp'] , r['tio_idx_read'] - last_ir ))

            if last_ih != None:
                d['index_hit'].append(( r['tio_timestamp'] , r['tio_idx_hit'] - last_ih ))

        last_hr = r['tio_heap_read']
        last_hh = r['tio_heap_hit']
        last_ir = r['tio_idx_read']
        last_ih = r['tio_idx_hit']

        last_timestamp = int(time.mktime(r['tio_timestamp'].timetuple()) * 1000)


    cur.close()
    DataDB.closeDataConnection(conn)

    return d
Example #19
def getSprocsOrderedBy(hostId, order=" ORDER BY SUM(delta_total_time) DESC"):
    sql = """SELECT sproc_name
               FROM ( """ + viewSprocs(
    ) + """ ) t JOIN monitor_data.sprocs ON sp_sproc_id = sproc_id
               WHERE sproc_host_id = """ + str(adapt(hostId)) + """
               GROUP BY sproc_name
             """ + order + """;
          """

    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    list = []
    cur.execute(sql)

    for r in cur:
        list.append(r['sproc_name'])

    cur.close()
    DataDB.closeDataConnection(conn)
    return list
Example #20
def getApiPerformanceIssues(hostname, api_from, api_to):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute(
        """select * from monitor_data.get_sproc_threshold_sinners_for_release(%s,%s,%s)""",
        (hostname, api_from, api_to))
    data = []  # cur.fetchall()
    for r in cur:
        row = {
            'host_name': r['host_name'],
            'host_id': r['host_id'],
            'sproc_schema': r['sproc_schema'],
            'sproc_name': r['sproc_name'],
            'calltime_change_pct': r['calltime_change_pct'],
            'share_on_total_runtime': r['share_on_total_runtime'],
            'execution_avg1': r['execution_avg1'],
            'execution_avg2': r['execution_avg2'],
            'calls1': r['calls1'],
            'calls2': r['calls2'],
            'callscount_change_pct': r['callscount_change_pct'],
            'allowed_runtime_growth_pct': r['allowed_runtime_growth_pct'],
            'allowed_share_on_total_runtime_pct': r['allowed_share_on_total_runtime_pct'],
        }
        data.append(row)
    cur.close()
    conn.close()
    return data
Example #21
def getLoad(hostId=1):
    sql = """select xaxis, sum(d_self_time) OVER (ORDER BY xaxis ASC ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) / (1*15*60*1000) AS load_15min,
                           sum(d_self_time) OVER (ORDER BY xaxis ASC ROWS BETWEEN 4 PRECEDING AND CURRENT ROW) / (4*15*60*1000) AS load_1hour
               from ( select xaxis,sum(d_self_time) d_self_time from (""" + getSQL("('now'::timestamp - '9 days'::interval)" ,hostId) + """) dataTabel group by xaxis ) loadTable """

    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    load = { 'load_15min' : [], 'load_1hour': [] }

    print ( sql )

    cur.execute(sql)
    lastTime = None
    skip15min=0
    skip1h=0

    for record in cur:
        currentTime = int(time.mktime(record['xaxis'].timetuple()) * 1000)
        if lastTime != None:
            if currentTime - lastTime > ( 15 * 60 * 1000):
                skip15min = 2
                skip1h=5

        if skip15min>0:
            skip15min -= 1
        else:
            load['load_15min'].append((record['xaxis'], round ( record['load_15min'], 2 ) ) )

        if skip1h > 0:
            skip1h -= 1
        else:
            load['load_1hour'].append((record['xaxis'], round ( record['load_1hour'] , 2 )))

        lastTime = int(time.mktime(record['xaxis'].timetuple()) * 1000)

    cur.close()
    conn.close()

    return load
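
The window functions divide accumulated stored-procedure self-time (in ms) by the window length in ms, which yields the average number of busy backends, much like a CPU load average; the skip counters then drop the first points after a data gap, whose windows would straddle the gap. For example:

window_ms = 15 * 60 * 1000               # one 15-minute bucket
self_time_ms = 450000                    # sproc self-time accumulated in it
print(self_time_ms / float(window_ms))   # 0.5, i.e. half a backend busy on average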
Example #22
def getLoad(hostId=1):
    sql = """select xaxis, sum(d_self_time) OVER (ORDER BY xaxis ASC ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) / (1*15*60*1000) AS load_15min,
                           sum(d_self_time) OVER (ORDER BY xaxis ASC ROWS BETWEEN 4 PRECEDING AND CURRENT ROW) / (4*15*60*1000) AS load_1hour
               from ( select xaxis,sum(d_self_time) d_self_time from (""" + getSQL("('now'::timestamp - '9 days'::interval)" ,hostId) + """) dataTabel group by xaxis ) loadTable """

    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    load = { 'load_15min' : [], 'load_1hour': [] }

    cur.execute(sql)
    lastTime = None
    skip15min=0
    skip1h=0

    for record in cur:
        currentTime = int(time.mktime(record['xaxis'].timetuple()) * 1000)
        if lastTime != None:
            if currentTime - lastTime > ( 15 * 60 * 1000):
                skip15min = 2
                skip1h=5

        if skip15min>0:
            skip15min -= 1
        else:
            load['load_15min'].append((record['xaxis'], round ( record['load_15min'], 2 ) ) )

        if skip1h > 0:
            skip1h -= 1
        else:
            load['load_1hour'].append((record['xaxis'], round ( record['load_1hour'] , 2 )))

        lastTime = int(time.mktime(record['xaxis'].timetuple()) * 1000)

    cur.close()
    conn.close()

    return load
Example #23
def getTablePerformanceIssues(hostname, date_from, date_to):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute("""select * from monitor_data.get_table_threshold_sinners_for_period(%s,%s,%s)""", (hostname, date_from, date_to))
    data = [] # cur.fetchall()
    for r in cur:
        row = {'host_name' : r['host_name'],
              'host_id' : r['host_id'],
              'schema_name' : r['schema_name'],
              'table_name' : r['table_name'],
              'day' : r['day'],
              'scan_change_pct' : r['scan_change_pct'],
              'scans1': r['scans1'],
              'scans2': r['scans2'],
              'size1': r['size1'],
              'size2': r['size2'],
              'size_change_pct': r['size_change_pct'],
              'allowed_seq_scan_pct': r['allowed_seq_scan_pct'],
              }
        data.append(row)
    cur.close()
    conn.close()
    return data
Example #24
def getApiPerformanceIssues(hostname, api_from, api_to):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute("""select * from monitor_data.get_sproc_threshold_sinners_for_release(%s,%s,%s)""", (hostname, api_from, api_to))
    data = [] # cur.fetchall()
    for r in cur:
        row = {'host_name' : r['host_name'],
              'host_id' : r['host_id'],
              'sproc_schema' : r['sproc_schema'],
              'sproc_name' : r['sproc_name'],
              'calltime_change_pct' : r['calltime_change_pct'],
              'share_on_total_runtime' : r['share_on_total_runtime'],
              'execution_avg1': r['execution_avg1'],
              'execution_avg2': r['execution_avg2'],
              'calls1': r['calls1'],
              'calls2': r['calls2'],
              'callscount_change_pct': r['callscount_change_pct'],
              'allowed_runtime_growth_pct': r['allowed_runtime_growth_pct'],
              'allowed_share_on_total_runtime_pct': r['allowed_share_on_total_runtime_pct'],
              }
        data.append(row)
    cur.close()
    conn.close()
    return data
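
A usage sketch; the hostname and release identifiers are illustrative only:

issues = getApiPerformanceIssues('customer_db', 'R13_01', 'R13_02')
for row in issues:
    print(row['sproc_name'], row['calltime_change_pct'])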
Example #25
def getLoadReportData(hostId=None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    query = """
            with q as (
                select 
                  t_host_id
                , week
                , (select round((sum(tsd_table_size) + sum(tsd_index_size))/10^9::numeric,1)::text  -- table + index, in GB
                     from monitor_data.table_size_data
                    where tsd_timestamp = max) as db_size 
                from (
                    select
                        t_host_id
                      , extract(week from tsd_timestamp) as week               
                      , max(tsd_timestamp)
                    from  monitor_data.table_size_data
                        , monitor_data.tables
                    where tsd_timestamp > ('now'::timestamp - '9 weeks'::interval)
                    and tsd_table_id = t_id
                    and (%s is null or t_host_id = %s)
                    group by t_host_id, extract(week from tsd_timestamp)
                ) a
            )
            select 
                  load_host_id AS id,
                  extract(week from load_timestamp)::text AS kw,
                  round(avg(load_1min_value)/100,2) AS avg,
                  round(max(load_1min_value)/100,2) AS max,
                  to_char(min(load_timestamp::date),'dd.mm.YYYY') AS min_date,
                  to_char(max(load_timestamp::date),'dd.mm.YYYY') AS max_date,
                  min(load_timestamp::date) AS sort_date,
                  max(q.db_size) as db_size,
                  round((max(xlog_location_mb) - min(xlog_location_mb)) / 1000.0, 1)  as wal_written
             from monitor_data.host_load
                , monitor_data.hosts
                , q
            where host_id = load_host_id
              and host_enabled
              and load_timestamp > ('now'::timestamp - '9 weeks'::interval)
              and extract(dow from load_timestamp) IN(1,2,3,4,5)                      
              and q.t_host_id = load_host_id
              and q.week = extract(week from load_timestamp)
              and (%s is null or host_id = %s)
            group by load_host_id, extract(week from load_timestamp)
            order by 1 ASC,7 DESC
            """
    cur.execute(query, (hostId, hostId, hostId, hostId))

    data = defaultdict(list)

    lastRR = None

    for r in cur:

        rr = {'id' : r['id'],
              'avg' : r['avg'],
              'max' : r['max'],
              'min_date' : r['min_date'],
              'max_date' : r['max_date'],
              'db_size' : r['db_size'],
              'wal_written' : r['wal_written'],
              'trendAvg': 0,
              'trendMax': 0,
              'kw' : r['kw']
              }

        if lastRR != None and lastRR['id']==rr['id']:
            if lastRR['max'] < r['max']:
                lastRR['trendMax'] = -1
            elif lastRR['max'] > r['max']:
                lastRR['trendMax'] = 1

            if lastRR['avg'] < r['avg']:
                lastRR['trendAvg'] = -1
            elif lastRR['avg'] > r['avg']:
                lastRR['trendAvg'] = 1

            # db_size is selected as text (round(...)::text in the SQL), so this
            # comparison is lexicographic; casting to numeric before comparing
            # would make the size trend more reliable.
            if lastRR['db_size'] < r['db_size']:
                lastRR['trendSize'] = -1
            elif lastRR['db_size'] > r['db_size']:
                lastRR['trendSize'] = 1

            if lastRR['wal_written'] < r['wal_written']:
                lastRR['trendWal'] = -1
            elif lastRR['wal_written'] > r['wal_written']:
                lastRR['trendWal'] = 1

        data[int(r['id'])].append(rr)
        lastRR = rr

    cur.close()
    conn.close()

    return sorted(data.values(), key = lambda x : hosts.hosts[x[0]['id']]['settings']['uiShortName'])
Example #26
def getTableData(host, name, interval=None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(getSingleTableSql(host, name, interval))

    d = {
        "table_size": [],
        "index_size": [],
        "seq_scans": [],
        "index_scans": [],
        "ins": [],
        "upd": [],
        "del": [],
        "hot": [],
    }

    last_is = None
    last_ss = None

    last_ins = None
    last_del = None
    last_upd = None
    last_hot = None
    last_timestamp = 0

    for r in cur:
        d["table_size"].append((r["tsd_timestamp"], r["tsd_table_size"]))
        d["index_size"].append((r["tsd_timestamp"], r["tsd_index_size"]))

        if int(time.mktime(r["tsd_timestamp"].timetuple()) * 1000) - last_timestamp <= (15 * 60 * 1000):
            if last_ss != None:
                d["seq_scans"].append((r["tsd_timestamp"], r["tsd_seq_scans"] - last_ss))

            if last_is != None:
                d["index_scans"].append((r["tsd_timestamp"], r["tsd_index_scans"] - last_is))

            if last_ins != None and last_ins != 0:
                d["ins"].append((r["tsd_timestamp"], r["tsd_tup_ins"] - last_ins))

            if last_del != None and last_del != 0:
                d["del"].append((r["tsd_timestamp"], r["tsd_tup_del"] - last_del))

            if last_upd != None and last_upd != 0:
                d["upd"].append((r["tsd_timestamp"], r["tsd_tup_upd"] - last_upd))

            if last_hot != None and last_hot != 0:
                d["hot"].append((r["tsd_timestamp"], r["tsd_tup_hot_upd"] - last_hot))

        last_is = r["tsd_index_scans"]
        last_ss = r["tsd_seq_scans"]

        last_ins = r["tsd_tup_ins"]
        last_del = r["tsd_tup_del"]
        last_upd = r["tsd_tup_upd"]
        last_hot = r["tsd_tup_hot_upd"]

        last_timestamp = int(time.mktime(r["tsd_timestamp"].timetuple()) * 1000)

    cur.close()
    DataDB.closeDataConnection(conn)

    return d
Example #27
def getTopTables(hostId=1, limit=10, order=None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    if limit == None:
        limit = ""
    else:
        limit = """ LIMIT """ + str(adapt(limit))

    if order == None:
        order = 2

    order = {
        1: "ORDER BY schema ASC,name ASC ",
        2: "ORDER BY table_size DESC",
        3: "ORDER BY table_size - min_table_size DESC",
        4: "ORDER BY CASE WHEN min_table_size > 0 THEN table_size::float / min_table_size ELSE 0 END DESC",
        5: "ORDER BY index_size DESC",
        6: "ORDER BY index_size - min_index_size DESC",
        7: "ORDER BY CASE WHEN min_index_size > 0 THEN index_size::float / min_index_size ELSE 0 END DESC"
    }[int(order)]

    cur.execute("""SELECT MAX(tsd_timestamp) AS max_date
                     FROM monitor_data.table_size_data
                    WHERE tsd_table_id IN ( SELECT t_id FROM monitor_data.tables WHERE t_host_id = """
                + str(adapt(hostId)) + """)
                      AND tsd_timestamp > current_date - 1""")

    maxTime = None
    for record in cur:
        maxTime = record['max_date']

    if maxTime == None:
        cur.close()
        conn.close()
        return []

    sql = """SELECT * FROM ( SELECT t_schema AS schema,
                          t_name AS name,
                          tsd_table_size AS table_size,
                          tsd_index_size AS index_size,
                          COALESCE ( ( SELECT MIN(tsd_table_size) FROM monitor_data.table_size_data st WHERE td.tsd_table_id = st.tsd_table_id AND st.tsd_timestamp > ('now'::timestamp - '7 days'::interval) ), 0) AS min_table_size,
                          COALESCE ( ( SELECT MIN(tsd_index_size) FROM monitor_data.table_size_data st WHERE td.tsd_table_id = st.tsd_table_id AND st.tsd_timestamp > ('now'::timestamp - '7 days'::interval) ), 0) AS min_index_size

                     FROM monitor_data.table_size_data td
                     JOIN monitor_data.tables ON t_id = td.tsd_table_id
                    WHERE td.tsd_timestamp = """ + str(
        adapt(maxTime)) + """ AND t_host_id = """ + str(
            adapt(hostId)) + """ ) _t """ + order + """ """ + limit

    cur.execute(sql)

    list = []
    for r in cur:

        d = {}
        for k in r.keys():
            d[k] = r[k]

        d['table_size_pretty'] = makePrettySize(r['table_size'])
        d['index_size_pretty'] = makePrettySize(r['index_size'])
        d['table_size_delta'] = makePrettySize(r['table_size'] -
                                               r['min_table_size'])
        d['index_size_delta'] = makePrettySize(r['index_size'] -
                                               r['min_index_size'])
        if r['min_table_size'] > 0:
            d['growth'] = round(
                ((float(r['table_size']) / r['min_table_size']) - 1) * 100, 1)
        else:
            d['growth'] = 0

        if r['min_index_size'] > 0:
            d['growth_index'] = round(
                ((float(r['index_size']) / r['min_index_size']) - 1) * 100, 1)
        else:
            d['growth_index'] = 0

        list.append(d)

    cur.close()
    conn.close()

    return list
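
A usage sketch; order=3 picks the ordering by absolute 7-day table growth defined in the mapping above, and the argument values are illustrative:

for t in getTopTables(hostId=1, limit=5, order=3):
    print(t['schema'], t['name'], t['table_size_pretty'], t['table_size_delta'])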
Example #28
def getDatabaseSizes(host_id = None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(getSizeTrendSQL(host_id))
    size_data = {}

    current_host = 0

    last_timestamp = None

    for record in cur:

        if record['t_host_id'] != current_host:
            current_host = record['t_host_id']
            set_ins = False
            set_del = False
            set_upd = False

            l_ins = None
            l_upd = None
            l_del = None
            last_timestamp = None

        if last_timestamp == None:
            last_timestamp = int(time.mktime(record['tsd_timestamp'].timetuple()) * 1000)

        if not record['t_host_id'] in size_data:
            size_data[record['t_host_id']] = { 'size' : [] , 'ins': [], 'upd': [], 'del':[] }

        """ exclude 0 values, otherwise there is a big peak at start, with wraparound this should be ok"""

        if not set_ins and record['s_ins']!=0:
            l_ins = record['s_ins']
            set_ins = True

        if not set_upd and record['s_upd']!=0:
            l_upd = record['s_upd']
            set_upd = True

        if not set_del and record['s_del']!=0:
            l_del = record['s_del']
            set_del = True

        if l_ins == None:
            l_ins = record['s_ins']

        if l_upd == None:
            l_upd = record['s_upd']

        if l_del == None:
            l_del = record['s_del']

        size_data[record['t_host_id']]['size'].append( ( record['tsd_timestamp'] , record['size'] ) )
        size_data[record['t_host_id']]['ins'].append( ( record['tsd_timestamp'] , max( record['s_ins'] - l_ins , 0)  ) )
        size_data[record['t_host_id']]['del'].append( ( record['tsd_timestamp'] , max( record['s_del'] - l_del , 0)  ) )
        size_data[record['t_host_id']]['upd'].append( ( record['tsd_timestamp'] , max( record['s_upd'] - l_upd , 0)  ) )

        l_ins = record['s_ins']
        l_upd = record['s_upd']
        l_del = record['s_del']

        last_timestamp = int(time.mktime(record['tsd_timestamp'].timetuple()) * 1000)

    cur.close()
    DataDB.closeDataConnection(conn)

    return size_data
Example #29
def getTopTables(hostId=1, limit=10, order=None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    if limit == None:
        limit = ""
    else:
        limit = """ LIMIT """ + str(adapt(limit))

    if order == None:
        order = 2

    order = { 1: "ORDER BY schema ASC,name ASC ",
              2: "ORDER BY table_size DESC" ,
              3: "ORDER BY table_size - min_table_size DESC",
              4: "ORDER BY CASE WHEN min_table_size > 0 THEN table_size::float / min_table_size ELSE 0 END DESC",
              5: "ORDER BY index_size DESC",
              6: "ORDER BY index_size - min_index_size DESC",
              7: "ORDER BY CASE WHEN min_index_size > 0 THEN index_size::float / min_index_size ELSE 0 END DESC" }[int(order)]

    cur.execute("""SELECT MAX(tsd_timestamp) AS max_date
                     FROM monitor_data.table_size_data
                    WHERE tsd_table_id IN ( SELECT t_id FROM monitor_data.tables WHERE t_host_id = """+str(adapt(hostId))+""")
                      AND tsd_timestamp > current_date - 1""")

    maxTime = None
    for record in cur:
        maxTime = record['max_date']

    if maxTime == None:
        cur.close()
        conn.close()
        return []

    sql = """SELECT * FROM ( SELECT t_schema AS schema,
                          t_name AS name,
                          tsd_table_size AS table_size,
                          tsd_index_size AS index_size,
                          COALESCE ( ( SELECT MIN(tsd_table_size) FROM monitor_data.table_size_data st WHERE td.tsd_table_id = st.tsd_table_id AND st.tsd_timestamp > ('now'::timestamp - '7 days'::interval) ), 0) AS min_table_size,
                          COALESCE ( ( SELECT MIN(tsd_index_size) FROM monitor_data.table_size_data st WHERE td.tsd_table_id = st.tsd_table_id AND st.tsd_timestamp > ('now'::timestamp - '7 days'::interval) ), 0) AS min_index_size

                     FROM monitor_data.table_size_data td
                     JOIN monitor_data.tables ON t_id = td.tsd_table_id
                    WHERE td.tsd_timestamp = """ + str(adapt(maxTime)) + """ AND t_host_id = """ + str(adapt(hostId)) + """ ) _t """ + order + """ """ + limit

    cur.execute( sql )

    list = []
    for r in cur:

        d = {}
        for k in r.keys():
            d[k] = r[k]

        d['table_size_pretty'] = makePrettySize( r['table_size'] )
        d['index_size_pretty'] = makePrettySize( r['index_size'] )
        d['table_size_delta'] = makePrettySize( r['table_size'] - r['min_table_size'] )
        d['index_size_delta'] = makePrettySize( r['index_size'] - r['min_index_size'] )
        if r['min_table_size'] > 0:
            d['growth'] = round( ( ( float(r['table_size']) / r['min_table_size'] ) - 1) * 100 , 1 )
        else:
            d['growth'] = 0

        if r['min_index_size'] > 0:
            d['growth_index'] = round( ( ( float(r['index_size']) / r['min_index_size']) - 1) * 100 , 1 )
        else:
            d['growth_index'] = 0

        list.append(d)

    cur.close()
    conn.close()

    return list
Example #30
def getDatabaseSizes(host_id=None):
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    cur.execute(getSizeTrendSQL(host_id))
    size_data = {}

    current_host = 0

    last_timestamp = None

    for record in cur:

        if record['t_host_id'] != current_host:
            current_host = record['t_host_id']
            set_ins = False
            set_del = False
            set_upd = False

            l_ins = None
            l_upd = None
            l_del = None
            last_timestamp = None

        if last_timestamp == None:
            last_timestamp = int(
                time.mktime(record['tsd_timestamp'].timetuple()) * 1000)

        if not record['t_host_id'] in size_data:
            size_data[record['t_host_id']] = {
                'size': [],
                'ins': [],
                'upd': [],
                'del': []
            }
        """ exclude 0 values, otherwise there is a big peak at start, with wraparound this should be ok"""

        if not set_ins and record['s_ins'] != 0:
            l_ins = record['s_ins']
            set_ins = True

        if not set_upd and record['s_upd'] != 0:
            l_upd = record['s_upd']
            set_upd = True

        if not set_del and record['s_del'] != 0:
            l_del = record['s_del']
            set_del = True

        if l_ins == None:
            l_ins = record['s_ins']

        if l_upd == None:
            l_upd = record['s_upd']

        if l_del == None:
            l_del = record['s_del']

        size_data[record['t_host_id']]['size'].append(
            (record['tsd_timestamp'], record['size']))
        size_data[record['t_host_id']]['ins'].append(
            (record['tsd_timestamp'], max(record['s_ins'] - l_ins, 0)))
        size_data[record['t_host_id']]['del'].append(
            (record['tsd_timestamp'], max(record['s_del'] - l_del, 0)))
        size_data[record['t_host_id']]['upd'].append(
            (record['tsd_timestamp'], max(record['s_upd'] - l_upd, 0)))

        l_ins = record['s_ins']
        l_upd = record['s_upd']
        l_del = record['s_del']

        last_timestamp = int(
            time.mktime(record['tsd_timestamp'].timetuple()) * 1000)

    cur.close()
    DataDB.closeDataConnection(conn)

    return size_data
Example #31
def getLoadReportData():
    conn = DataDB.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    query = """
            with q as (
                select 
                  t_host_id
                , week
                , (select round((sum(tsd_table_size) + sum(tsd_index_size))/10^9::numeric,1)::text  -- table + index, in GB
                     from monitor_data.table_size_data
                    where tsd_timestamp = max) as db_size 
                from (
                    select
                        t_host_id
                      , extract(week from tsd_timestamp) as week               
                      , max(tsd_timestamp)
                    from  monitor_data.table_size_data
                        , monitor_data.tables
                    where tsd_timestamp > ('now'::timestamp - '9 weeks'::interval)
                    and tsd_table_id = t_id
                    group by t_host_id, extract(week from tsd_timestamp)
                ) a
            )
            select 
                  load_host_id AS id,
                  extract(week from load_timestamp)::text AS kw,
                  round(avg(load_1min_value)/100,2) AS avg,
                  round(max(load_1min_value)/100,2) AS max,
                  to_char(min(load_timestamp::date),'dd.mm.YYYY') AS min_date,
                  to_char(max(load_timestamp::date),'dd.mm.YYYY') AS max_date,
                  min(load_timestamp::date) AS sort_date,
                  max(q.db_size) as db_size
             from monitor_data.host_load
                , monitor_data.hosts
                , q
            where host_id = load_host_id
              and host_enabled
              and load_timestamp > ('now'::timestamp - '9 weeks'::interval)
              and extract(dow from load_timestamp) IN(1,2,3,4,5)                      
              and q.t_host_id = load_host_id
              and q.week = extract(week from load_timestamp)
            group by load_host_id, extract(week from load_timestamp)
            order by 1 ASC,7 DESC
            """
    cur.execute(query)

    data = defaultdict(list)

    lastRR = None

    for r in cur:

        rr = {
            'id': r['id'],
            'avg': r['avg'],
            'max': r['max'],
            'min_date': r['min_date'],
            'max_date': r['max_date'],
            'db_size': r['db_size'],
            'trendAvg': 0,
            'trendMax': 0,
            'kw': r['kw']
        }

        if lastRR != None and lastRR['id'] == rr['id']:
            if lastRR['max'] < r['max']:
                lastRR['trendMax'] = -1
            elif lastRR['max'] > r['max']:
                lastRR['trendMax'] = 1

            if lastRR['avg'] < r['avg']:
                lastRR['trendAvg'] = -1
            elif lastRR['avg'] > r['avg']:
                lastRR['trendAvg'] = 1

            # As in the variant above, db_size is text here, so this comparison
            # is lexicographic rather than numeric.
            if lastRR['db_size'] < r['db_size']:
                lastRR['trendSize'] = -1
            elif lastRR['db_size'] > r['db_size']:
                lastRR['trendSize'] = 1

        data[int(r['id'])].append(rr)
        lastRR = rr

    cur.close()
    conn.close()

    return sorted(
        data.values(),
        key=lambda x: hosts.hosts[x[0]['id']]['settings']['uiShortName'])