Example 1
def upsert_reading_data_subsamples(fill, data, mcs):
    conn = util.getConn()
    cur = conn.cursor()
    # TODO: Is this too slow?
    try:
        sql = '''DELETE FROM reading_data_subsample rds WHERE
        rds.bin_id = %s AND rds.read_type_id = %s AND rds.reading_subsample_id IN
        (SELECT id from reading_subsample rs where rs.datetime >= %s AND rs.datetime <= %s)'''
        cur.execute(sql, (fill['bin_id'], READ_TYPES['mcp'], data['datetime'][0], data['datetime'][-1]))
        dt5 = datetime.timedelta(minutes=5)
        for i in xrange(len(data['datetime'])):
            sql = '''SELECT update_reading_data_subsample(%s, %s, %s, %s, %s);'''
            cur.execute(sql, (data['datetime'][i], fill['bin_id'], MCP_SUBSECTION, READ_TYPES['mcp'], mcs[i]))
            if i != len(data['datetime'])-1:
                cur.execute(sql, (data['datetime'][i]-dt5, fill['bin_id'], MCP_SUBSECTION, READ_TYPES['mcp'], mcs[i]))
                cur.execute(sql, (data['datetime'][i]+dt5, fill['bin_id'], MCP_SUBSECTION, READ_TYPES['mcp'], mcs[i]))

        conn.commit()
        cur.close()
        conn.close()
        cur = None
        conn = None
    finally:
        if cur:
            cur.close()
        if conn:
            conn.rollback()
            conn.close()
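
The argument shapes are implied by the function body: fill is a row dict carrying bin_id, data['datetime'] is a list of timestamps, and mcs is a parallel list of MC values. A hypothetical call, with illustrative values only, might look like:

import datetime

fill = {'bin_id': 3}  # row from the fill table (only bin_id is used here)
start = datetime.datetime(2012, 9, 1, 12, 0)
data = {'datetime': [start + datetime.timedelta(minutes=20 * i) for i in range(4)]}
mcs = [18.2, 17.9, 17.5, 17.1]  # one MC value per timestamp in data['datetime']

upsert_reading_data_subsamples(fill, data, mcs)
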
Example 2
def r_diag_sensor_data_latest(con=None):
    sql = """SELECT
    sdl.value, sdl.raw_data, sdl.error_code, sdl.datetime,
    st.name as sensor_type_name, rt.name as read_type_name, rt.short_name as read_type_short_name, rt.units,
    d.name as device_name, d.info as device_info, dt.name as device_type_name, d.mid_name, d.port, d.address, d.bin_id, d.bin_section_id,
    b.name as bin_name, bs.name as bin_section_name
FROM
    sensor_data_latest sdl, sensor s, device d, bin b, bin_section bs, device_type dt, sensor_type st, read_type rt
WHERE
    sdl.datetime >= date_trunc('year', now()) AND sdl.datetime < date_trunc('year', now())+'1 years'::interval AND
    s.id = sdl.sensor_id AND d.id = s.device_id AND b.id = d.bin_id AND bs.id = d.bin_section_id AND
    dt.id = d.device_type_id AND st.id = s.sensor_type_id AND rt.id = st.read_type_id"""
    close_con = False
    try:
        if not con:
            con = util.getConn()
            close_con = True
        cur = con.cursor(cursor_factory=dbapi2extras.DictCursor)
        cur.execute(sql)
        results = []
        for row in cur:
            result_row = {}
            for key in row.keys():
                result_row[key] = row[key]
            results.append(result_row)
        cur.close()
        return results
    finally:
        if close_con:
            con.close()
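
Callers may pass in an existing connection (as Example 3 below does) or let the function open and close its own. A minimal usage sketch:

# Let the helper manage its own connection...
rows = r_diag_sensor_data_latest()

# ...or reuse one you already hold; in that case it is not closed for you.
conn = util.getConn()
rows = r_diag_sensor_data_latest(conn)
conn.close()
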
Example 3
def readings_last_time_delta():
    sample_period = request.params.get("sample_period")
    if not sample_period:
        sample_period = 5
    conn = util.getConn()
    cur = conn.cursor()
    cur.execute(
        'SELECT extract(epoch from (now()-datetime))/60 from reading_subsample WHERE sample_period = %s ORDER by datetime desc limit 1',
        (sample_period, ))
    row = cur.fetchone()
    if not row:
        cur.close()
        conn.close()
        abort(404, "Last reading not found. No readings in the database?")
    minutes = row[0]
    cur.close()

    latest = r_diag_sensor_data_latest(conn)
    conn.close()
    error_info = []
    now = util.getDateFromParam('now')
    for row in latest:
        if (now - row['datetime']).total_seconds() > 300:
            error_info.append({
                "bin_name": row['bin_name'],
                "bin_section_name": row['bin_section_name'],
                "device_name": row['device_name'],
                "sensor_type_name": row['sensor_type_name'],
                "port": row['port'],
                "address": row['address'],
                "delta": (now - row['datetime']).total_seconds()
            })
    return util.responseJSON({'td': minutes, "info": error_info})
Example 4
def sensor_last_error_code_count():
    sql = "SELECT count(error_code) FROM sensor_data WHERE error_code IS NOT NULL and datetime > now() - interval '10 minutes'"
    conn = util.getConn()
    cur = conn.cursor()
    cur.execute(sql)
    row = cur.fetchone()
    count = 0
    error_info = []
    if row:
        count = row[0]
        sql = """SELECT distinct ON (bin_name, bin_section_name, sensor_type_name, port, address) bin.name as bin_name,
            bin_section.name as bin_section_name,
            device.name as device_name,
            sensor_type.name as sensor_type_name,
            device.port,
            device.address
            FROM bin, bin_section, sensor, device, sensor_type, sensor_data
            WHERE sensor_data.error_code IS NOT NULL and sensor_data.datetime > now() - interval '10 minutes' and
            sensor.id = sensor_data.sensor_id and
            device.id=sensor.device_id and
            sensor_type.id=sensor.sensor_type_id and
            bin.id=device.bin_id and bin_section.id=device.bin_section_id"""
        cur.execute(sql)
        for row in cur:
            error_info.append({
                "bin_name": row[0],
                "bin_section_name": row[1],
                "device_name": row[2],
                "sensor_type_name": row[3],
                "port": row[4],
                "address": row[5]
            })
    cur.close()
    conn.close()
    return util.responseJSON({"error_code_count": count, "info": error_info})
Example 5
def get_fill_by_id(fill_id, conn=None):
    close_conn = False
    if not conn:
        conn = util.getConn()
        close_conn = True

    row = util.getRowFromTableById('fill', int(fill_id), conn=conn)
    if row:
        cur = conn.cursor()
        cur.execute(
            "SELECT mc, datetime FROM fill_during_mc WHERE fill_id = %s ORDER BY datetime",
            (fill_id, ))
        during_mc = cur.fetchall()
        row['during_mc'] = during_mc
        cur.close()

        sheller_windows = util.getRowsFromTable(
            'fill_sheller_window',
            extraWhere=" fill_id = %s ",
            extraArgs=(fill_id, ),
            orderStatement=" ORDER BY begin_datetime ",
            conn=conn)
        row['sheller_windows'] = sheller_windows
    if close_conn:
        conn.close()

    return row
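
A minimal usage sketch (the fill id 42 is illustrative); the returned row is augmented with 'during_mc' and 'sheller_windows':

fill = get_fill_by_id(42)  # opens and closes its own connection when none is passed
if fill:
    print fill['during_mc']         # [(mc, datetime), ...] ordered by datetime
    print fill['sheller_windows']   # rows from fill_sheller_window for this fill
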
Example 6
def subscription_event_delete(subscriber_id):
    conn = util.getConn()
    cur = conn.cursor()
    row = util.getRowsFromTable("subscription",
                                extraWhere="subscriber_id=%s",
                                extraArgs=(subscriber_id, ),
                                conn=conn)
    if not row:
        conn.close()
        abort(404, 'Subscriber not found')

    util.updateRowById("subscription",
                       row[0]["id"],
                       {"last_datetime": util.getDateFromParam("now")},
                       cursor=cur)

    rows = util.getRowsFromTable('subscription_event',
                                 extraWhere="subscriber_id=%s",
                                 extraArgs=(subscriber_id, ),
                                 conn=conn)
    if rows:
        cur.execute("DELETE FROM subscription_event where subscriber_id = %s",
                    (subscriber_id, ))
    conn.commit()
    conn.close()
    return util.responseJSON({"events": rows})
Example 7
def controls_last():
    sensor_id = request.params.get('sensor_id')
    if not sensor_id:
        abort(400, 'Missing sensor_id')
    sensor = util.getRowFromTableById("sensor",
                                      int(sensor_id),
                                      checkEnabled=True)
    if not sensor:
        abort(400, 'Invalid sensor_id')
    sensor_type = util.getRowFromTableById('sensor_type',
                                           sensor['sensor_type_id'])
    if not sensor_type['controllable']:
        abort(400, 'Sensor is not controllable')

    conn = util.getConn()
    cur = conn.cursor()
    cur.execute(
        "SELECT id FROM control WHERE sensor_id = %s ORDER BY posted_datetime desc limit 1",
        (sensor_id, ))
    row = cur.fetchone()

    if not row:
        cur.close()
        conn.close()
        return {'xlink': []}
    control_id = row[0]
    cur.close()
    conn.close()
    return {'xlink': ['/resources/controls/' + str(control_id)]}
Example 8
def computeMaxTempsDB(startDatetime,
                      endDatetime,
                      lutId,
                      startMC,
                      times,
                      conn=None):
    close_conn = False
    if not conn:
        conn = util.getConn()
        close_conn = True
    cur = conn.cursor()
    cur.execute(
        "SELECT begin_datetime, end_datetime FROM air_deduct WHERE " +
        "((begin_datetime >= %s AND begin_datetime <= %s) OR " +
        " (begin_datetime <= %s AND end_datetime IS NULL) OR " +
        "(begin_datetime <= %s AND end_datetime >= %s)) ",
        (startDatetime, endDatetime, startDatetime, startDatetime,
         startDatetime))
    deducts = cur.fetchall()

    cur.execute('SELECT hours_per_mc FROM mc_maxtemp_lut where id = %s',
                (lutId, ))
    row = cur.fetchone()
    if not row:
        cur.close()
        if close_conn:
            conn.close()
        return [None] * len(times)
    hours_per_mc = row[0]

    cur.execute(
        'SELECT mc, maxtemp FROM mc_maxtemp_lut_value WHERE mc_maxtemp_lut_id = %s ORDER BY mc DESC',
        (lutId, ))
    lut = cur.fetchall()
    cur.close()
    if close_conn:
        conn.close()

    return computeMaxTemps(startMC, startDatetime, deducts, times, lut,
                           hours_per_mc)
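
A hypothetical call, asking for values at hourly points over a twelve-hour run (the LUT id and starting MC are illustrative):

import datetime

start = datetime.datetime(2012, 9, 1, 8, 0)
end = start + datetime.timedelta(hours=12)
times = [start + datetime.timedelta(hours=h) for h in range(13)]

max_temps = computeMaxTempsDB(start, end, 1, 22.0, times)
# Returns one value per entry in times, or a list of Nones if the LUT id is unknown.
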
Example 9
def latex_create_fill_report_full(reportYear,
                                  fileNameBase=None,
                                  displayTZStr=None):
    #
    # create random file name, open for writing, and print file preamble stuff
    #
    if not fileNameBase:
        fileNameBase = tempfile.mktemp(prefix="fillRpt_full_", dir=".")
    filePath = "./gen_data/" + fileNameBase + ".tex"
    fh = open(filePath, 'w')
    fh.write(
        "\\documentclass{IsadoreReportFillFull}\n\\rptTitle{Fill report (full)}\n\\begin{document}\n\n"
    )
    #
    # get all fills (will filter by year later)
    #
    conn = util.getConn()
    # TODO: sort by fill number (in SQL?)
    # TODO: have DB only return fills from proper year to save CPU?
    dbFills = util.getRowsFromTable("fill", conn=conn)
    for dbFi, dbF in enumerate(dbFills):
        print dbFi, "/", len(dbFills)
        fillYear = dbF["air_begin_datetime"].year if dbF[
            "air_begin_datetime"] is not None else dbF["filled_datetime"].year
        if fillYear == reportYear:
            #
            # create fill plot image
            #
            startDT = util.getStartAirDT(dbF)
            endDT = util.getStopAirDT(dbF)
            fillGraphPath = None
            try:
                fillGraphPath = fillReport.createFillGraph(
                    startDT, endDT, dbF["bin_id"], binSecID_upper,
                    binSecID_lower)
            except Exception:
                # TODO: log error
                print "plot of fill data not created!"
            #
            # print the fill rpt page for the given fill if
            # the fill is from the correct year
            #
            fillReport.printFillPage(dbF, fh, fillGraphPath)
            fh.write("\n\\newpage\n\n")
            fh.write("%%% next fill %%%\n\n")
    conn.close()
    #
    # end writing to file
    #
    fh.write("\n\n\\end{document}")
    fh.close()
    #
    # latex
    #
    LatexFile(LATEX_IO_DIR, fileNameBase)
    #
    # return PDF
    #
    return fileNameBase
Example 10
def subscription_delete(subscriber_id):
    conn = util.getConn()
    cur = conn.cursor()
    cur.execute("DELETE FROM subscriber WHERE subscriber_id = %s",
                (subscriber_id, ))
    conn.commit()
    conn.close()
    return HTTPResponse(output="Subscription deleted.", status=204)
Example 11
def diag_sensor_data_latest_reset():
    conn = util.getConn()
    cur = conn.cursor()
    sql = "SELECT err_out_latest_sensor_data()"
    cur.execute(sql)
    conn.commit()
    cur.close()
    conn.close()
    return HTTPResponse(output="Sensor Data Latest Reset", status=204)
Example 12
def update_last_mc_predictions(bin_id, mc):
    conn = util.getConn()
    cur = conn.cursor()
    sql = '''DELETE FROM last_mc_prediction WHERE bin_id = %s'''
    cur.execute(sql, (bin_id,))
    sql = '''INSERT INTO last_mc_prediction (bin_id, value) VALUES (%s, %s)'''
    cur.execute(sql, (bin_id, mc))
    conn.commit()
    cur.close()
    conn.close()
Example 13
def luts_mc_maxtemp_getsingle(id):
    conn = util.getConn()
    lut = util.getRowFromTableById("mc_maxtemp_lut", id, conn=conn)
    values = util.getRowsFromTable("mc_maxtemp_lut_value",
                                   extraWhere="mc_maxtemp_lut_id=%s",
                                   extraArgs=(lut['id'], ),
                                   orderStatement=" ORDER BY mc ",
                                   conn=conn)
    lut["values"] = values
    conn.close()
    return util.responseJSON(lut)
Example 14
def fill_new():
    # TODO: Check that fillNumber is not already taken for the year.
    # XXX: I have a feeling that this needs to be rewritten
    # get parameter values

    fill_number = request.params.get("fill_number", None)
    start_datetime = util.getDateFromParam(
        request.params.get("air_begin_datetime"))
    fill_datetime = util.getDateFromParam(request.params.get("start_datetime"))
    bin_id = request.params.get("bin_id", None)
    from_subscription_id = request.params.get("from_subscription_id", None)

    conn = util.getConn()
    cur = conn.cursor()

    # check if bin_id exists
    if not util.getRowFromTableById("bin", bin_id, conn=conn):
        conn.close()
        abort(400, "Bin id does not exist.")

    if (not fill_datetime and not start_datetime) or not fill_number:
        conn.close()
        abort(
            400,
            '(air_begin_datetime or start_datetime) and fill_number are required.'
        )

    # create new DB entry
    fill_id = util.insertRow("fill", {
        "fill_number": fill_number,
        "air_begin_datetime": start_datetime,
        "filled_datetime": fill_datetime,
        "bin_id": bin_id
    },
                             cursor=cur)

    if fill_datetime:
        year = fill_datetime.year
    else:
        year = start_datetime.year

    subscription_handler.add_event(
        {
            "key": 'fill',
            "year": year,
            "ids": [fill_id],
            "type": "add",
            "when": util.getDateFromParam("now")
        }, conn, cur, from_subscription_id)

    conn.commit()
    conn.close()
    # return the newly created fill's id url
    return {"xlink": ["/resources/data/fills/" + str(fill_id)]}
Example 15
def luts_mc_maxtemp_fast():
    conn = util.getConn()
    luts = util.getRowsFromTable("mc_maxtemp_lut", conn=conn)
    for lut in luts:
        values = util.getRowsFromTable("mc_maxtemp_lut_value",
                                       extraWhere="mc_maxtemp_lut_id=%s",
                                       extraArgs=(lut['id'], ),
                                       orderStatement=" ORDER BY mc ",
                                       conn=conn)
        lut["values"] = values
    conn.close()
    return util.responseJSON({'luts': luts})
Example 16
def subscription_new():
    subscriber_id = str(uuid.uuid4())
    conn = util.getConn()
    cur = conn.cursor()
    sid = util.insertRow('subscription', {
        "subscriber_id": subscriber_id,
        "last_datetime": util.getDateFromParam('now')
    },
                         cursor=cur)
    #cur.execute("DELETE FROM subscription WHERE last_datetime < now() - interval '2 hours'")
    conn.commit()
    conn.close()
    return {"subscriber_id": subscriber_id}
Example 17
def add_event(event, conn=None, cur=None, from_subscription_id=None):
    logging.debug('add_event' + str(event))
    madeConn = False
    if not conn:
        madeConn = True
        conn = util.getConn()
        cur = conn.cursor()
    subs = get_subscribers(event, conn)
    for s in subs:
        if s != from_subscription_id:
            util.insertRow("subscription_event",
                           {"subscriber_id": s,
                            "event": json.dumps(event, default=util.customJSONHandler)},
                           cursor=cur)
    if madeConn:
        conn.commit()
        conn.close()
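
For reference, fill_new() in Example 14 above builds the event dict this function consumes; a standalone call with illustrative values might look like:

event = {
    "key": "fill",
    "year": 2012,
    "ids": [42],  # hypothetical fill id
    "type": "add",
    "when": util.getDateFromParam("now")
}
add_event(event)  # opens, commits, and closes its own connection when conn is None
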
Example 18
def fill_list_fast():
    begin_span1 = util.getDateFromParam(request.params.get("begin_span1"))
    begin_span2 = util.getDateFromParam(request.params.get("begin_span2"))
    bin_id = request.params.get("bin_id", None)

    extra_where = ""
    extra_args = tuple()

    if bin_id:
        try:
            bin_id = int(bin_id)
        except ValueError:
            abort(400, 'Invalid bin_id')
        if extra_where:
            extra_where += " AND "
        extra_where += " bin_id = %s "
        extra_args += (bin_id, )

    if begin_span1 and begin_span2:
        if extra_where:
            extra_where += " AND "
        extra_where += "((air_begin_datetime >= %s AND air_begin_datetime <= %s) OR (filled_datetime >= %s AND filled_datetime <= %s) ) "
        extra_args += (begin_span1, begin_span2, begin_span1, begin_span2)

    conn = util.getConn()
    rows = util.getRowsFromTable(
        'fill',
        extraWhere=extra_where,
        extraArgs=extra_args,
        orderStatement=
        " ORDER by coalesce(filled_datetime, air_begin_datetime) ",
        conn=conn)
    cur = conn.cursor()
    for row in rows:
        cur.execute(
            "SELECT mc, datetime FROM fill_during_mc WHERE fill_id = %s ORDER BY datetime",
            (row["id"], ))
        during_mc = cur.fetchall()
        row['during_mc'] = during_mc
        sheller_windows = util.getRowsFromTable(
            'fill_sheller_window',
            extraWhere=" fill_id = %s ",
            extraArgs=(row['id'], ),
            orderStatement=" ORDER BY begin_datetime ",
            conn=conn)
        row['sheller_windows'] = sheller_windows
    cur.close()
    conn.close()

    return util.responseJSON({'fills': rows})
Example 19
import re
import sys
import urllib2
import time
import socket
import threadpool
import util

from fetch2 import get_content

socket.setdefaulttimeout(5)

reload(sys)
sys.setdefaultencoding("utf-8")

UTFPattern = re.compile(r"charset=utf", re.I)

(conn, cursor) = util.getConn()

def bulk_crawl(r):
    ping = 10000
    url = r[0]
    
    print "url:" + url
    html = "ERROR"

    try:
        html = get_content(url)
    except Exception, e:
        pass
        # in milliseconds

    if (html == "ERROR"):
Example 20
#!/usr/bin/env python
#coding=utf-8
# @author gambol
# @date 2012-7-18
# Score each site according to the rules

import util
import sys

reload(sys)
sys.setdefaultencoding("utf-8")

conn, cursor = util.getConn()

def main():
    sql = "select s.id, s.name, s.line, s.description, s.url, s.title, s.category_id, ss.ping from tophey.server_info as s left join tophey.server_sys_info as ss on s.id = ss.id  where s.is_disabled = 0 and ss.name is not NULL;"
    cursor.execute(sql)
    result = cursor.fetchall()

    for r in result:
        s = score(r)
        storeScore(r[0], s)

def score(r):
    id = r[0]
    name = r[1]
    line = r[2]
    desc = r[3]
    url = r[4]
    title = r[5]
    category_id = r[6]