Example #1
def WriteRawSmsToDb(msglist, sensor_nums):
    # guard: an empty msglist would otherwise produce a malformed INSERT below
    if not msglist:
        return
    query = "INSERT INTO smsinbox (timestamp,sim_num,sms_msg,read_status,web_flag) VALUES "
    for m in msglist:
        if sensor_nums.find(m.simnum[-10:]) == -1:
            # if re.search(m.simnum[-10:],sensor_nums):
            web_flag = 'W'
            print m.data[:20]
            if cfg.config().mode.script_mode == 'gsmserver':
                ret = dsll.sendReceivedGSMtoDEWS(str(m.dt.replace("/", "-")),
                                                 m.simnum, m.data)

                #if the SMS message was sent successfully to the web socket server,
                #   change web_flag to 'WSS' which means "Websocket Server Sent"
                if ret == 0:
                    web_flag = 'WSS'
        else:
            web_flag = 'S'
        query += "('%s','%s','%s','UNREAD','%s')," % (str(
            m.dt.replace("/", "-")), str(
                m.simnum), str(m.data.replace("'", "\"")), web_flag)
        # query += "('" + str(m.dt.replace("/","-")) + "','" + str(m.simnum) + "','"
        # query += str(m.data.replace("'","\"")) + "','UNREAD'),"

    # just to remove the trailing ','
    query = query[:-1]
    # print query

    dbio.commitToDb(query, "WriteRawSmsToDb", instance='GSM')
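A note on the block above: the INSERT is built by string interpolation, so a quote that survives the manual replace() can still break the statement. Below is a minimal parameterized sketch of the same insert (the gsmserver/websocket branch is omitted), assuming a MySQLdb connection like the one dbio.SenslopeDBConnect('local') returns in Example #13:

def WriteRawSmsToDbParam(db, msglist, sensor_nums):
    # hedged sketch, not the project's API: the driver escapes values,
    # so no manual quote replacement is needed
    rows = []
    for m in msglist:
        web_flag = 'S' if sensor_nums.find(m.simnum[-10:]) != -1 else 'W'
        rows.append((m.dt.replace("/", "-"), m.simnum, m.data, 'UNREAD', web_flag))
    if rows:
        cur = db.cursor()
        cur.executemany("INSERT INTO smsinbox "
                        "(timestamp,sim_num,sms_msg,read_status,web_flag) "
                        "VALUES (%s,%s,%s,%s,%s)", rows)
        db.commit()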
Example #2
def checkAlertMessage():
    c = cfg.config()
    dbio.createTable("runtimelog", "runtime")
    server.logRuntimeStatus("alert", "checked")

    alertfile = cfg.config().fileio.allalertsfile
    f = open(alertfile, 'r')
    alertmsg = f.read()
    f.close()

    print alertmsg
    if not alertmsg:
        print '>> No alert msg read.'
        return

    # write alert message to db
    server.writeAlertToDb(alertmsg)
Example #3
def CheckAlertMessages():
    c = cfg.config()
    alllines = ''
    print c.fileio.allalertsfile
    if os.path.isfile(c.fileio.allalertsfile) and os.path.getsize(
            c.fileio.allalertsfile) > 0:
        f = open(c.fileio.allalertsfile, 'r')
        alllines = f.read()
        f.close()
    else:
        print '>> Error reading file', c.fileio.allalertsfile
    return alllines
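Examples #2 and #3 both use the open/read/close pattern, which leaks the file handle if read() raises. A with-block variant of CheckAlertMessages, a sketch using the same cfg fields:

def CheckAlertMessagesSafe():
    c = cfg.config()
    path = c.fileio.allalertsfile
    if os.path.isfile(path) and os.path.getsize(path) > 0:
        with open(path, 'r') as f:  # closed even if read() raises
            return f.read()
    print '>> Error reading file', path
    return ''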
Example #4
def getAllowedPrefixes(network):
    c = cfg.config()
    if network.upper() == 'SMART':
        prefix_list = c.simprefix.smart.split(',')
    else:
        prefix_list = c.simprefix.globe.split(',')

    extended_prefix_list = []
    for p in prefix_list:
        extended_prefix_list.append("639" + p)
        extended_prefix_list.append("09" + p)

    return extended_prefix_list
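For illustration, with a hypothetical config where simprefix.smart is '18,19', each two-digit prefix is expanded into its international and local forms:

# hypothetical config value: c.simprefix.smart = '18,19'
print getAllowedPrefixes('SMART')
# ['63918', '0918', '63919', '0919']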
Example #5
def SendAlertGsm(network, alertmsg):
    c = cfg.config()
    try:
        if network == 'GLOBE':
            numlist = c.simprefix.globe.split(",")
        else:
            numlist = c.simprefix.smart.split(",")
        # f = open(allalertsfile,'r')
        # alllines = f.read()
        # f.close()
        for n in numlist:
            gsmio.sendMsg(alertmsg, n)
    except IndexError:
        print "Error sending all_alerts.txt"
Example #6
def SendMessagesFromDb(network, limit=10):
    c = cfg.config()
    if not c.mode.sendmsg:
        return
    allmsgs = dbio.getAllOutboxSmsFromDb("UNSENT", network, limit)
    if len(allmsgs) <= 0:
        # print ">> No messages in outbox"
        return

> Sending messagess">
    print ">> Sending messages from db"

    msglist = []
    for item in allmsgs:
        smsItem = gsmio.sms(item[0], str(item[2]), str(item[3]), str(item[1]))
        msglist.append(smsItem)
    allmsgs = msglist

    send_success_list = []
    fail_success_list = []

    allowed_prefixes = getAllowedPrefixes(network)
    # cycle through all messages
    for msg in allmsgs:
        # get recipient numbers as a list
        recepient_list = msg.simnum.split(",")
        for num in recepient_list:
            try:
                num_prefix = re.match("^ *((0)|(63))9\d\d", num).group()
                num_prefix = num_prefix.strip()
            except AttributeError:
                # re.match returned None: no recognizable prefix in this number
                continue
            # check if recipient number is in the allowed prefix list
            if num_prefix in allowed_prefixes:
                ret = gsmio.sendMsg(msg.data, num.strip())
                if ret == 0:
                    send_success_list.append(msg.num)
                else:
                    fail_success_list.append(msg.num)
            else:
                print "Number not in prefix list", num_prefix

    dbio.setSendStatus("FAIL", fail_success_list)
    dbio.setSendStatus("SENT", send_success_list)

    #Get all outbox messages with send_status "SENT" and attempt to send
    #   chatterbox acknowledgements
    #   send_status will be changed to "SENT-WSS" if successful
    dsll.sendAllAckSentGSMtoDEWS()
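A quick worked example of the prefix regex used above: it tolerates leading spaces and accepts either the local 09xx or international 639xx form.

import re

num_prefix = re.match("^ *((0)|(63))9\d\d", " 09175551234").group()
print num_prefix.strip()
# '0917' -- this is the value tested against allowed_prefixes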
Example #7
def logRuntimeStatus(script_name, status):
    if status == 'alive':
        ts = dt.today()
        diff = (ts.minute % 10) * 60 + ts.second
        ts = ts - td(seconds=diff)
        logtimestamp = ts.strftime("%Y-%m-%d %H:%M:00")
    else:
        logtimestamp = dt.today().strftime("%Y-%m-%d %H:%M:00")

    print ">> Logging runtime '" + status + "' at " + logtimestamp

    query = """insert ignore into runtimelog
                (timestamp,script_name,status)
                values ('%s','%s','%s')
                """ % (logtimestamp, script_name, status)

    dbio.commitToDb(query, 'logRuntimeStatus', cfg.config().mode.logtoinstance)
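A worked example of the flooring above: for status 'alive' the timestamp is snapped down to the last 10-minute boundary, so repeated runs within the same window collapse into a single "insert ignore" row.

from datetime import datetime as dt
from datetime import timedelta as td

ts = dt(2016, 1, 1, 12, 37, 45)
diff = (ts.minute % 10) * 60 + ts.second  # 7*60 + 45 = 465 seconds
print (ts - td(seconds=diff)).strftime("%Y-%m-%d %H:%M:00")
# 2016-01-01 12:30:00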
Example #8
def main():

    lockscript.get_lock('alertgenexec')

    print dt.today().strftime("%c")

    c = cfg.config()

    ongoing = []

    mc = memcache.Client(['127.0.0.1:11211'], debug=0)

    proc_limit = c.io.proc_limit

    while True:
        alertgenlist = mc.get('alertgenlist')

        print alertgenlist

        if not alertgenlist:
            break

        col = alertgenlist.pop()

        # push the shortened list back so other consumers see the update
        mc.set('alertgenlist', alertgenlist)

        command = "~/anaconda2/bin/python %s %s" % (c.fileio.alertgenscript,
                                                    col)

        print "Running", col, "alertgen"

        if lockscript.get_lock('alertgen for %s' % col, exitifexist=False):
            p = subprocess.Popen(command,
                                 stdout=subprocess.PIPE,
                                 shell=True,
                                 stderr=subprocess.STDOUT)
        else:
            continue

        while countAlertAnalysisInstances() > proc_limit:
            time.sleep(5)
            print '.',
Example #9
def CheckMessageSource(msg):
    c = cfg.config()
    identity = dbio.checkNumberIfExists(msg.simnum, 'community')
    if identity:
        smsmsg = "From: %s %s of %s\n" % (identity[0][1], identity[0][0],
                                          identity[0][2])
        smsmsg += msg.data
        server.WriteOutboxMessageToDb(smsmsg, c.smsalert.communitynum)
        return
    elif dbio.checkNumberIfExists(msg.simnum, 'dewsl'):
        print ">> From senslope staff"
        return

    name = dbio.checkNumberIfExists(msg.simnum, 'sensor')
    if name:
        print ">> From sensor", name[0][0]
    else:
        print "From unknown number ", msg.simnum
Example #10
def main():
    try:
        writetodb = sys.argv[1] != 'test'
    except IndexError:
        writetodb = True

    c = cfg.config()
    dbio.createTable("runtimelog", "runtime")
    server.logRuntimeStatus("alert", "checked")

    print '>> Checking for alert sms'
    alertmsg = server.CheckAlertMessages()

    print alertmsg
    if alertmsg:
        # server.WriteOutboxMessageToDb(alertmsg,c.smsalert.smartnum)
        # server.WriteOutboxMessageToDb(alertmsg,c.smsalert.globenum)
        query = """select nickname, numbers from dewslcontacts where grouptags like '%alert%'"""
        contacts = dbio.querydatabase(query, 'checkalert')
        # print contacts

        query = "INSERT INTO smsoutbox (timestamp_written,recepients,sms_msg,send_status) VALUES "

        tsw = dt.today().strftime("%Y-%m-%d %H:%M:%S")
        for item in contacts:
            message = 'SENSOR ALERT:\n%s' % (alertmsg)
            message = message.replace("ALERT", "AL3RT")
            query += "('%s','%s','%s','UNSENT')," % (tsw, item[1], message)
        query = query[:-1]

        if writetodb: dbio.commitToDb(query, 'checkalertmsg', 'GSM')
        else: print query
        print 'done'
    else:
        print '>> No alert msg read.'
Example #11
    #11. reordering columns
    monitoring=monitoring[['id','xz','xy']]
    
    return monitoring,monwin

def time_site(target,df_sa):
    if (target < len(df_sa)):
        site = df_sa['site'].iloc[target]
        t_time = df_sa.index[target]
        return site,t_time
    else:
        print "Error. Target > len(df_sa)"
        

io = cfg.config()
num_roll_window_ops = io.io.num_roll_window_ops
roll_window_length = io.io.roll_window_length
data_dt = io.io.data_dt
rt_window_length = io.io.rt_window_length

roll_window_numpts=int(1+roll_window_length/data_dt)

col_pos_interval = io.io.col_pos_interval
col_pos_num = io.io.num_col_pos
to_fill = io.io.to_fill
to_smooth = io.io.to_smooth
output_path = (__file__)
output_file_path = (__file__)
proc_file_path = (__file__)
CSVFormat = '.csv'
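Note that output_path, output_file_path and proc_file_path above are all set to (__file__), i.e. the script's own filename rather than a directory. If a directory was intended, the usual idiom would be the sketch below (an assumption, since the real paths are not shown):

import os
output_path = os.path.dirname(os.path.realpath(__file__))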
Example #12
def WriteEQAlertMessageToDb(alertmsg):
    c = cfg.config()
    WriteOutboxMessageToDb(alertmsg, c.smsalert.globenum)
    WriteOutboxMessageToDb(alertmsg, c.smsalert.smartnum)
Example #13
import gsmSerialio as gsmio
import numpy as np

from time import localtime, strftime

import pandas as pd
import pandas.io.sql as psql  # used by readDataframe below
import senslopedbio as dbio   # same alias as in Example #18
import cfgfileio as cfg

df = 0
casedf = 0
siteHealthdf = 0
statdf = 0

globaldf = pd.DataFrame()

c = cfg.config()


def readDataframe():

    # global globaldf
    localdf = 0
    db, cur = dbio.SenslopeDBConnect('local')
    query = '''SELECT logger_health.logger_id, name, model_id, sim_num, health_case from logger_health inner join loggers on logger_health.logger_id= loggers.logger_id inner join logger_contacts on logger_health.logger_id= logger_contacts.logger_id where logger_health.health_case !=5 and  logger_health.health_id IN (select max(logger_health.health_id) from logger_health group by logger_id)'''

    try:
        localdf = psql.read_sql(query, db)
    except pd.io.sql.DatabaseError as e:
        print ">> Database error:", e
        localdf = 0
    return localdf
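readDataframe returns 0 as a sentinel on database errors, so callers have to type-check the result before using it, e.g.:

df = readDataframe()
if isinstance(df, pd.DataFrame):
    print df.head()
else:
    print ">> logger_health query failed"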
Example #14
def ProcessAllMessages(allmsgs, network):
    c = cfg.config()
    read_success_list = []
    read_fail_list = []

    cur_num = 0

    try:
        while allmsgs:
            isMsgProcSuccess = True
            print '\n\n*******************************************************'
            # process one text message at a time
            msg = allmsgs.pop(0)
            # msg.data = msg.data.upper()
            cur_num = msg.num

            msgname = checkNameOfNumber(msg.simnum)
            # For V1 sensors: strip unnecessary characters first (see PreProcessColumnV1)
            if re.search("\*FF", msg.data):
                ProcessPiezometer(msg.data, msg.simnum)
            # elif re.search("[A-Z]{4}DUE\*[A-F0-9]+\*\d+T?$",msg.data):
            elif re.search("[A-Z]{4}DUE\*[A-F0-9]+\*.*", msg.data):
                msg.data = PreProcessColumnV1(msg.data)
                ProcessColumn(msg.data, msg.dt, msg.simnum)
            elif re.search("EQINFO", msg.data.upper()):
                isMsgProcSuccess = ProcessEarthquake(msg)
            elif re.search("^PSIR ", msg.data.upper()):
                isMsgProcSuccess = qsi.ProcessServerInfoRequest(msg)
            elif re.search("^SENDGM ", msg.data.upper()):
                isMsgProcSuccess = qsi.ServerMessaging(msg)
            elif re.search("^ACK \d+ .+", msg.data.upper()):
                isMsgProcSuccess = amsg.processAckToAlert(msg)
            elif re.search("^ *(R(O|0)*U*TI*N*E )|(EVE*NT )",
                           msg.data.upper()):
                try:
                    gm = gndmeas.getGndMeas(msg.data)
                    RecordGroundMeasurements(gm)
                    # server.WriteOutboxMessageToDb("READ-SUCCESS: \n" + msg.data,c.smsalert.communitynum)
                    server.WriteOutboxMessageToDb(c.reply.successen,
                                                  msg.simnum)
                except ValueError as e:
                    print str(e)
                    errortype = re.search(
                        "(WEATHER|DATE|TIME|GROUND MEASUREMENTS|NAME)",
                        str(e).upper()).group(0)
                    print ">> Error in manual ground measurement SMS", errortype

                    server.WriteOutboxMessageToDb(
                        "READ-FAIL: (%s)\n%s" % (errortype, msg.data),
                        c.smsalert.communitynum)
                    server.WriteOutboxMessageToDb(str(e), msg.simnum)
                except:
                    server.WriteOutboxMessageToDb(
                        "READ-FAIL: (Unhandled) \n" + msg.data,
                        c.smsalert.communitynum)

            elif re.search("^[A-Z]{4,5}\*[xyabcXYABC]\*[A-F0-9]+\*[0-9]+T?$",
                           msg.data):
                try:
                    dlist = ProcTwoAccelColData(msg.data, msg.simnum, msg.dt)
                    if dlist:
                        if len(dlist[0][0]) == 6:
                            WriteSomsDataToDb(dlist, msg.dt)
                        else:
                            WriteTwoAccelDataToDb(dlist, msg.dt)
                            invokeProcessInBgnd(
                                "python ~/masynckaiser/client/bin/invoke-masync-CtoS-single.py %s"
                                % dlist[0][0])
                except IndexError:
                    print "\n\n>> Error: Possible data type error"
                    print msg.data
                except ValueError:
                    print ">> Value error detected"
            elif re.search("[A-Z]{4}\*[A-F0-9]+\*[0-9]+$", msg.data):
                #ProcessColumn(msg.data)
                ProcessColumn(msg.data, msg.dt, msg.simnum)
            #check if message is from rain gauge
            # elif re.search("^\w{4},[\d\/:,]+,[\d,\.]+$",msg.data):
            elif re.search("^\w{4},[\d\/:,]+", msg.data):
                ProcessRain(msg.data, msg.simnum)
            elif re.search(
                    r'(\w{4})[-](\d{1,2}[.]\d{02}),(\d{01}),(\d{1,2})/(\d{1,2}),#(\d),(\d),(\d{1,2}),(\d)[*](\d{10})',
                    msg.data):
                ProcessStats(msg.data, msg.dt)
            elif re.search("ARQ\+[0-9\.\+/\- ]+$", msg.data):
                ProcessARQWeather(msg.data, msg.simnum)
            elif msg.data.split('*')[0] == 'COORDINATOR' or msg.data.split(
                    '*')[0] == 'GATEWAY':
                isMsgProcSuccess = ProcessCoordinatorMsg(msg.data, msg.simnum)
            elif re.search("^MANUAL RESET", msg.data):
                server.WriteOutboxMessageToDb("SENSORPOLL SENSLOPE",
                                              msg.simnum)
                isMsgProcSuccess = True
            else:
                print '>> Unrecognized message format: '
                print 'NUM: ', msg.simnum
                print 'MSG: ', msg.data
                CheckMessageSource(msg)
                isMsgProcSuccess = False

            if isMsgProcSuccess:
                read_success_list.append(msg.num)
            else:
                read_fail_list.append(msg.num)
    # the returned lists let the caller update the read_status of processed
    # messages so that they will not be processed again in another run
    except:
        # print all the traceback routine so that the error can be traced
        print(traceback.format_exc())
        print ">> Setting message read_status to fatal error"
        dbio.setReadStatus("FATAL ERROR", cur_num)

    return read_success_list, read_fail_list
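The if/elif chain above is a first-match-wins dispatch from message-format regex to handler. A table-driven sketch of the same idea, simplified (the real code mixes case-sensitive and upper-cased matches, and several handlers take extra arguments):

# hedged sketch using handlers named in the code above
DISPATCH = [
    ("\*FF", lambda m: ProcessPiezometer(m.data, m.simnum)),
    ("EQINFO", lambda m: ProcessEarthquake(m)),
    ("^PSIR ", lambda m: qsi.ProcessServerInfoRequest(m)),
]

def dispatch(msg):
    for pattern, handler in DISPATCH:
        if re.search(pattern, msg.data.upper()):
            return handler(msg)
    return False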
Example #15
def worker(first_target, last_target):
    #load all global variables?
    summary = pd.DataFrame()
    s_f = pd.DataFrame()
    s_a = pd.DataFrame()
    io = cfg.config()

    num_roll_window_ops = io.io.num_roll_window_ops
    roll_window_length = io.io.roll_window_length
    data_dt = io.io.data_dt
    rt_window_length = io.io.rt_window_length

    roll_window_numpts = int(1 + roll_window_length / data_dt)

    col_pos_interval = io.io.col_pos_interval
    col_pos_num = io.io.num_col_pos
    to_fill = io.io.to_fill
    to_smooth = io.io.to_smooth
    #    output_path = (__file__)
    #    output_file_path = (__file__)
    #    proc_file_path = (__file__)
    CSVFormat = '.csv'
    #    PrintProc = io.io.printproc

    T_disp = io.io.t_disp
    T_velL2 = io.io.t_vell2
    T_velL3 = io.io.t_vell3
    k_ac_ax = io.io.k_ac_ax
    num_nodes_to_check = io.io.num_nodes_to_check
    colarrange = io.io.alerteval_colarrange.split(',')
    node_status = qdb.GetNodeStatus(1)

    for i in range(first_target, last_target):
        #        try:
        sites, custom_end = ffd.aim(i)
        sensorlist = qdb.GetSensorList(sites)
        for s in sensorlist:

            last_col = sensorlist[-1:]
            last_col = last_col[0]
            last_col = last_col.name

            # getting current column properties
            colname, num_nodes, seg_len = s.name, s.nos, s.seglen

            # list of working nodes
            node_list = range(1, num_nodes + 1)
            not_working = node_status.loc[(node_status.site == colname)
                                          & (node_status.node <= num_nodes)]
            not_working_nodes = not_working['node'].values
            for i in not_working_nodes:
                node_list.remove(i)

            proc_monitoring, monwin = generate_proc(colname, num_nodes,
                                                    seg_len, custom_end,
                                                    roll_window_length,
                                                    data_dt, rt_window_length,
                                                    num_roll_window_ops)

            xz_series_list, xy_series_list = create_series_list(
                proc_monitoring, monwin, colname, num_nodes)
            #            print "create_series_list tapos na"
            # create, fill and smooth dataframes from series lists
            xz = create_fill_smooth_df(xz_series_list, num_nodes, monwin,
                                       roll_window_numpts, to_fill, to_smooth)
            xy = create_fill_smooth_df(xy_series_list, num_nodes, monwin,
                                       roll_window_numpts, to_fill, to_smooth)

            # computing instantaneous velocity
            vel_xz, vel_xy = compute_node_inst_vel(xz, xy, roll_window_numpts)

            # computing cumulative displacements
            cs_x, cs_xz, cs_xy = compute_col_pos(xz, xy, monwin.index[-1],
                                                 col_pos_interval, col_pos_num,
                                                 seg_len)

            # processing dataframes for output
            xz, xy, xz_0off, xy_0off, vel_xz, vel_xy, vel_xz_0off, vel_xy_0off, cs_x, cs_xz, cs_xy, cs_xz_0, cs_xy_0 = df_to_out(
                colname,
                xz,
                xy,
                vel_xz,
                vel_xy,
                cs_x,
                cs_xz,
                cs_xy,
                #                                                                                                                       proc_file_path,
                CSVFormat)

            # Alert generation
            #            alert_out=alert_generation(colname,xz,xy,vel_xz,vel_xy,num_nodes, T_disp, T_velL2, T_velL3, k_ac_ax,
            #                                       num_nodes_to_check,custom_end,CSVFormat,colarrange)
            alert_out = alert_generation(colname, xz, xy, vel_xz, vel_xy,
                                         num_nodes, T_disp, T_velL2, T_velL3,
                                         k_ac_ax, num_nodes_to_check,
                                         custom_end, CSVFormat, colarrange)

        alert_out = alert_out.reset_index(level=['id'])
        alert_out = alert_out[[
            'id', 'disp_alert', 'vel_alert', 'node_alert', 'col_alert'
        ]]
        alert_out = alert_out[(alert_out['vel_alert'] > 0) |
                              (alert_out.node_alert == 'l2')]
        alert_out = alert_out[alert_out.id == 1]
        alert_out['site'] = sites
        summary = pd.concat((summary, alert_out), axis=0)
#        except:
#            print "Error recreating alarm."
#            continue
    print "--------------------Filtering chenes----------------------"
    print "--------------------Store yung mga nafilter----------------------"

    for j in range(0, len(summary)):
        #        try:
        sites, custom_end = time_site(j, summary)
        #        print "custom_end -------------> %s" %str(custom_end)
        sensorlist = qdb.GetSensorList(sites)
        for s in sensorlist:

            last_col = sensorlist[-1:]
            last_col = last_col[0]
            last_col = last_col.name

            # getting current column properties
            colname, num_nodes, seg_len = s.name, s.nos, s.seglen

            # list of working nodes
            node_list = range(1, num_nodes + 1)
            not_working = node_status.loc[(node_status.site == colname)
                                          & (node_status.node <= num_nodes)]
            not_working_nodes = not_working['node'].values
            for i in not_working_nodes:
                node_list.remove(i)

#            proc_monitoring,monwin=generate_proc(colname, num_nodes, seg_len, custom_end,f=True)
            proc_monitoring, monwin = generate_proc(colname,
                                                    num_nodes,
                                                    seg_len,
                                                    custom_end,
                                                    roll_window_length,
                                                    data_dt,
                                                    rt_window_length,
                                                    num_roll_window_ops,
                                                    filt=True)

            xz_series_list, xy_series_list = create_series_list(
                proc_monitoring, monwin, colname, num_nodes)

            xz = create_fill_smooth_df(xz_series_list, num_nodes, monwin,
                                       roll_window_numpts, to_fill, to_smooth)
            xy = create_fill_smooth_df(xy_series_list, num_nodes, monwin,
                                       roll_window_numpts, to_fill, to_smooth)

            # computing instantaneous velocity
            vel_xz, vel_xy = compute_node_inst_vel(xz, xy, roll_window_numpts)

            # computing cumulative displacements
            cs_x, cs_xz, cs_xy = compute_col_pos(xz, xy, monwin.index[-1],
                                                 col_pos_interval, col_pos_num,
                                                 seg_len)

            # processing dataframes for output
            xz, xy, xz_0off, xy_0off, vel_xz, vel_xy, vel_xz_0off, vel_xy_0off, cs_x, cs_xz, cs_xy, cs_xz_0, cs_xy_0 = df_to_out(
                colname,
                xz,
                xy,
                vel_xz,
                vel_xy,
                cs_x,
                cs_xz,
                cs_xy,
                #                                                                                                                       proc_file_path,
                CSVFormat)

            # Alert generation
            alert_out = alert_generation(colname, xz, xy, vel_xz, vel_xy,
                                         num_nodes, T_disp, T_velL2, T_velL3,
                                         k_ac_ax, num_nodes_to_check,
                                         custom_end, CSVFormat, colarrange)
        #    print alert_out

        alert_out = alert_out.reset_index(level=['id'])
        a_out = alert_out.copy()

        a_out = a_out[[
            'id', 'disp_alert', 'vel_alert', 'node_alert', 'col_alert'
        ]]
        a_out = a_out[(a_out['vel_alert'] < 1.0) | (a_out.node_alert == 'l0')]
        a_out = a_out[a_out.id == 1]
        a_out['site'] = sites
        s_f = pd.concat((s_f, a_out), axis=0)

        b_out = alert_out.copy()
        b_out = b_out[[
            'id', 'disp_alert', 'vel_alert', 'node_alert', 'col_alert'
        ]]
        b_out = b_out[(b_out['vel_alert'] > 0.0) | (b_out.node_alert == 'l2')]
        b_out = b_out[b_out.id == 1]
        b_out['site'] = sites
        s_a = pd.concat((s_a, b_out), axis=0)


#        except:
#            print "Error."
#            continue

    print "################# Drawing! Dahil drawing ka! ##################"
    print "################# Idrawing lahat ng nafilter! ##################"

    for k in range(0, len(s_f)):
        try:
            sites, custom_end = time_site(k, s_f)
            ce = custom_end.strftime("%y_%m_%d__%H_%M")
            fname = "FILTERED_" + str(sites) + "_" + ce + "_049_049"
            sensorlist = qdb.GetSensorList(sites)

            for s in sensorlist:
                last_col = sensorlist[-1:]
                last_col = last_col[0]
                last_col = last_col.name

                # getting current column properties
                colname, num_nodes, seg_len = s.name, s.nos, s.seglen

                # list of working nodes
                #            node_list = range(1, num_nodes + 1)
                #            not_working = node_status.loc[(node_status.site == colname) & (node_status.node <= num_nodes)]
                #            not_working_nodes = not_working['node'].values
                #            for i in not_working_nodes:
                #                node_list.remove(i)

                # importing proc_monitoring file of current column to dataframe
                #    try:
                #            print "proc_monitoring here: "
                proc_monitoring = generate_proc(colname,
                                                num_nodes,
                                                seg_len,
                                                custom_end,
                                                roll_window_length,
                                                data_dt,
                                                rt_window_length,
                                                num_roll_window_ops,
                                                filt=True,
                                                for_plots=True)
                #    print proc_monitoring
                proc_monitoring = proc_monitoring[proc_monitoring.id == 1]
                ffd.plotter(proc_monitoring, fname=fname)
        except:
            print "Error plotting Filtered."

    for k in range(0, len(s_a)):
        try:
            sites, custom_end = time_site(k, s_a)
            ce = custom_end.strftime("%y_%m_%d__%H_%M")
            # assumed label: the original reused fname left over from the s_f loop
            fname = "ALARM_" + str(sites) + "_" + ce + "_049_049"
            sensorlist = qdb.GetSensorList(sites)
            for s in sensorlist:

                last_col = sensorlist[-1:]
                last_col = last_col[0]
                last_col = last_col.name

                # getting current column properties
                colname, num_nodes, seg_len = s.name, s.nos, s.seglen

                # list of working nodes
                #            node_list = range(1, num_nodes + 1)
                #            not_working = node_status.loc[(node_status.site == colname) & (node_status.node <= num_nodes)]
                #            not_working_nodes = not_working['node'].values
                #            for i in not_working_nodes:
                #                node_list.remove(i)

                # importing proc_monitoring file of current column to dataframe
                #    try:
                #            print "proc_monitoring here: "
                proc_monitoring = generate_proc(colname,
                                                num_nodes,
                                                seg_len,
                                                custom_end,
                                                roll_window_length,
                                                data_dt,
                                                rt_window_length,
                                                num_roll_window_ops,
                                                filt=True,
                                                for_plots=True)
                #    print proc_monitoring
                proc_monitoring = proc_monitoring[proc_monitoring.id == 1]
                ffd.plotter(proc_monitoring, fname=fname)
        except:
            print "Error plotting Alarms."
Example #16
def RunSenslopeServer(network):
    minute_of_last_alert = dt.now().minute
    timetosend = 0
    lastAlertMsgSent = ''
    logruntimeflag = True
    global checkIfActive
    checkIfActive = True

    try:
        gsm = gsmio.gsmInit(network)
    except serial.SerialException:
        # note: gsm is unbound here since gsmInit failed, so there is nothing to close
        print '**NO COM PORT FOUND**'
        serverstate = 'serial'
        logRuntimeStatus(network, "com port error")
        raise ValueError(">> Error: no com port found")

    dbio.createTable("runtimelog", "runtime", cfg.config().mode.logtoinstance)
    logRuntimeStatus(network, "startup")

    dbio.createTable('smsinbox', 'smsinbox', cfg.config().mode.logtoinstance)
    dbio.createTable('smsoutbox', 'smsoutbox', cfg.config().mode.logtoinstance)

    sensor_numbers_str = str(getSensorNumbers())

    print '**' + network + ' GSM server active**'
    print time.asctime()
    while True:
        m = gsmio.countmsg()
        if m > 0:
            allmsgs = gsmio.getAllSms(network)

            try:
                WriteRawSmsToDb(allmsgs, sensor_numbers_str)
            except MySQLdb.ProgrammingError:
> Error: May be">
                print ">> Error: may be an empty line... skipping message storing"

            deleteMessagesfromGSM()

            print dt.today().strftime("\n" + network +
                                      " Server active as of %A, %B %d, %Y, %X")
            logRuntimeStatus(network, "alive")

            trySendingMessages(network)

        elif m == 0:
            trySendingMessages(network)

            gsmio.gsmflush()
            today = dt.today()
            if (today.minute % 10 == 0):
                if checkIfActive:
                    print today.strftime(
                        "\nServer active as of %A, %B %d, %Y, %X")
                checkIfActive = False
            else:
                checkIfActive = True

        elif m == -1:
            print 'GSM MODULE MAY BE INACTIVE'
            serverstate = 'inactive'
            logRuntimeStatus(network, "gsm inactive")
            gsmio.resetGsm()

        elif m == -2:
            print '>> Error in parsing messages: No data returned by GSM'
            gsmio.resetGsm()
        else:
            print '>> Error in parsing messages: Error unknown'
            gsmio.resetGsm()
Example #17
def getGndMeas(text):

    c = cfg.config()
    print '\n\n*******************************************************'
    print text

    # clean the message
    cleanText = re.sub(" +", " ", text.upper())
    cleanText = re.sub("\.+", ".", cleanText)
    cleanText = re.sub(";", ":", cleanText)
    cleanText = re.sub("\n", " ", cleanText)
    cleanText = cleanText.strip()
    sms_list = re.split(" ", re.sub("[\W]", " ", cleanText))

    sms_date = ""
    sms_time = ""
    records = []

    # check measurement type
    if sms_list[0][0] == 'R':
        meas_type = "ROUTINE"
    else:
        meas_type = "EVENT"

    data_field = re.split(" ", cleanText, maxsplit=2)[2]

    try:
        date_str = getDateFromSms(data_field)
        print "Date: " + date_str
    except ValueError:
        raise ValueError(c.reply.faildateen)
    except IOError:
        raise ValueError(c.reply.failooben)

    try:
        time_str = getTimeFromSms(data_field, date_str)
        print "Time: " + time_str
    except ValueError:
        raise ValueError(c.reply.failtimeen)
    except IOError:
        raise ValueError(c.reply.failooben)

    # get all the measurement pairs
    meas_pattern = "(?<= )[A-Z] *\d{1,3}\.*\d{0,2} *C*M"
    meas = re.findall(meas_pattern, data_field)
    # bail out if no measurement pairs were found
    if not meas:
        raise ValueError(c.reply.failmeasen)

    # get all the weather information
    print repr(data_field)
    try:
        wrecord = re.search("(?<=" + meas[-1] + " )[A-Z]+",
                            data_field).group(0)
        recisvalid = False
        for keyword in [
                "ARAW", "ULAN", "BAGYO", "LIMLIM", "AMBON", "ULAP", "SUN",
                "RAIN", "CLOUD", "DILIM", "HAMOG"
        ]:
            if keyword in wrecord:
                recisvalid = True
                print "valid"
                break
        if not recisvalid:
            raise AttributeError
    except AttributeError:
        raise ValueError(c.reply.failweaen)

    # get all the name of reporter/s
    try:
        observer_name = re.search("(?<=" + wrecord + " ).+$",
                                  data_field).group(0)
        print observer_name
    except AttributeError:
        raise ValueError(c.reply.failobven)

    gnd_records = ""
    for m in meas:
        try:
            crid = m.split(" ", 1)[0]
            cm = m.split(" ", 1)[1]
            print cm
        except IndexError:
            crid = m[0]
            cm = m[1:]

        try:
            re.search("\d *CM", cm).group(0)
            cm = float(re.search("\d{1,3}\.*\d{0,2}", cm).group(0))
        except AttributeError:
            cm = float(re.search("\d{1,3}\.*\d{0,2}", cm).group(0)) * 100.0

        gnd_records = gnd_records + "('" + date_str + " " + time_str + "','" + sms_list[
            0] + "','" + sms_list[
                1] + "','" + observer_name + "','" + crid + "','" + str(
                    cm) + "','" + wrecord + "'),"

    gnd_records = gnd_records[:-1]

    site_code = sms_list[1].lower()
    ts = date_str + " " + time_str
    command = """~/anaconda2/bin/python %s %s "%s" > ~/scriptlogs/gndalert.txt 2>&1 && ~/anaconda2/bin/python %s %s "%s" > ~/scriptlogs/gndalert2.txt 2>&1 && ~/anaconda2/bin/python %s %s "%s" > ~/scriptlogs/gndalert3.txt 2>&1""" % (
        c.fileio.gndalert1, site_code, ts, c.fileio.gndalert2, site_code, ts,
        c.fileio.gndalert3, site_code, ts)

    p = subprocess.Popen(command,
                         stdout=subprocess.PIPE,
                         shell=True,
                         stderr=subprocess.STDOUT)

    return gnd_records
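For illustration, the measurement pattern above applied to a hypothetical cleaned-up message body (date, time, crack-meter pairs, weather, observer):

import re

meas_pattern = "(?<= )[A-Z] *\d{1,3}\.*\d{0,2} *C*M"
data_field = "05 JAN 9:30AM A 10.5CM B 12CM ULAN JUAN"
print re.findall(meas_pattern, data_field)
# ['A 10.5CM', 'B 12CM']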
Example #18
import os, time, serial, re, sys
import MySQLdb
import datetime
import ConfigParser
from datetime import datetime as dt
from datetime import timedelta as td
import senslopedbio as dbio
import gsmSerialio as gsmio
import multiprocessing
import SomsServerParser as SSP
import math
import cfgfileio as cfg
import memcache
mc = memcache.Client(['127.0.0.1:11211'], debug=0)

if cfg.config().mode.script_mode == 'gsmserver':
    sys.path.insert(0, cfg.config().fileio.websocketdir)
    import dewsSocketLeanLib as dsll
#---------------------------------------------------------------------------------------------------------------------------


def updateSimNumTable(name, sim_num, date_activated):
    db, cur = dbio.SenslopeDBConnect('local')

    while True:
        try:
            query = """select sim_num from senslopedb.site_column_sim_nums
                where name = '%s' """ % (name)

            a = cur.execute(query)
            if a:
Example #19
def worker(first_target,last_target):
    #load all global variables?
    summary = pd.DataFrame()
    s_f = pd.DataFrame()
    s_a = pd.DataFrame()
    io = cfg.config()
    
    num_roll_window_ops = io.io.num_roll_window_ops
    roll_window_length = io.io.roll_window_length
    data_dt = io.io.data_dt
    rt_window_length = io.io.rt_window_length
    
    roll_window_numpts=int(1+roll_window_length/data_dt)
    
    col_pos_interval = io.io.col_pos_interval
    col_pos_num = io.io.num_col_pos
    to_fill = io.io.to_fill
    to_smooth = io.io.to_smooth
#    output_path = (__file__)
#    output_file_path = (__file__)
#    proc_file_path = (__file__)
    CSVFormat = '.csv'
#    PrintProc = io.io.printproc
    
    T_disp = io.io.t_disp
    T_velL2 = io.io.t_vell2 
    T_velL3 = io.io.t_vell3
    k_ac_ax = io.io.k_ac_ax
    num_nodes_to_check = io.io.num_nodes_to_check
    colarrange = io.io.alerteval_colarrange.split(',')   
    node_status = qdb.GetNodeStatus(1)

    for i in range(first_target,last_target):
#        try:
        sites,custom_end = ffd.aim(i)
        sensorlist = qdb.GetSensorList(sites)
        for s in sensorlist:
        
            last_col=sensorlist[-1:]
            last_col=last_col[0]
            last_col=last_col.name
            
            # getting current column properties
            colname,num_nodes,seg_len= s.name,s.nos,s.seglen
        
            # list of working nodes     
            node_list = range(1, num_nodes + 1)
            not_working = node_status.loc[(node_status.site == colname) & (node_status.node <= num_nodes)]
            not_working_nodes = not_working['node'].values  
            for i in not_working_nodes:
                node_list.remove(i)
        
            proc_monitoring,monwin=generate_proc(colname, num_nodes, seg_len, custom_end,roll_window_length,data_dt,rt_window_length,num_roll_window_ops)    

            xz_series_list,xy_series_list = create_series_list(proc_monitoring,monwin,colname,num_nodes)
    #            print "create_series_list tapos na"
            # create, fill and smooth dataframes from series lists
            xz=create_fill_smooth_df(xz_series_list,num_nodes,monwin, roll_window_numpts,to_fill,to_smooth)
            xy=create_fill_smooth_df(xy_series_list,num_nodes,monwin, roll_window_numpts,to_fill,to_smooth)
            
            # computing instantaneous velocity
            vel_xz, vel_xy = compute_node_inst_vel(xz,xy,roll_window_numpts)
            
            # computing cumulative displacements
            cs_x, cs_xz, cs_xy=compute_col_pos(xz,xy,monwin.index[-1], col_pos_interval, col_pos_num,seg_len)
        
            # processing dataframes for output
            xz,xy,xz_0off,xy_0off,vel_xz,vel_xy, vel_xz_0off, vel_xy_0off,cs_x,cs_xz,cs_xy,cs_xz_0,cs_xy_0 = df_to_out(colname,xz,xy,
                                                                                                                       vel_xz,vel_xy,
                                                                                                                       cs_x,cs_xz,cs_xy,
#                                                                                                                       proc_file_path,
                                                                                                                       CSVFormat)
                                                                                                                                  
            # Alert generation
#            alert_out=alert_generation(colname,xz,xy,vel_xz,vel_xy,num_nodes, T_disp, T_velL2, T_velL3, k_ac_ax,
#                                       num_nodes_to_check,custom_end,CSVFormat,colarrange)
            alert_out=alert_generation(colname,xz,xy,vel_xz,vel_xy,num_nodes, T_disp, T_velL2, T_velL3, k_ac_ax,num_nodes_to_check,custom_end,CSVFormat,colarrange)                                                                                                                                  
    
        alert_out = alert_out.reset_index(level = ['id'])
        alert_out = alert_out[['id','disp_alert','vel_alert','node_alert','col_alert']]
        alert_out = alert_out[(alert_out['vel_alert'] > 0 ) | (alert_out.node_alert == 'l2')]
        alert_out = alert_out[alert_out.id == 1]
        alert_out['site'] = sites
        summary = pd.concat((summary,alert_out),axis = 0)
#        except:
#            print "Error recreating alarm."
#            continue
    print "--------------------Filtering chenes----------------------"
    print "--------------------Store yung mga nafilter----------------------"
    
    for j in range(0,len(summary)):
#        try:
        sites,custom_end = time_site(j,summary)
#        print "custom_end -------------> %s" %str(custom_end)
        sensorlist = qdb.GetSensorList(sites)
        for s in sensorlist:
        
            last_col=sensorlist[-1:]
            last_col=last_col[0]
            last_col=last_col.name
            
            # getting current column properties
            colname,num_nodes,seg_len= s.name,s.nos,s.seglen
        
            # list of working nodes     
            node_list = range(1, num_nodes + 1)
            not_working = node_status.loc[(node_status.site == colname) & (node_status.node <= num_nodes)]
            not_working_nodes = not_working['node'].values  
            for i in not_working_nodes:
                node_list.remove(i)
        
#            proc_monitoring,monwin=generate_proc(colname, num_nodes, seg_len, custom_end,f=True)
            proc_monitoring,monwin=generate_proc(colname, num_nodes, seg_len, custom_end,roll_window_length,data_dt,rt_window_length,num_roll_window_ops,filt=True)    
     
            xz_series_list,xy_series_list = create_series_list(proc_monitoring,monwin,colname,num_nodes)
    
            xz=create_fill_smooth_df(xz_series_list,num_nodes,monwin, roll_window_numpts,to_fill,to_smooth)
            xy=create_fill_smooth_df(xy_series_list,num_nodes,monwin, roll_window_numpts,to_fill,to_smooth)
            
            # computing instantaneous velocity
            vel_xz, vel_xy = compute_node_inst_vel(xz,xy,roll_window_numpts)
            
            # computing cumulative displacements
            cs_x, cs_xz, cs_xy=compute_col_pos(xz,xy,monwin.index[-1], col_pos_interval, col_pos_num,seg_len)
        
            # processing dataframes for output
            xz,xy,xz_0off,xy_0off,vel_xz,vel_xy, vel_xz_0off, vel_xy_0off,cs_x,cs_xz,cs_xy,cs_xz_0,cs_xy_0 = df_to_out(colname,xz,xy,
                                                                                                                       vel_xz,vel_xy,
                                                                                                                       cs_x,cs_xz,cs_xy,
#                                                                                                                       proc_file_path,
                                                                                                                       CSVFormat)
                                                                                                                                  
            # Alert generation
            alert_out=alert_generation(colname,xz,xy,vel_xz,vel_xy,num_nodes, T_disp, T_velL2, T_velL3, k_ac_ax,
                                       num_nodes_to_check,custom_end,CSVFormat,colarrange)
        #    print alert_out
            
        
        alert_out = alert_out.reset_index(level = ['id'])
        a_out = alert_out.copy()
        
        a_out = a_out[['id','disp_alert','vel_alert','node_alert','col_alert']]
        a_out = a_out[(a_out['vel_alert'] < 1.0 ) | (a_out.node_alert == 'l0')]
        a_out = a_out[a_out.id == 1]
        a_out['site'] = sites
        s_f = pd.concat((s_f,a_out),axis = 0)
        
        b_out = alert_out.copy()
        b_out = b_out[['id','disp_alert','vel_alert','node_alert','col_alert']]
        b_out = b_out[(b_out['vel_alert'] > 0.0 ) | (b_out.node_alert == 'l2')]
        b_out = b_out[b_out.id == 1]
        b_out['site'] = sites
        s_a = pd.concat((s_a,b_out),axis = 0)
#        except:
#            print "Error."
#            continue
    
    print "################# Drawing! Dahil drawing ka! ##################"
    print "################# Idrawing lahat ng nafilter! ##################"
    
    for k in range(0,len(s_f)):
        try:
            sites,custom_end = time_site(k,s_f)
            ce =  custom_end.strftime("%y_%m_%d__%H_%M")
            fname = "FILTERED_" +str(sites) + "_" + ce + "_049_049"
            sensorlist = qdb.GetSensorList(sites)
            
            for s in sensorlist:
                last_col=sensorlist[-1:]
                last_col=last_col[0]
                last_col=last_col.name
                
                # getting current column properties
                colname,num_nodes,seg_len= s.name,s.nos,s.seglen
            
                # list of working nodes     
        #            node_list = range(1, num_nodes + 1)
        #            not_working = node_status.loc[(node_status.site == colname) & (node_status.node <= num_nodes)]
        #            not_working_nodes = not_working['node'].values  
        #            for i in not_working_nodes:
        #                node_list.remove(i)
            
                # importing proc_monitoring file of current column to dataframe
            #    try:
            #            print "proc_monitoring here: "
                proc_monitoring=generate_proc(colname, num_nodes, seg_len, custom_end,roll_window_length,data_dt,rt_window_length,num_roll_window_ops,filt=True,for_plots=True)
            #    print proc_monitoring
                proc_monitoring = proc_monitoring[proc_monitoring.id == 1]
                ffd.plotter(proc_monitoring,fname=fname)
        except:
            print "Error plotting Filtered."
        
    for k in range(0,len(s_a)):
        try:
            sites,custom_end = time_site(k,s_a)
            ce = custom_end.strftime("%y_%m_%d__%H_%M")
            # assumed label: the original reused fname left over from the s_f loop
            fname = "ALARM_" + str(sites) + "_" + ce + "_049_049"
            sensorlist = qdb.GetSensorList(sites)
            for s in sensorlist:
                
                last_col=sensorlist[-1:]
                last_col=last_col[0]
                last_col=last_col.name
                
                # getting current column properties
                colname,num_nodes,seg_len= s.name,s.nos,s.seglen
            
                # list of working nodes     
    #            node_list = range(1, num_nodes + 1)
    #            not_working = node_status.loc[(node_status.site == colname) & (node_status.node <= num_nodes)]
    #            not_working_nodes = not_working['node'].values  
    #            for i in not_working_nodes:
    #                node_list.remove(i)
            
                # importing proc_monitoring file of current column to dataframe
            #    try:
            #            print "proc_monitoring here: "
                proc_monitoring=generate_proc(colname, num_nodes, seg_len, custom_end,roll_window_length,data_dt,rt_window_length,num_roll_window_ops,filt=True,for_plots=True)
            #    print proc_monitoring
                proc_monitoring = proc_monitoring[proc_monitoring.id == 1]
                ffd.plotter(proc_monitoring,fname=fname)
        except:
            print "Error plotting Alarms."      
Example #20
    #11. reordering columns
    monitoring=monitoring[['id','xz','xy']]
    
    return monitoring,monwin

def time_site(target,df_sa):
    if (target < len(df_sa)):
        site = df_sa['site'].iloc[target]
        t_time = df_sa.index[target]
        return site,t_time
    else:
        print "Error. Target > len(df_sa)"
        

io = cfg.config()
num_roll_window_ops = io.io.num_roll_window_ops
roll_window_length = io.io.roll_window_length
data_dt = io.io.data_dt
rt_window_length = io.io.rt_window_length

roll_window_numpts=int(1+roll_window_length/data_dt)

col_pos_interval = io.io.col_pos_interval
col_pos_num = io.io.num_col_pos
to_fill = io.io.to_fill
to_smooth = io.io.to_smooth
output_path = (__file__)
output_file_path = (__file__)
proc_file_path = (__file__)
CSVFormat = '.csv'
def ProcessEarthquake(msg):
    line = msg.data.upper()
    print "Processing earthquake data"
    print line

    dbio.createTable('earthquake', 'earthquake')

    #find date
    if re.search("\d{1,2}\w+201[6789]", line):
        datestr_init = re.search("\d{1,2}\w+201[6789]", msg.data).group(0)
        pattern = ["%d%B%Y", "%d%b%Y"]
        datestr = None
        for p in pattern:
            try:
                datestr = dt.strptime(datestr_init, p).strftime("%Y-%m-%d")
                break
            except ValueError:
                print ">> Error in datetime conversion", datestr_init, "for pattern", p
        if datestr is None:
            return False
    else:
        print ">> No date string recognized"
        return False

    #find time
    if re.search("\d{1,2}[:\.]\d{1,2} *[AP]M", line):
        timestr = re.search("\d{1,2}[:\.]\d{1,2} *[AP]M", line).group(0)
        timestr = timestr.replace(" ", "").replace(".", ":")
        try:
            timestr = dt.strptime(timestr, "%I:%M%p").strftime("%H:%M:00")
        except:
            print ">> Error in datetime conversion", timestr
            return False
    else:
        print ">> No time string recognized"
        return False

    datetimestr = datestr + ' ' + timestr

    #find magnitude
    if re.search("((?<=M[SBLVOW]\=)|(?<=M\=)|(?<=MLV\=))\d+\.\d+(?= )", line):
        magstr = re.search(
            "((?<=M[SBLVOW]\=)|(?<=M\=)|(?<=MLV\=))\d+\.\d+(?= )",
            line).group(0)
    else:
        print ">> No magnitude string recognized"
        magstr = 'NULL'

    #find depth
    if re.search("(?<=D\=)\d+(?=K*M)", line):
        depthstr = re.search("(?<=D\=)\d+(?=K*M)", line).group(0)
    else:
        print ">> No depth string recognized"
        depthstr = 'NULL'

    #find latitude
    if re.search("\d+[\.\:]\d+(?=N)", line):
        latstr = re.search("\d+[\.\:]\d+(?=N)", line).group(0)
    else:
        print ">> No latitude string recognized"
        latstr = 'NULL'

    #find longitude
    if re.search("\d+[\.\:]\d+(?=E)", line):
        longstr = re.search("\d+[\.\:]\d+(?=E)", line).group(0)
    else:
        print ">> No longitude string recognized"
        longstr = 'NULL'

    #find epicenter distance
    if re.search("(?<=OR )\d+(?=KM)", line):
        diststr = re.search("(?<=OR )\d+(?=KM)", line).group(0)
    else:
        print ">> No distance string recognized"
        diststr = 'NULL'

    # find heading
    if re.search("[NS]\d+[EW]", line):
        headstr = re.search("[NS]\d+[EW]", line).group(0)
    else:
        print ">> No heading string recognized"
        headstr = 'NULL'

    # find Municipality
    if re.search("(?<=OF )[A-Z ]+(?= \()", line):
        munistr = re.search("(?<=OF )[A-Z ]+(?= \()", line).group(0)
    else:
        print ">> No municipality string recognized"
        munistr = 'NULL'

    # find province
    if re.search("(?<=\()[A-Z ]+(?=\))", line):
        provistr = re.search("(?<=\()[A-Z ]+(?=\))", line).group(0)
    else:
        print ">> No province string recognized"
        provistr = 'NULL'

    # find issuer
    if re.search("(?<=\<)[A-Z]+(?=\>)", line):
        issuerstr = re.search("(?<=\<)[A-Z]+(?=\>)", line).group(0)
    else:
        print ">> No issuer string recognized"
        issuerstr = 'NULL'

    query = "INSERT INTO senslopedb.earthquake (timestamp, mag, depth, lat, longi, dist, heading, municipality, province, issuer) VALUES ('%s',%s,%s,%s,%s,%s,'%s','%s','%s','%s') ON DUPLICATE KEY UPDATE mag=mag, depth=depth, lat=lat, longi=longi, dist=dist, heading=heading, municipality=municipality, province=province, issuer=issuer;" % (
        datetimestr, magstr, depthstr, latstr, longstr, diststr, headstr,
        munistr, provistr, issuerstr)

    query = query.replace("'NULL'", "NULL")

    print query

    dbio.commitToDb(query, 'earthquake')

    # subprocess.Popen(["python",cfg.config().fileio.eqprocfile])
    exec_line = "~/anaconda2/bin/python %s > ~/scriptlogs/earthquakescript.txt 2>&1" % (
        cfg.config().fileio.eqprocfile)
    p = subprocess.Popen(exec_line,
                         stdout=subprocess.PIPE,
                         shell=True,
                         stderr=subprocess.STDOUT)

    return True
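A quick check of the magnitude pattern above against a hypothetical PHIVOLCS-style bulletin line:

import re

line = "05JAN2017 4:30 PM MS=5.4 D=10KM 14.20N 121.50E <PHIVOLCS>"
print re.search("((?<=M[SBLVOW]\=)|(?<=M\=)|(?<=MLV\=))\d+\.\d+(?= )",
                line).group(0)
# '5.4'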