Example #1
def getLsbChange():
    df = ofd.getFilteredData()
    isDFempty = df.empty

    if isDFempty == True:
        print 'No Data Available...'
    else:
        del df['id']
        df = df.set_index(['ts'])

        df2 = df.copy()
        dfa = []

        df3 = df2.resample('30Min').fillna(method='pad')
        dfv = df3 - df3.shift(12)

        if len(dfa) == 0:
            dfa = dfv.copy()
        else:
            dfa = dfa.append(dfv)

        dfa = dfa[pd.notnull(dfa.x)]

        dfajson = dfa.reset_index().to_json(orient="records",
                                            date_format='iso')
        dfajson = dfajson.replace("T", " ").replace("Z",
                                                    "").replace(".000", "")
        print dfajson
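
The snippet above targets the old Python 2 / pandas 0.x API (print statements, resample('30Min').fillna(method='pad')). Below is a minimal sketch of the same 6-hour-change computation against current pandas, with a synthetic frame standing in for ofd.getFilteredData() (project-specific and not shown here); the x/y/z column names and the 30-minute/12-step constants are taken from the code above, everything else is an assumption.

import numpy as np
import pandas as pd

def get_lsb_change(df):
    # Sketch only: df is assumed to have a 'ts' datetime column and
    # numeric x/y/z columns, like the filtered data frame above.
    df = df.set_index('ts')

    # Modern spelling of resample('30Min').fillna(method='pad'):
    # resample to a 30-minute grid and forward-fill gaps.
    df30 = df.resample('30min').ffill()

    # Change relative to the value 12 samples (6 hours) earlier.
    change = df30 - df30.shift(12)
    change = change[change.x.notna()]

    # Same serialization and timestamp clean-up as the original.
    out = change.reset_index().to_json(orient='records', date_format='iso')
    return out.replace('T', ' ').replace('Z', '').replace('.000', '')

# Hypothetical usage with synthetic data:
ts = pd.date_range('2024-01-01', periods=48, freq='30min')
demo = pd.DataFrame({'ts': ts,
                     'x': np.random.randn(48).cumsum(),
                     'y': np.random.randn(48).cumsum(),
                     'z': np.random.randn(48).cumsum()})
print(get_lsb_change(demo))
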
def GenLsbAlerts():
    sites = GetSensorList()

    alertTxt = ""
    alertTxt2 = ""
    print "Getting lsb alerts"

    for site in sites:
        for nid in range(1, site.nos + 1):
            df = ofd.getFilteredData(
                isCmd=False,
                inSite=site.name,
                inNode=nid,
                inStart=(dt.now() - td(7)).strftime("%y/%m/%d %H:%M:%S"))
            isDFempty = df.empty

            if isDFempty == True:
                PrintOut('No Data Available... for %s %s' % (site.name, nid))
                continue

            df = df.set_index(['ts'])

            df2 = df.copy()
            dfa = []

            try:
                df3 = df2.resample('30Min').fillna(method='pad')
            except pd.core.groupby.DataError:
                #print "No data to resample %s %s" % (site.name, nid)
                continue
            dfv = df3 - df3.shift(12)

            if len(dfa) == 0:
                dfa = dfv.copy()
            else:
                dfa = dfa.append(dfv)

            window = 48
            dfarm = pd.rolling_mean(dfa, window)
            dfarm = dfarm[dfarm.index > dt.now() - td(1)]
            if (((abs(dfarm.x) > 0.25) | (abs(dfarm.y) > 0.25) |
                 (abs(dfarm.z) > 1.0)).any()):
                ins = "%s,%s" % (site.name, nid)
                alertTxt += ins
                alertTxt2 += ins
                print ins + '\t',

                if ((abs(dfarm.x) > 0.25).any()):
                    print 'x',
                    alertTxt += ',1'
                    alertTxt2 += ',' + repr(max(abs(dfarm.x)))
                else:
                    alertTxt += ',0'
                    alertTxt2 += ',0'

                if ((abs(dfarm.y) > 0.25).any()):
                    print 'y',
                    alertTxt += ',1'
                    alertTxt2 += ',' + repr(max(abs(dfarm.y)))
                else:
                    alertTxt += ',0'
                    alertTxt2 += ',0'

                if ((abs(dfarm.z) > 1.0).any()):
                    print 'z',
                    alertTxt += ',1'
                    alertTxt2 += ',' + repr(max(abs(dfarm.z)))
                else:
                    alertTxt += ',0'
                    alertTxt2 += ',0'

                print ''
                alertTxt += '\n'
                alertTxt2 += '\n'

    f = open('lsbalerts.csv', 'w')
    f.write(alertTxt)
    f.close()

    f = open('lsbalerts2.csv', 'w')
    f.write(alertTxt2)
    f.close()
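
pd.rolling_mean no longer exists in current pandas; a rolling-window mean is now written DataFrame.rolling(window).mean(). Here is a hedged sketch of just the threshold check from GenLsbAlerts (48-sample rolling mean of the 6-hour deltas over the last 24 hours, x and y flagged above 0.25 and z above 1.0), with the data fetching, printing and CSV writing left out.

from datetime import datetime, timedelta

import pandas as pd

def lsb_alert_flags(dfa, window=48):
    # Sketch only: dfa is assumed to carry numeric x/y/z columns of
    # 6-hour changes on a 30-minute DatetimeIndex, as built above.
    dfarm = dfa.rolling(window).mean()   # replaces pd.rolling_mean(dfa, window)

    # Keep only the last 24 hours, as the original does.
    dfarm = dfarm[dfarm.index > datetime.now() - timedelta(days=1)]

    return (bool((dfarm.x.abs() > 0.25).any()),
            bool((dfarm.y.abs() > 0.25).any()),
            bool((dfarm.z.abs() > 1.0).any()))

The site,node,flag rows written to lsbalerts.csv could then be assembled from these booleans with csv.writer rather than manual string concatenation, though that is a suggestion and not what the code above does.
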
def getDF():

    site = sys.argv[1]

    #time start
    timeStart = (dt.now() - td(2)).strftime("%y/%m/%d %H:%M:%S")
    timeEnd = (dt.now()).strftime("%y/%m/%d %H:%M:%S")
    timeTarget = (dt.now() - td(days=1, hours=6)).strftime("%Y-%m-%d %H:%M:%S")
    #print "End Time: " + timeEnd + ", Target Time: " + timeTarget

    #get the max number of nodes for the site
    engineRaw = create_engine(
        'mysql+mysqldb://updews:[email protected]/senslopedb')
    query = "SELECT num_nodes FROM site_column_props WHERE name = '%s'" % (
        site)
    results = pd.io.sql.read_sql(query, engineRaw)
    maxNode = int(results.ix[0]["num_nodes"])

    allNodes = []

    for num in range(1, maxNode):
        #print "current node: %d" % (num)

        #Changed date difference is 1 day or 24 hours
        df = ofd.getFilteredData(isCmd=False,
                                 inSite=site,
                                 inNode=num,
                                 inStart=timeTarget)

        if df.empty:
            #for times that there are no data for the specific node
            continue

        #site should be "id". It's only "site" temporarily...
        df.columns = ['timestamp', 'site', 'xalert', 'yalert', 'zalert']
        df = df.set_index(['timestamp'])

        df2 = df.copy()
        dfa = []

        df3 = df2.resample('30Min').fillna(method='pad')
        #dfv = df3 - df3.shift(12)

        dfv = df3.copy()
        dfv.xalert = abs(df3.xalert - df3.xalert.shift(12))
        dfv.yalert = abs(df3.yalert - df3.yalert.shift(12))
        dfv.zalert = abs(df3.zalert - df3.zalert.shift(12))

        #Enable this for debugging
        #print dfv

        if len(dfa) == 0:
            dfa = dfv.copy()
        else:
            dfa = dfa.append(dfv)

        dfa = dfa[pd.notnull(dfa.xalert)]

        if len(allNodes) == 0:
            allNodes = dfa.copy()
        else:
            allNodes = allNodes.append(dfv)

    allNodes = allNodes[pd.notnull(allNodes.xalert)]

    allNodes = allNodes[allNodes.index > dt.now() - td(hours=23, minutes=30)]

    dfajson = allNodes.reset_index().to_json(orient="records",
                                             date_format='iso')
    dfajson = dfajson.replace("T", " ").replace("Z", "").replace(".000", "")
    print dfajson
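
Two pieces of getDF no longer exist in current pandas: pd.io.sql.read_sql is exposed as pd.read_sql, and the .ix indexer has been removed in favour of .iloc/.loc. A minimal sketch of the node-count lookup with those replacements follows; the connection URL is a placeholder and the table and column names are copied from the query above, so treat them as assumptions about that particular database.

import pandas as pd
from sqlalchemy import create_engine

def get_max_node(site, db_url='mysql+mysqldb://user:password@localhost/senslopedb'):
    # Sketch only: db_url is a placeholder, not real credentials.
    engine = create_engine(db_url)
    # In production this interpolation should become a bound parameter.
    query = "SELECT num_nodes FROM site_column_props WHERE name = '%s'" % site
    results = pd.read_sql(query, engine)        # replaces pd.io.sql.read_sql
    return int(results.iloc[0]['num_nodes'])    # .iloc replaces the removed .ix

The per-node resample/shift logic inside the loop follows the same pattern as the getLsbChange sketch above, with .ffill() as the modern spelling of fillna(method='pad').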