def getDF():
    """Fetch raw SOMS data for one site/node/date-window (from argv) and
    print it as a JSON records string with cleaned-up timestamps."""
    site = sys.argv[1]
    fdate = sys.argv[2]
    tdate = sys.argv[3]
    nid = sys.argv[4]
    raw = CSR.getsomsrawdata(site + 'm', nid, fdate, tdate)
    frame = pd.DataFrame(raw, columns=['raw'])
    payload = frame.reset_index().to_json(orient="records", date_format='iso')
    # Strip ISO-8601 artifacts so timestamps read as plain "YYYY-MM-DD HH:MM:SS"
    payload = payload.replace("T", " ").replace("Z", "").replace(".000", "")
    print(payload)
def getDF():
    """Query raw SOMS readings (site, from-date, to-date, node id taken from
    the command line) and emit them as JSON records on stdout."""
    args = sys.argv
    site, fdate, tdate, nid = args[1], args[2], args[3], args[4]
    data = CSR.getsomsrawdata(site + 'm', nid, fdate, tdate)
    df = pd.DataFrame(data, columns=['raw'])
    dfajson = df.reset_index().to_json(orient="records", date_format='iso')
    # Normalize ISO timestamps to a space-separated, millisecond-free form.
    for old, new in (("T", " "), ("Z", ""), (".000", "")):
        dfajson = dfajson.replace(old, new)
    print(dfajson)
print '>> Error parsing database' columns = cur.fetchall() print columns for column in columns: columnName = column[0] if len(columnName) <= 6: #Get list of nodes for column queryNodes = 'SELECT DISTINCT id FROM %s WHERE id > 0 AND id < 40 ORDER BY id' % (columnName) cur.execute(queryNodes) nodes = cur.fetchall() print nodes for nodeData in nodes: node = nodeData[0] print "%s: %s" % (columnName, node) # test = soms.getsomscaldata(columnName, node, fdate, tdate) test = soms.getsomsrawdata(columnName, node, fdate, tdate) #print test print "row count: %s" % (len(test.index)) pass except IndexError: print '>> Error in writing extracting database data to files..' #test = soms.getsomsrawdata(column, gid, fdate, tdate) #print test
v_a2.rename(columns={'v':'v2'}, inplace=True) v_a2=v_a2.resample('30Min',base=0) x=pd.concat([df,v_a1.v1,v_a2.v2],axis=1,ignore_index=True) x.columns=['mval1','v1','v2'] x=x.resample('30Min',base=0) df=x.mval1[((x.v1>3.2) & (x.v1<3.4) & (x.v2>3.2) & (x.v2<3.4)) | (x.v1.isnull() & x.v2.isnull())] df = df.resample('30Min',base=0) return df column = 'gaasb' node = 2 mode = 0 fdate='2016-04-01' if mode==0: df = CSR.getsomsrawdata(column+'m',node,fdate) else: df = CSR.getsomscaldata(column+'m',node,fdate) f,ax=plt.subplots(4,sharex=True) ax[0].plot(df,color='b') out=seek_outlier(df,column,node,mode) #df[out].plot(style='ro') #plt.subplot(312) filtered=f_outlier(df,column,node,mode) ax[1].plot(filtered,color='m') #filtered.plot(color='m') filtered2= f_undervoltage(filtered,column,node,mode) ax[2].plot(filtered2,color='g')
def heatmap(col, t_timestamp, t_win='1d'):
    """Build heatmap-ready soil-moisture data for every node of a column.

    Pulls calibrated SOMS data per node over a monitoring window ending at
    *t_timestamp*, range-filters it to (0, 1300), scales it to a 0-255 color
    value ('cval'), smooths with a 3-sample rolling mean on a resampled grid,
    and prints the merged result as JSON records.

    Parameters:
        col         -- column/site name; must be longer than 4 chars to be
                       looked up in site_column_props (else returns 'v1').
        t_timestamp -- end timestamp string; position 14-15 (the minutes
                       digits) selects the resample base for the '3d' window.
        t_win       -- monitoring window: '1d', '3d' or '30d'.

    Returns None after printing, or 'v1' for short column names.
    """
    df_merge = pd.DataFrame()
    smin = 0
    smax = 255
    mini = 0
    maxi = 1300
    if t_win == '1d':
        for_base = 0
        timew = 24
        interval = '30T'
    elif t_win == '3d':
        # Resample base depends on whether the reference time is on the half hour.
        if int(t_timestamp[14] + t_timestamp[15]) == 30:
            for_base = 90
        else:
            for_base = 60
        timew = 72
        interval = '120T'
    elif t_win == '30d':
        for_base = 30
        timew = 720
        interval = 'D'
    else:
        # BUG FIX: previously only printed and fell through, then crashed with
        # a NameError on the undefined timew/for_base. Bail out instead.
        print("invalid monitoring window")
        return
    f_timestamp = pd.to_datetime(
        pd.to_datetime(t_timestamp) - timedelta(hours=timew))
    t_timestamp = pd.to_datetime(
        pd.to_datetime(t_timestamp) + timedelta(minutes=30))
    if len(col) > 4:
        query = "select num_nodes from senslopedb.site_column_props where name = '%s'" % col
        node = qs.GetDBDataFrame(query)
        for node_num in range(1, int(node.num_nodes[0]) + 1):
            df = CSR.getsomscaldata(col, node_num, f_timestamp, t_timestamp,
                                    if_multi=True)
            if df.empty:
                continue
            df = df.reset_index()
            df.ts = pd.to_datetime(df.ts)
            df.index = df.ts
            df.drop('ts', axis=1, inplace=True)
            # Keep only physically plausible readings (0 < value < 1300).
            df = df[((df < 1300) == True) & ((df > 0) == True)]
            # Scale the measurement into the 0-255 color range.
            df['cval'] = df['mval1'].apply(
                lambda x: (x - mini) * smax / (maxi) + smin)
            # 3-sample rolling mean over the window-specific resampled grid.
            dfrs = pd.rolling_mean(
                df.resample(interval, base=for_base), window=3, min_periods=1)
            if 'mval1' in df.columns:
                dfrs = dfrs.drop('mval1', axis=1)
            dfrs = dfrs.reset_index(0)
            df_merge = pd.concat([df_merge, dfrs], axis=0)
        df_merge['ts'] = df_merge.ts.astype(object).astype(str)
        dfjson = df_merge.to_json(orient='records', double_precision=0)
        print(dfjson)
    else:
        return 'v1'
def heatmap(col, t_timestamp, t_win='1d'):
    """Print JSON heatmap data (scaled, smoothed SOMS readings per node) for
    column *col* over the monitoring window *t_win* ending at *t_timestamp*.

    t_win is '1d', '3d' or '30d'; anything else is rejected. Returns 'v1'
    when the column name is 4 characters or fewer, otherwise None.
    """
    df_merge = pd.DataFrame()
    # Color scaling: map measurement range [mini, maxi] onto [smin, smax].
    smin = 0
    smax = 255
    mini = 0
    maxi = 1300
    if t_win == '1d':
        for_base, timew, interval = 0, 24, '30T'
    elif t_win == '3d':
        # Minutes digits of the timestamp pick the resample base.
        if int(t_timestamp[14] + t_timestamp[15]) == 30:
            for_base, timew, interval = 90, 72, '120T'
        else:
            for_base, timew, interval = 60, 72, '120T'
    elif t_win == '30d':
        for_base, timew, interval = 30, 720, 'D'
    else:
        # BUG FIX: the original fell through after printing and hit a
        # NameError on undefined timew/for_base; return early instead.
        print("invalid monitoring window")
        return
    f_timestamp = pd.to_datetime(
        pd.to_datetime(t_timestamp) - timedelta(hours=timew))
    t_timestamp = pd.to_datetime(
        pd.to_datetime(t_timestamp) + timedelta(minutes=30))
    if len(col) > 4:
        query = "select num_nodes from senslopedb.site_column_props where name = '%s'" % col
        node = qs.GetDBDataFrame(query)
        for node_num in range(1, int(node.num_nodes[0]) + 1):
            df = CSR.getsomscaldata(col, node_num, f_timestamp, t_timestamp,
                                    if_multi=True)
            if df.empty:
                continue
            df = df.reset_index()
            df.ts = pd.to_datetime(df.ts)
            df.index = df.ts
            df.drop('ts', axis=1, inplace=True)
            # Drop out-of-range readings, then scale into color space.
            df = df[((df < 1300) == True) & ((df > 0) == True)]
            df['cval'] = df['mval1'].apply(
                lambda x: (x - mini) * smax / (maxi) + smin)
            dfrs = pd.rolling_mean(
                df.resample(interval, base=for_base), window=3, min_periods=1)
            if 'mval1' in df.columns:
                dfrs = dfrs.drop('mval1', axis=1)
            dfrs = dfrs.reset_index(0)
            df_merge = pd.concat([df_merge, dfrs], axis=0)
        df_merge['ts'] = df_merge.ts.astype(object).astype(str)
        dfjson = df_merge.to_json(orient='records', double_precision=0)
        print(dfjson)
    else:
        return 'v1'
from datetime import datetime as dt
import sqlalchemy
from sqlalchemy import create_engine
import requests

# Make the updews-pycodes Analysis package importable from this script.
path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                    '../../updews-pycodes/Analysis/'))
if not path in sys.path:
    sys.path.insert(1, path)
del path
import SomsRangeFilter
import ConvertSomsRaw as CSR

site = sys.argv[1]
fdate = sys.argv[2]
tdate = sys.argv[3]
nid = sys.argv[4]
mode = sys.argv[5]

# BUG FIX: sys.argv values are strings, so the original `mode == 0` was
# always False and the raw-data branch was unreachable. Compare against the
# string '0', as the sibling scripts in this project do.
if mode == '0':
    df = CSR.getsomsrawdata(site, int(nid), fdate, tdate)
else:
    df = CSR.getsomscaldata(site, int(nid), fdate, tdate)

# Range-filter outliers, then print as JSON records with cleaned timestamps.
df_filt = SomsRangeFilter.f_outlier(df, site, int(nid), int(mode))
dfajson = df_filt.reset_index().to_json(orient='records', date_format='iso')
dfajson = dfajson.replace("T", " ").replace("Z", "").replace(".000", "")
print(dfajson)
from datetime import datetime as dt
import sqlalchemy
from sqlalchemy import create_engine
import requests

# Put the updews-pycodes Soms analysis folder on the import path.
path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                    '../../updews-pycodes/Analysis/Soms'))
if not path in sys.path:
    sys.path.insert(1, path)
del path
import SomsRangeFilter
import ConvertSomsRaw as CSR

site = sys.argv[1]
fdate = sys.argv[2]
tdate = sys.argv[3]
mode = sys.argv[4]

# mode '0' selects raw readings; anything else selects calibrated readings.
# Both calls fetch all nodes at once (if_multi=True).
if mode == '0':
    df = CSR.getsomsrawdata(column=site, fdate=fdate, tdate=tdate,
                            if_multi=True)
else:
    df = CSR.getsomscaldata(column=site, fdate=fdate, tdate=tdate,
                            if_multi=True)

# Filter outliers and emit JSON records with "YYYY-MM-DD HH:MM:SS" timestamps.
df_filt = SomsRangeFilter.f_outlier(df, site, int(mode))
dfajson = df_filt.reset_index().to_json(orient='records', date_format='iso')
dfajson = dfajson.replace("T", " ").replace("Z", "").replace(".000", "")
print(dfajson)
from datetime import datetime as dt
import sqlalchemy
from sqlalchemy import create_engine
import requests

# Extend sys.path so the updews-pycodes Analysis modules can be imported.
path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                    '../../updews-pycodes/Analysis/'))
if not path in sys.path:
    sys.path.insert(1, path)
del path
import SomsRangeFilter
import ConvertSomsRaw as CSR

# Command-line arguments: site, date window, node id, and mode selector.
site, fdate, tdate, nid, mode = (sys.argv[1], sys.argv[2], sys.argv[3],
                                 sys.argv[4], sys.argv[5])

# mode '0' -> raw data, otherwise calibrated data, for a single node.
fetch = CSR.getsomsrawdata if mode == '0' else CSR.getsomscaldata
df = fetch(site, int(nid), fdate, tdate)

# Apply the range/outlier filter and print the result as JSON records.
df_filt = SomsRangeFilter.f_outlier(df, site, int(nid), int(mode))
dfajson = df_filt.reset_index().to_json(orient='records', date_format='iso')
dfajson = dfajson.replace("T", " ").replace("Z", "").replace(".000", "")
print(dfajson)
from datetime import timedelta as td
from datetime import datetime as dt
import sqlalchemy
from sqlalchemy import create_engine

# Make the updews-pycodes Soms analysis package importable.
path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                    '../../updews-pycodes/Analysis/Soms'))
if not path in sys.path:
    sys.path.insert(1, path)
del path
import ConvertSomsRaw as CSR
import SomsRangeFilter

site = sys.argv[1]
fdate = sys.argv[2]
tdate = sys.argv[3]
nid = sys.argv[4]
mode = sys.argv[5]

# NOTE(review): nid is passed as a string here while sibling scripts cast
# with int(nid) — confirm getsomsrawdata accepts a string gid.
df = CSR.getsomsrawdata(column=site, gid=nid, fdate=fdate, tdate=tdate,
                        if_multi=False)

# Emit the raw readings as JSON records with plain-text timestamps.
dfajson = df.reset_index().to_json(orient="records", date_format='iso')
dfajson = dfajson.replace("T", " ").replace("Z", "").replace(".000", "")
print(dfajson)
column = "laysa" fdate='2013-01-1' tdate='2016-08-1' #for a in range(1,17,1): #def heatmap(column,fdate,tdate ): df_merge = pd.DataFrame() days = 30 query = "select num_nodes from senslopedb.site_column_props where name = '%s'" %column node = qs.GetDBDataFrame(query) for node_num in range (1,int(node.num_nodes[0])): df = CSR.getsomscaldata(column,node_num,fdate,tdate) # df=pd.read_csv('C:/Users/JosephRyan/Desktop/SENSLOPE/FILTERS/tester/output/'+ str(columns[a])+'_CAL.csv') # print df df = df.reset_index() df.ts=pd.to_datetime(df.ts) #df.drop('ts', axis=1, inplace=True) df.index=df.ts df.drop('ts', axis=1, inplace=True) #pd.to_datetime(df.index) #df=df[((df<5000) == True) & ((df>2000)==True)] df=df[((df<1300) == True) & ((df>0)==True)] dfrs =pd.rolling_mean(df.resample('1D'), window=3, min_periods=1) #mean for one day (dataframe) dfrs.rename(columns={'mval1':node_num}, inplace=True) #wmean=pd.ewma(df,span=48,min_periods=1) #ncols=range(1,int(nodecount[6])+1)