def soms_data(start_date, end_date, site):
    # fetch SOMS readings for the site and keep only msgid 110 rows, dropping unused columns
    db = q.GetSomsData(siteid=site, fromTime=start_date, toTime=end_date)
    df = db[db.msgid == 110]
    df = df.drop(['msgid', 'mval2'], axis=1)

    return df
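# A minimal usage sketch for soms_data(), assuming q is the querySenslopeDb module imported
# in the later examples and that the database is reachable; the site code and dates are placeholders.
moisture = soms_data('2017-01-02', '2017-01-09', 'laysam')
print(moisture.head())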
Example #2
def getsomscaldata(column="", gid=0, fdate="", tdate=""):
    '''
        only for landslide sensors v2 and v3
        output:  df = series of unfiltered SOMS data (calibrated/normalized) of a specific node of the defined column
        param:
            column = column name (ex. laysam)
            gid = geographic id of node [1-40]
    '''

    v2 = ['NAGSAM', 'BAYSBM', 'AGBSBM', 'MCASBM', 'CARSBM', 'PEPSBM', 'BLCSAM']
    v3 = ['lpasam', 'lpasbm', 'laysam', 'laysbm', 'imesbm', 'barscm',
          'mngsam', 'gaasam', 'gaasbm', 'hinsam', 'hinsbm', 'talsam']
    df = pd.DataFrame()

    if column.upper() in v2:
        if column.upper() == 'NAGSAM':
            msgid = 26
        else:
            msgid = 112
    elif column.lower() in v3:  # if version 3
        msgid = 113
    else:
        print('No data available for ' + column.upper())
        return df

    try:
        df = qs.GetSomsData(siteid=column, fromTime=fdate, toTime=tdate, targetnode=gid, msgid=msgid)
        df.index = df.ts
        df = df.mval1
    except:
        print('No data available for ' + column.upper())
        return df

    return df
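# A hedged usage sketch for getsomscaldata(); the column, node id, and window below are
# placeholders, and qs is assumed to be the querySenslopeDb module. The function returns a
# Series of mval1 indexed by timestamp, or an empty frame when the column is unknown or the query fails.
calib = getsomscaldata(column='laysam', gid=5, fdate='2017-01-02', tdate='2017-01-09')
if not calib.empty:
    print(calib.describe())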
Example #3
def getsomsrawdata(column="", gid="", fdate="", tdate=""):
    '''
        only for landslide sensors v2 and v3
        output:  sraw = series of unfiltered SOMS data (raw) of a specific node of the defined column
        param:
            column = column name (ex. laysam)
            gid = geographic id of node [1-40]
    '''

    v2 = ['NAGSAM', 'BAYSBM', 'AGBSBM', 'MCASBM', 'CARSBM', 'PEPSBM', 'BLCSAM']
    v3 = ['lpasam', 'lpasbm', 'laysam', 'laysbm', 'imesbm', 'barscm',
          'mngsam', 'gaasam', 'gaasbm', 'hinsam', 'hinsbm', 'talsam']
    df = pd.DataFrame(columns=['sraw', 'scal'])
#    print('getsomsdata: ' + column + ',' + str(gid))
    try:
        df = qs.GetSomsData(siteid=column, fromTime=fdate, toTime=tdate, targetnode=gid)
    except:
        print('No data available for ' + column.upper())
        return df

    df.index = df.ts

    if column.upper() in v2:
        # v2 packets: derive the raw value from the mval1/mval2 pair
        if column.upper() == 'NAGSAM':
            sraw = (((8000000 / df.mval1[df.msgid == 21]) - (8000000 / df.mval2[df.msgid == 21])) * 4) / 10
        else:
            sraw = (((20000000 / df.mval1[df.msgid == 111]) - (20000000 / df.mval2[df.msgid == 111])) * 4) / 10
    elif column.lower() in v3:  # version 3 columns report the raw value directly
        sraw = df.mval1[df.msgid == 110]
    else:  # return an empty series if the column is neither v2 nor v3
        sraw = pd.Series()

    return sraw
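# Worked example of the v2 conversion above, using made-up counts for a non-NAGSAM node
# (the msgid 111 branch). This only illustrates the arithmetic, not real sensor readings.
mval1, mval2 = 95000.0, 105000.0
sraw_demo = (((20000000 / mval1) - (20000000 / mval2)) * 4) / 10
print(round(sraw_demo, 2))  # ~8.02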
def somsdata(end_time, sen, start_time):
    data = q.GetSomsData(sen, start_time, end_time)
    #data['ts'] = data['ts'].dt.round('30min')
    x = data[data.msgid == 110]
    x = x.drop(['mval2'], axis=1)

    return x
def soms(site, start, end):
    df = q.GetSomsData(site, start, end)
    df['ts'] = df['ts'].dt.round('30min')
    df = df[df.id < 7]
    df = df[df.msgid == 21]
    df = df.groupby('ts')['mval1'].mean()
    df = pd.DataFrame({'timestamp': df.index, 'mval1': df.values})

    return df
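# Self-contained sketch of the rounding-and-averaging step in soms(), on synthetic data:
# readings a few minutes apart collapse onto shared 30-minute timestamps before averaging.
import pandas as pd

demo = pd.DataFrame({
    'ts': pd.to_datetime(['2018-01-15 00:10', '2018-01-15 00:20', '2018-01-15 00:40']),
    'mval1': [700, 710, 720],
})
demo['ts'] = demo['ts'].dt.round('30min')  # 00:10 -> 00:00; 00:20 and 00:40 -> 00:30
print(demo.groupby('ts')['mval1'].mean())  # 00:00 -> 700.0, 00:30 -> 715.0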
def getsomscaldata(column="", gid=0, fdate="", tdate="", if_multi=False):
    '''
        only for landslide sensors v2 and v3
        output:  df = series of unfiltered SOMS data (calibrated/normalized) of a specific node of the defined column
        param:
            column = column name (ex. laysa)
            gid = geographic id of node [1-40]
    '''

    v2 = ['NAGSA', 'BAYSB', 'AGBSB', 'MCASB', 'CARSB', 'PEPSB', 'BLCSA']
    v3 = [
        'lpasa', 'lpasb', 'laysa', 'laysb', 'imesb', 'barsc', 'messb', 'imusc',
        'oslsc', 'mngsa', 'gaasa', 'gaasb', 'hinsa', 'hinsb', 'talsa'
    ]
    df = pd.DataFrame()

    if column.upper() in v2:
        if column.upper() == 'NAGSA':
            msgid = 26
        else:
            msgid = 112
    elif column.lower() in v3:  # if version 3
        msgid = 113
    else:
        # unknown column: return the empty frame
        return df

    df = qs.GetSomsData(siteid=column + 'm',
                        fromTime=fdate,
                        toTime=tdate,
                        targetnode=gid,
                        msgid=msgid)
    df.index = df.ts
    if if_multi:
        df = df[['id', 'mval1']]
    else:
        df = df[['mval1']]

    return df
Example #7
    #    df['norm'] = (df['r15m'] - df['r15m'].min()) / (df['r15m'].max() - df['r15m'].min())
    df['roll_sum_1d'] = df['r15m'].rolling(48).sum()
    df['norm'] = (df['roll_sum_1d'] - df['roll_sum_1d'].min()) / (
        df['roll_sum_1d'].max() - df['roll_sum_1d'].min())
    #    df ['roll_sum_3d'] = df['r15m'].rolling(144).sum()
    rainfall = df.reset_index(drop=True)

    return rainfall
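# Self-contained sketch of the normalization used above: a rolling sum over 48 samples,
# rescaled to [0, 1] by min-max scaling. The synthetic rainfall values are placeholders.
import numpy as np
import pandas as pd

rng = np.random.default_rng(0)
rain = pd.DataFrame({'r15m': rng.random(200)})
rain['roll_sum_1d'] = rain['r15m'].rolling(48).sum()
rain['norm'] = (rain['roll_sum_1d'] - rain['roll_sum_1d'].min()) / (
    rain['roll_sum_1d'].max() - rain['roll_sum_1d'].min())
print(rain['norm'].dropna().between(0, 1).all())  # True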


start = '2018-01-15'
end = '2018-10-10'
site = 'blcsbm'
gauge = 'nagtbw'

df = q.GetSomsData(site, start, end)
#df['roll_sum_1d'] = df['mval1'].rolling(48).mean()
#df = df[df.msgid == 21]
#############################################################################Rainfall
rainfall = rainfall(start, end, gauge)
rainfall = rainfall.dropna()
##############################################################################

df['ts'] = df['ts'].dt.round('30min')

n = int(df.id.max()) + 1

fig, (axes) = plt.subplots(n, 1, sharex=True, sharey=False)
fig.subplots_adjust(hspace=0)
fig.suptitle('Data points to be used for ANN ({} to {})'.format(start, end),
             fontsize=20)
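# The per-node plotting loop is not part of this excerpt. A self-contained sketch of the
# stacked-subplot pattern set up above, using synthetic series instead of SOMS data:
import matplotlib.pyplot as plt
import numpy as np

n_nodes, samples = 3, 100
fig_demo, axes_demo = plt.subplots(n_nodes, 1, sharex=True, sharey=False)
fig_demo.subplots_adjust(hspace=0)
for node, ax in enumerate(axes_demo, start=1):
    ax.plot(np.arange(samples), np.cumsum(np.random.randn(samples)), label='node {}'.format(node))
    ax.legend(loc='upper right')
plt.show()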
Example #8
import os
import sys
os.path.abspath(r'C:\Users\Vidal Marvin Gabriel\Desktop\DYNASLOPE\mycodes') #GroundDataAlertLib - line 19
sys.path.insert(1, os.path.abspath(r'C:\Users\Vidal Marvin Gabriel\Desktop\DYNASLOPE\updews-pycodes\Analysis')) #GroundDataAlertLib - line 21
import querySenslopeDb as q
import matplotlib.pyplot as plt
import pandas as pd
import data_mine_disp as dp

n = 7
##################################(Moist)
end = '2016-09-05'
col = 'imuscm'
start = '2016-07-21'
    
data = q.GetSomsData(col, start, end)
data['ts'] = data['ts'].dt.round('30min')
x = data[data.msgid == 113]

x_x = pd.DataFrame(x)
x_x = x_x.drop('mval2', axis=1)
#x = data[data.index %2 != 0].set_index('ts') #sets only odd numbers
#data[data.index %2 == 0] #sets only even numbers
#data[data.index %2 == 0].reset_index()
#data[data.index %2 == 0].set_index('ts') #sets index into timestamp
nod = x_x[x_x.id == n]
nod['moist_rate'] = nod['mval1'] - nod['mval1'].shift()
#nod['rate'] = np.where((nod['mval1'] - nod['mval1'].shift()) >= 10, (nod['mval1'] - nod['mval1'].shift()), 0)

rate = nod['moist_rate'].abs()
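# Self-contained sketch of the moisture-rate step above: the sample-to-sample change of mval1,
# which x - x.shift() computes; Series.diff() is equivalent. Values here are made up.
import pandas as pd

moist = pd.Series([620.0, 624.0, 623.0, 640.0])
moist_rate = moist.diff()
print(moist_rate.abs().tolist())  # [nan, 4.0, 1.0, 17.0]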
Example #9
                       ))  #GroundDataAlertLib - line 21
import querySenslopeDb as q
import matplotlib.pyplot as plt
import pandas as pd
import s_d_db as d
import ColumnPlotter as plter
import regression as rg
import numpy as np
from scipy.interpolate import UnivariateSpline

sen = 'lpasam'
start_time = '2017-01-02'
end_time = '2017-01-24'
#data = q.GetSomsData(sen, start_time, end_time)

data = q.GetSomsData(sen, start_time, end_time)

da = rg.regression2(data)
#da['vol'] = (da['grav'] * 1.3)
da['ts'] = da['ts'].dt.round('30min')
x = da[da.msgid == 110]
x = x.drop(['mval2', 'msgid'], axis=1)

#x.loc[(x['id'] >= 5), 'vol'] = 0.465651
#    data.loc[(x['id'] >= 5), 'vol'] = 0.345373
#    data.loc[(data['vol'] >= 0.345373), 'vol'] = 0.345373
#    m = data.ix[(data['nid'] >= 5)]
###########################################################################################################
#dfg = x.groupby (['ts','id'])['vol'].mean()
#
#da = pd.DataFrame(dfg)
def getsomsrawdata(column="", gid=0, fdate="", tdate="", if_multi=False):
    '''
        only for landslide sensors v2 and v3
        output:  sraw = series of unfiltered SOMS data (raw) of a specific node of the defined column
        param:
            column = column name (ex. laysa)
            gid = geographic id of node [1-40]
    '''

    v2 = ['NAGSA', 'BAYSB', 'AGBSB', 'MCASB', 'CARSB', 'PEPSB', 'BLCSA']
    v3 = [
        'lpasa', 'lpasb', 'laysa', 'laysb', 'imesb', 'barsc', 'messb', 'imusc',
        'oslsc', 'mngsa', 'gaasa', 'gaasb', 'hinsa', 'hinsb', 'talsa'
    ]
    df = pd.DataFrame(columns=['sraw', 'scal'])
    sraw = pd.DataFrame()
    #    print('getsomsdata: ' + column + ',' + str(gid))
    try:
        df = qs.GetSomsData(siteid=column + 'm',
                            fromTime=fdate,
                            toTime=tdate,
                            targetnode=gid)
    except:
        print('No data available for ' + column.upper())
        return df

    df.index = df.ts

    if column.upper() in v2:
        # v2 packets: derive the raw value from the mval1/mval2 pair
        if column.upper() == 'NAGSA':
            if if_multi:
                df = df[df.msgid == 21]
                sraw['mval1'] = (((8000000 / df.mval1) - (8000000 / df.mval2)) * 4) / 10
                sraw['id'] = df['id']
            else:
                sraw['mval1'] = (((8000000 / df.mval1[df.msgid == 21]) -
                                  (8000000 / df.mval2[df.msgid == 21])) * 4) / 10
        else:
            if if_multi:
                df = df[df.msgid == 111]
                sraw['mval1'] = (((20000000 / df.mval1) - (20000000 / df.mval2)) * 4) / 10
                sraw['id'] = df['id']
            else:
                sraw['mval1'] = (((20000000 / df.mval1[df.msgid == 111]) -
                                  (20000000 / df.mval2[df.msgid == 111])) * 4) / 10

    elif column.lower() in v3:  # if version 3
        df = df[df.msgid == 110]
        sraw = df[['id', 'mval1']] if if_multi else df[['mval1']]

    else:
        sraw = pd.Series()

    return sraw
Example #11
def genproc(col, window, config, fixpoint, realtime=False, comp_vel=True):

    monitoring = q.GetSomsData(col, window.offsetstart, window.end)

    try:
        LastGoodData = q.GetLastGoodData(monitoring, col.nos)
        q.PushLastGoodData(LastGoodData, col.name)
        LastGoodData = q.GetLastGoodDataFromDb(col.name)
    except:
        LastGoodData = q.GetLastGoodDataFromDb(col.name)

    #identify the node ids with no data at start of monitoring window
    NodesNoInitVal = GetNodesWithNoInitialData(monitoring, col.nos,
                                               window.offsetstart)

    #get last good data prior to the monitoring window (LGDPM)
    if len(NodesNoInitVal) != 0:
        lgdpm = q.GetSingleLGDPM(col.name, NodesNoInitVal, window.offsetstart)
        if len(lgdpm) != 0:
            lgdpm = flt.applyFilters(lgdpm)
            lgdpm = lgdpm.sort_index(ascending=False).drop_duplicates('id')

        if len(lgdpm) != 0:
            monitoring = monitoring.append(lgdpm)

    monitoring = monitoring.loc[monitoring.id <= col.nos]

    invalid_nodes = q.GetNodeStatus(1)
    invalid_nodes = invalid_nodes[invalid_nodes.site ==
                                  col.name]['node'].values
    monitoring = monitoring.loc[~monitoring.id.isin(invalid_nodes)]

    #assigns timestamps from LGD to be timestamp of offsetstart
    monitoring.loc[(monitoring.ts < window.offsetstart) |
                   (pd.isnull(monitoring.ts)), ['ts']] = window.offsetstart

    nodes_noval = GetNodesWithNoData(monitoring, col.nos)
    nodes_nodata = pd.DataFrame({
        'name': [0] * len(nodes_noval),
        'id': nodes_noval,
        'ts': [window.offsetstart] * len(nodes_noval)
    })
    monitoring = monitoring.append(nodes_nodata)

    max_min_df, max_min_cml = err.cml_noise_profiling(monitoring, config,
                                                      fixpoint, col.nos)

    monitoring = monitoring.drop_duplicates(['ts', 'id'])
    monitoring = monitoring.set_index('ts')

    #resamples xz and xy values per node using forward fill
    monitoring = monitoring.groupby('id').apply(
        resamplenode, window=window).reset_index(level=1).set_index('ts')

    nodal_proc_monitoring = monitoring.groupby('id')

    if not realtime:
        to_smooth = config.io.to_smooth
        to_fill = config.io.to_fill
    else:
        to_smooth = config.io.rt_to_smooth
        to_fill = config.io.rt_to_fill

    filled_smoothened = nodal_proc_monitoring.apply(
        fill_smooth,
        offsetstart=window.offsetstart,
        end=window.end,
        roll_window_numpts=window.numpts,
        to_smooth=to_smooth,
        to_fill=to_fill)
    filled_smoothened = filled_smoothened[['xz', 'xy', 'x', 'y', 'z',
                                           'name']].reset_index()

    filled_smoothened['depth'] = filled_smoothened['x'] / np.abs(
        filled_smoothened['x']) * np.sqrt(col.seglen**2 -
                                          filled_smoothened['xz']**2 -
                                          filled_smoothened['xy']**2)
    filled_smoothened['depth'] = filled_smoothened['depth'].fillna(
        value=col.seglen)

    monitoring = filled_smoothened.set_index('ts')

    # disp_vel is expected from a velocity-computation step (cf. comp_vel) not included in this excerpt
    return procdata(col, disp_vel.sort(), max_min_df, max_min_cml)
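# genproc() resamples each node independently via groupby('id').apply(resamplenode, ...).
# A self-contained sketch of that per-group resample-and-forward-fill pattern on synthetic data;
# the 30-minute frequency and the resample_node() helper are assumptions, not the project's resamplenode.
import pandas as pd

def resample_node(group, freq='30min'):
    # regularize one node's series onto a fixed grid and forward-fill the gaps
    return group.drop('id', axis=1).set_index('ts').sort_index().resample(freq).ffill()

demo = pd.DataFrame({
    'ts': pd.to_datetime(['2017-01-02 00:00', '2017-01-02 01:30',
                          '2017-01-02 00:30', '2017-01-02 01:00']),
    'id': [1, 1, 2, 2],
    'mval1': [610.0, 615.0, 700.0, 705.0],
})
print(demo.groupby('id').apply(resample_node))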