def alert_toDB(df, table_name, window):
    """Insert or update the latest sensor alert for a site.

    Appends a new row to senslopedb.<table_name> when no comparable recent
    alert exists or the alert level changed; otherwise only advances the
    matching row's updateTS to the window end.

    df         -- single-row DataFrame with 'site' and 'alert' columns
    table_name -- name of the alert table in the senslopedb schema
    window     -- object with an 'end' datetime attribute
    """
    # NOTE(review): SQL is built by string interpolation; values come from
    # internal tables so injection exposure is limited, but parameterized
    # queries would be safer.
    query = ("SELECT * FROM senslopedb.%s WHERE site = '%s' "
             "and timestamp <= '%s' AND updateTS >= '%s' "
             "ORDER BY timestamp DESC LIMIT 1"
             % (table_name, df.site.values[0], window.end,
                window.end - timedelta(hours=1)))
    try:
        df2 = q.GetDBDataFrame(query)
    except Exception:
        # Table may not exist yet; treat as "no previous alert".
        df2 = pd.DataFrame()
    if len(df2) == 0 or df2.alert.values[0] != df.alert.values[0]:
        # New alert level (or first alert): append a fresh row.
        engine = create_engine('mysql://' + q.Userdb + ':' + q.Passdb + '@' +
                               q.Hostdb + ':3306/' + q.Namedb)
        df.to_sql(name=table_name, con=engine, if_exists='append',
                  schema=q.Namedb, index=False)
    else:
        # Same alert level: just extend the validity of the existing row.
        # (The original redundant 'elif alerts equal' is equivalent to 'else'.)
        db, cur = q.SenslopeDBConnect(q.Namedb)
        query = ("UPDATE senslopedb.%s SET updateTS='%s' WHERE site = '%s' "
                 "and source = 'sensor' and alert = '%s' and timestamp = '%s'"
                 % (table_name, window.end, df2.site.values[0],
                    df2.alert.values[0],
                    pd.to_datetime(str(df2.timestamp.values[0]))))
        cur.execute(query)
        db.commit()
        db.close()
def to_db(df):
    """Insert or update the current uptime window in the 'uptime' table.

    df -- single-row DataFrame with 'ts', 'ts_updated' and 'site_count'
    columns describing the uptime window being recorded.
    """
    print df
    if not qdb.DoesTableExist('uptime'):
        create_uptime()
    ts = pd.to_datetime(df['ts'].values[0])
    # Latest uptime row that either covers ts or was last updated within
    # the past half hour (i.e. a row that can be extended in place).
    query = "SELECT * FROM uptime "
    query += "WHERE (ts <= '%s' " % ts
    query += " AND ts_updated >= '%s') " % ts
    query += "OR (ts_updated >= '%s' " % (ts - timedelta(hours=0.5))
    query += " AND ts_updated <= '%s') " % ts
    query += "ORDER BY ts DESC LIMIT 1"
    prev_uptime = qdb.GetDBDataFrame(query)
    if len(prev_uptime) == 0 or prev_uptime['site_count'].values[0] != df[
            'site_count'].values[0]:
        # No extendable row, or the site count changed: insert a new row.
        qdb.PushDBDataFrame(df, 'uptime', index=False)
    elif pd.to_datetime(
            prev_uptime['ts_updated'].values[0]) < df['ts_updated'].values[0]:
        # Same site count and the new window extends further: bump ts_updated
        # on the existing row instead of inserting a duplicate.
        query = "UPDATE uptime "
        query += "SET ts_updated = '%s' " % pd.to_datetime(
            df['ts_updated'].values[0])
        query += "WHERE uptime_id = %s" % prev_uptime['uptime_id'].values[0]
        db, cur = qdb.SenslopeDBConnect(qdb.Namedb)
        cur.execute(query)
        db.commit()
        db.close()
def GetASTIdata(site, rain_noah, offsetstart): ##INPUT: ##site; str ##offsetstart; datetime; starting point of interval with offset to account for moving window operations ##rain_noah; float; rain noah id of noah rain gauge near the site ##OUTPUT: ##df; dataframe; rainfall from noah rain gauge #data from noah rain gauge saved at local database try: if not math.isnan(rain_noah): rain_noah = int(rain_noah) db, cur = q.SenslopeDBConnect(q.Namedb) query = "select timestamp,rval from senslopedb.rain_noah_%s" % str( rain_noah) query = query + " where timestamp >= timestamp('%s')" % offsetstart query = query + " order by timestamp desc" df = q.GetDBDataFrame(query) df.columns = ['timestamp', 'rain'] df.timestamp = pd.to_datetime(df.timestamp) df.set_index('timestamp', inplace=True) return df except: print 'Table senslopedb.rain_noah_' + str(rain_noah) + " doesn't exist" df = pd.DataFrame(data=None) return df
def to_MySQL(df, table_name): engine = create_engine('mysql://' + q.Userdb + ':' + q.Passdb + '@' + q.Hostdb + ':3306/' + q.Namedb) if table_name == 'rain_gauge': site = df['dev_id'].values[0] elif table_name == 'rain_props': site = df['name'].values[0] try: df.to_sql(name=table_name, con=engine, if_exists='append', schema=q.Namedb, index=False) print site, ': success' except: try: db, cur = q.SenslopeDBConnect(q.Namedb) if table_name == 'rain_gauge': query = "DELETE FROM %s WHERE dev_id = '%s'" % (table_name, site) elif table_name == 'rain_props': query = "DELETE FROM %s WHERE name = '%s'" % (table_name, site) cur.execute(query) db.commit() db.close() df.to_sql(name=table_name, con=engine, if_exists='append', schema=q.Namedb, index=False) print site, ': updated' except: print site, ': error'
def NOAHRGCoord():
    """Return NOAH rain gauge coordinates as a DataFrame.

    Renames latitude/longitude to lat/lon, prefixes dev_id with
    'rain_noah_' and tags every row with type 'NOAHRG'.
    """
    # BUGFIX: removed an unused q.SenslopeDBConnect() call that leaked a
    # connection; GetDBDataFrame manages its own connection.
    query = "SELECT * FROM senslopedb.rain_gauge"
    RGCoord = q.GetDBDataFrame(query)
    RGCoord['dev_id'] = RGCoord.dev_id.apply(lambda x: 'rain_noah_' + str(x))
    RGCoord = RGCoord.rename(columns={'latitude': 'lat', 'longitude': 'lon'})
    RGCoord['type'] = 'NOAHRG'
    return RGCoord
def write_next_task(col_name, timestamp):
    """Queue a task_RegenerateAlert.py run for col_name at timestamp.

    Returns 1 on success (legacy convention of this codebase).
    """
    query = """ insert into senslopedb.to_run_scripts(script_name,stat) values ('task_RegenerateAlert.py %s %s','WAITING') """ % (col_name, str(timestamp))
    db, cur = qdb.SenslopeDBConnect('senslopedb')
    try:
        cur.execute(query)
        db.commit()
    finally:
        # BUGFIX: the connection was previously never closed.
        db.close()
    return 1
def write_initial_task(col_name):
    """Queue the first task_RegenerateAlert.py run for col_name.

    Uses a fixed seed timestamp (2017-01-01 00:30) as the regeneration
    starting point. Returns 1 on success (legacy convention).
    """
    timestamp = pd.to_datetime('2017-01-01 00:30:00')
    query = """ insert into senslopedb.to_run_scripts(script_name,stat) values ('task_RegenerateAlert.py %s %s','WAITING') """ % (col_name, str(timestamp))
    db, cur = qdb.SenslopeDBConnect('senslopedb')
    try:
        cur.execute(query)
        db.commit()
    finally:
        # BUGFIX: db.close() was commented out, leaking the connection.
        db.close()
    return 1
def DeleteOldNOAHdata(): #deletes data older than 15days dfRain = qs.GetRainNOAHList() db, cur = qs.SenslopeDBConnect(qs.Namedb) cur.execute("use " + qs.Namedb) for noahid in dfRain: print 'deleting old noah data for rain_noah_', noahid oldestTSneeded = str( pd.to_datetime(dt.now().strftime('%Y-%m-%d %H:%M:%S')) - td(15)) query = """DELETE FROM rain_noah_%s WHERE timestamp < TIMESTAMP('%s')""" % ( noahid, oldestTSneeded) cur.execute(query) db.commit() db.close()
def alert_toDB(df, table_name, window):
    """Insert or update the latest 'noadjfilt' alert for a site.

    Appends a new row when the alert level changed and an identical row is
    not already present; otherwise only advances the matching row's
    updateTS when the new one is later.

    df         -- single-row DataFrame with 'site', 'alert', 'updateTS'
    table_name -- alert table name (e.g. 'site_level_alert')
    window     -- object with an 'end' datetime attribute
    """
    query = ("SELECT * FROM %s WHERE site = '%s' and source = 'noadjfilt' "
             "and timestamp <= '%s' AND updateTS >= '%s' "
             "ORDER BY timestamp DESC LIMIT 1"
             % (table_name, df.site.values[0], window.end,
                window.end - timedelta(hours=0.5)))
    try:
        df2 = q.GetDBDataFrame(query)
    except Exception:
        # Table may not exist yet; treat as "no previous alert".
        df2 = pd.DataFrame()
    try:
        same_alert = df2['alert'].values[0] == df['alert'].values[0]
    except (KeyError, IndexError):
        # df2 is empty (no previous alert row).
        same_alert = False
    # Check whether this exact row was already written, to avoid duplicates.
    query = "SELECT EXISTS(SELECT * FROM %s" % table_name
    query += " WHERE timestamp = '%s' AND site = '%s'" % (pd.to_datetime(
        df['updateTS'].values[0]), df['site'].values[0])
    if table_name == 'site_level_alert':
        query += " AND source = 'noadjfilt'"
    query += ")"
    # Collapsed the original if/else into a direct boolean expression.
    inDB = q.GetDBDataFrame(query).values[0][0] == 1
    if (len(df2) == 0 or not same_alert) and not inDB:
        engine = create_engine('mysql://' + q.Userdb + ':' + q.Passdb + '@' +
                               q.Hostdb + ':3306/' + q.Namedb)
        df.to_sql(name=table_name, con=engine, if_exists='append',
                  schema=q.Namedb, index=False)
    elif same_alert and df2['updateTS'].values[0] < df['updateTS'].values[0]:
        db, cur = q.SenslopeDBConnect(q.Namedb)
        query = ("UPDATE senslopedb.%s SET updateTS='%s' WHERE site = '%s' "
                 "and source = 'noadjfilt' and alert = '%s' and timestamp = '%s'"
                 % (table_name, window.end, df2.site.values[0],
                    df2.alert.values[0],
                    pd.to_datetime(str(df2.timestamp.values[0]))))
        cur.execute(query)
        db.commit()
        db.close()
def sendColumnNamesToSocket(host, port): try: db, cur = qs.SenslopeDBConnect('senslopedb') print '>> Connected to database' #Get all column names with installation status of "Installed" queryColumns = 'SELECT name, version FROM site_column WHERE installation_status = "Installed" ORDER BY s_id ASC' try: cur.execute(queryColumns) except: print '>> Error parsing database' columns = cur.fetchall() # print columns for column in columns: columnName = column[0] sendDataFullCycle(host, port, columnName) print columnName except IndexError: print '>> Error in writing extracting database data to files..'
def removeinvpub(df):
    """Best-effort removal of an invalidated public alert (and its internal twin).

    df -- single-row DataFrame with 'site', 'alert' and 'timestamp'
    identifying the alert being invalidated.

    All failures (no matching row, DB errors) are deliberately swallowed so
    that invalidation never interrupts the caller.
    """
    try:
        ts = pd.to_datetime(df['timestamp'].values[0])
        db, cur = q.SenslopeDBConnect(q.Namedb)
        # Find the matching public alert of the same level for this site,
        # raised the same day and updated within four hours of ts.
        query = "SELECT * FROM (SELECT * FROM site_level_alert WHERE site = '%s' and source = 'public' and alert like '%s' and timestamp >= '%s' and updateTS <= '%s' order by timestamp desc) AS sub GROUP BY source" % (
            df['site'].values[0], df['alert'].values[0] + '%', ts.date(),
            ts + timedelta(hours=4))
        df = q.GetDBDataFrame(query)
        ts = pd.to_datetime(df['timestamp'].values[0])
        # Delete the public alert row found above...
        query = "DELETE FROM site_level_alert where site = '%s' and source = 'public' and alert = '%s'" % (
            df['site'].values[0], df['alert'].values[0])
        query += " and timestamp = '%s'" % ts
        cur.execute(query)
        db.commit()
        # ...and the corresponding internal alert(s) at the same timestamp.
        query = "DELETE FROM site_level_alert where site = '%s' and source = 'internal' and alert like '%s'" % (
            df['site'].values[0], df['alert'].values[0] + '%')
        query += " and timestamp = '%s'" % ts
        cur.execute(query)
        db.commit()
        db.close()
    except:
        # NOTE(review): bare except keeps this best-effort, but it also
        # hides real DB errors -- consider narrowing and logging.
        pass
def alert_toDB(df):
    """Insert or update the latest rainfall alert for a site.

    df -- single-row DataFrame with 'site' and 'alert' columns.
    Relies on the module-level 'end' datetime for the processing window.
    """
    query = ("SELECT * FROM senslopedb.site_level_alert WHERE site = '%s' "
             "AND source = 'rain' AND updateTS <= '%s' "
             "ORDER BY updateTS DESC LIMIT 1" % (df.site.values[0], end))
    df2 = q.GetDBDataFrame(query)
    if len(df2) == 0 or df2.alert.values[0] != df.alert.values[0]:
        # New alert level (or first alert): append a fresh row.
        df['updateTS'] = end
        engine = create_engine('mysql://' + q.Userdb + ':' + q.Passdb + '@' +
                               q.Hostdb + ':3306/' + q.Namedb)
        df.to_sql(name='site_level_alert', con=engine, if_exists='append',
                  schema=q.Namedb, index=False)
    else:
        # Same alert level: extend the validity of the existing row.
        # (The original redundant 'elif alerts equal' is equivalent to 'else'.)
        db, cur = q.SenslopeDBConnect(q.Namedb)
        query = ("UPDATE senslopedb.site_level_alert SET updateTS='%s' "
                 "WHERE site = '%s' and source = 'rain' and alert = '%s' "
                 "and timestamp = '%s'"
                 % (end, df2.site.values[0], df2.alert.values[0],
                    pd.to_datetime(str(df2.timestamp.values[0]))))
        cur.execute(query)
        db.commit()
        db.close()
if not path in sys.path: sys.path.insert(1, path) del path import querySenslopeDb as qs #column = raw_input('Enter column name: ') #gid = int(raw_input('Enter id: ')) #fdate = raw_input('Enter Start Date: ') #tdate = raw_input('Enter End Date: ') fdate = "2015-01-01 00:00:00" tdate = time.strftime("%Y-%m-%d %H:%M") try: db, cur = qs.SenslopeDBConnect('senslopedb') print '>> Connected to database' #Get all column names with installation status of "Installed" queryColumns = 'SELECT name, version FROM site_column WHERE installation_status = "Installed" ORDER BY s_id ASC' try: cur.execute(queryColumns) except: print '>> Error parsing database' columns = cur.fetchall() print columns for column in columns: columnName = column[0] if len(columnName) <= 6:
def execQuery(query):
    """Execute a single write statement against the Senslope DB and commit it.

    query -- full SQL statement; the caller is responsible for escaping.
    """
    db, cur = q.SenslopeDBConnect(q.Namedb)
    try:
        cur.execute(query)
        db.commit()
    finally:
        # BUGFIX: close the connection even when execute/commit raises.
        db.close()