def create_rainfall_alerts(connection='analysis'):
    """Create the ``rainfall_alerts`` table if it does not exist yet.

    The table stores one row per rainfall alert raised for a site,
    with the triggering rain gauge, cumulative rainfall and threshold.

    :param connection: db connection key to use (default ``'analysis'``).
    """
    # Nothing to do when the table is already present.
    if does_table_exist('rainfall_alerts', connection=connection):
        return
    ddl = [
        "CREATE TABLE `rainfall_alerts` (",
        " `ra_id` INT(10) UNSIGNED NOT NULL AUTO_INCREMENT,",
        " `ts` TIMESTAMP NULL,",
        " `site_id` TINYINT(3) UNSIGNED NOT NULL,",
        " `rain_id` SMALLINT(5) UNSIGNED NOT NULL,",
        " `rain_alert` CHAR(1) NOT NULL,",
        " `cumulative` DECIMAL(5,2) UNSIGNED NULL,",
        " `threshold` DECIMAL(5,2) UNSIGNED NULL,",
        " PRIMARY KEY (`ra_id`),",
        " INDEX `fk_sites1_idx` (`site_id` ASC),",
        " INDEX `fk_rainfall_gauges1_idx` (`rain_id` ASC),",
        " UNIQUE INDEX `uq_rainfall_alerts` (`ts` ASC, `site_id` ASC, `rain_alert` ASC),",
        " CONSTRAINT `fk_sites1`",
        " FOREIGN KEY (`site_id`)",
        " REFERENCES `sites` (`site_id`)",
        " ON DELETE CASCADE",
        " ON UPDATE CASCADE,",
        " CONSTRAINT `fk_rainfall_gauges1`",
        " FOREIGN KEY (`rain_id`)",
        " REFERENCES `rainfall_gauges` (`rain_id`)",
        " ON DELETE CASCADE",
        " ON UPDATE CASCADE)",
    ]
    db.write("".join(ddl), connection=connection)
def update_shift_tags():
    """Refresh the ``,alert-mon`` group tag on monitoring contacts.

    First strips the tag from everyone who has it (old shift), then
    re-applies it to the contacts of the most recent shift schedule.
    """
    today = dt.today().strftime("%Y-%m-%d %H:%M:%S")
    print('Updating shift tags for', today)
    # remove tags from old shifts
    query = (
        "update senslopedb.dewslcontacts set grouptags = "
        "replace(grouptags,',alert-mon','') where grouptags like '%alert-mon%'"
    )
    dbio.write(query, 'update_shift_tags')
    # update the tags of current shifts.
    # BUG FIX: the concatenated fragments previously lacked separating
    # spaces, yielding invalid SQL such as "as t2set", "')where" and
    # "ort1.nickname".
    query = (
        "update dewslcontacts as t1, "
        "(select timestamp,iompmt,iompct,oomps,oompmt,oompct from monshiftsched"
        " where timestamp < '%s' "
        " order by timestamp desc limit 1"
        ") as t2 "
        "set t1.grouptags = concat(t1.grouptags,',alert-mon') "
        "where t1.nickname = t2.iompmt or "
        "t1.nickname = t2.iompct or "
        "t1.nickname = t2.oomps or "
        "t1.nickname = t2.oompmt or "
        "t1.nickname = t2.oompct") % (today)
    dbio.write(query, 'update_shift_tags')
def log_runtime_status(script_name, status):
    """Log a script's runtime status ('alive' or an error state) to the
    ``runtimelog`` table on the SMS database host.

    :param script_name: Script file name.
    :param status: Script runtime status.
    :type script_name: str
    :type status: str
    """
    sc = mem.server_config()
    gsm_modules_host = sc["resource"]["smsdb"]
    if status == 'alive':
        # Round the timestamp down to the nearest 10-minute mark so
        # repeated heartbeats within the window collapse to one row
        # (the INSERT IGNORE below drops duplicates).
        ts = dt.today()
        diff = (ts.minute % 10) * 60 + ts.second
        ts = ts - td(seconds=diff)
        logtimestamp = ts.strftime("%Y-%m-%d %H:%M:00")
    else:
        logtimestamp = dt.today().strftime("%Y-%m-%d %H:%M:00")
    print(">> Logging runtime '" + status + "' at " + logtimestamp)
    query = ("insert ignore into runtimelog (ts, script_name, log_details) "
             "Values ('%s','%s','%s')") % (logtimestamp, script_name, status)
    db.write(query, 'log_runtime_status', False, gsm_modules_host)
    # BUG FIX: a dangling unterminated triple-quote (""") followed the
    # function body in the original source; it has been removed.
def create_alert_status():
    """Create the ``alert_status`` table: acknowledgement and validity
    records (valid / invalid / validating) for operational triggers.
    """
    ddl = [
        "CREATE TABLE `alert_status` (",
        " `stat_id` INT(7) UNSIGNED NOT NULL AUTO_INCREMENT,",
        " `ts_last_retrigger` TIMESTAMP NULL,",
        " `trigger_id` INT(10) UNSIGNED NULL,",
        " `ts_set` TIMESTAMP NULL,",
        " `ts_ack` TIMESTAMP NULL,",
        " `alert_status` TINYINT(1) NULL",
        " COMMENT 'alert_status:\n-1 invalid\n0 validating\n1 valid',",
        " `remarks` VARCHAR(450) NULL,",
        " `user_id` SMALLINT(6) UNSIGNED NULL,",
        " PRIMARY KEY (`stat_id`),",
        " INDEX `fk_alert_status_operational_triggers1_idx` (`trigger_id` ASC),",
        " CONSTRAINT `fk_alert_status_operational_triggers1`",
        " FOREIGN KEY (`trigger_id`)",
        " REFERENCES `operational_triggers` (`trigger_id`)",
        " ON DELETE NO ACTION",
        " ON UPDATE CASCADE,",
        " INDEX `fk_alert_status_users1_idx` (`user_id` ASC),",
        " CONSTRAINT `fk_alert_status_users1`",
        " FOREIGN KEY (`user_id`)",
        " REFERENCES `users` (`user_id`)",
        " ON DELETE NO ACTION",
        " ON UPDATE CASCADE,",
        " UNIQUE INDEX `uq_alert_status`",
        " (`ts_last_retrigger` ASC, `trigger_id` ASC))",
    ]
    db.write("".join(ddl))
def create_rainfall_priorities():
    """Create the ``rainfall_priorities`` table; records the distance of
    nearby rain gauges to sites for rainfall alert analysis.
    """
    ddl = [
        "CREATE TABLE `rainfall_priorities` (",
        " `priority_id` SMALLINT(5) UNSIGNED NOT NULL AUTO_INCREMENT,",
        " `rain_id` SMALLINT(5) UNSIGNED NOT NULL,",
        " `site_id` TINYINT(3) UNSIGNED NOT NULL,",
        " `distance` DECIMAL(5,2) UNSIGNED NOT NULL,",
        " PRIMARY KEY (`priority_id`),",
        " INDEX `fk_rainfall_priorities_sites1_idx` (`site_id` ASC),",
        " INDEX `fk_rainfall_priorities_rain_gauges1_idx` (`rain_id` ASC),",
        " UNIQUE INDEX `uq_rainfall_priorities` (`site_id` ASC, `rain_id` ASC),",
        " CONSTRAINT `fk_rainfall_priorities_sites1`",
        " FOREIGN KEY (`site_id`)",
        " REFERENCES `sites` (`site_id`)",
        " ON DELETE CASCADE",
        " ON UPDATE CASCADE,",
        " CONSTRAINT `fk_rainfall_priorities_rain_gauges1`",
        " FOREIGN KEY (`rain_id`)",
        " REFERENCES `rainfall_gauges` (`rain_id`)",
        " ON DELETE CASCADE",
        " ON UPDATE CASCADE)",
    ]
    db.write("".join(ddl))
def main():
    """Reset the ``data_presence_loggers`` table: clear all rows, then
    hand an empty frame (with the expected schema) to ``dftosql``.
    """
    print(datetime.now().strftime("%d-%b-%Y (%H:%M:%S)"))
    empty = pd.DataFrame(
        columns=['logger_id', 'presence', 'last_data', 'ts_updated',
                 'diff_days'])
    # Wipe the table before re-populating it.
    db.write("DELETE FROM data_presence_loggers", connection='analysis')
    dftosql(empty)
def process_ack_to_alert(sms):
    """Process an "ACK <stat_id> <validity> <remarks>" SMS from staff and
    record the acknowledgement in ``alert_status``.

    :param sms: inbound SMS object; reads ``sms.msg``, ``sms.sim_num``
        and ``sms.ts``.
    :returns: False when the alert id cannot be parsed (message skipped);
        True otherwise (including replies asking the sender to resend).
    """
    try:
        stat_id = re.search(r"(?<=K )\d+(?= )", sms.msg, re.IGNORECASE).group(0)
    except AttributeError:
        # BUG FIX: re.search returns None on no match, so .group(0)
        # raises AttributeError — the old code caught IndexError, which
        # never fires here, and crashed instead of returning False.
        errmsg = "Error in parsing alert id. Please try again"
        # smstables.write_outbox(errmsg,sms.sim_num)
        return False
    user_id, nickname, def_gsm_id = get_name_of_staff(sms.sim_num)
    print(user_id, nickname, sms.msg)
    # Messages relayed by a server carry the real sender as "-<name> from".
    if re.search("server", nickname.lower()):
        try:
            nickname = re.search("(?<=-).+(?= from)", sms.msg).group(0)
        except AttributeError:
            print("Error in processing nickname")
    try:
        remarks = re.search(r"(?<=\d ).+(?=($|\r|\n))", sms.msg,
                            re.IGNORECASE).group(0)
    except AttributeError:
        errmsg = "Please put in your remarks."
        smstables.write_outbox(message=errmsg, recipients=sms.sim_num,
                               gsm_id=def_gsm_id, table='users')
        return True
    try:
        alert_status = re.search("(in)*valid(ating)*", remarks,
                                 re.IGNORECASE).group(0)
        remarks = remarks.replace(alert_status, "", 1).strip()
    except AttributeError:
        errmsg = ("Please put in the alert status validity."
                  " i.e (VALID, INVALID, VALIDATING)")
        smstables.write_outbox(message=errmsg, recipients=sms.sim_num,
                               gsm_id=def_gsm_id, table='users')
        return True
    alert_status_dict = {"validating": 0, "valid": 1, "invalid": -1}
    # Escape quotes so the remarks can be embedded in the SQL literal.
    remarks = remarks.replace("'", r"\'").replace('"', r'\"')
    query = ("update alert_status set user_id = %d, alert_status = %d, "
             "ts_ack = '%s', remarks = '%s' where stat_id = %s") % (
                 user_id, alert_status_dict[alert_status.lower()], sms.ts,
                 remarks, stat_id)
    dbio.write(query=query, resource="sensor_data")
    # Broadcast the acknowledgement to the monitoring shift.
    contacts = shift.get_mobile(get_IOMP() + ['CT Phone'])
    message = ("\nAlert ID %s ACK by %s on %s\nStatus: %s\n"
               "Remarks: %s") % (stat_id, nickname, sms.ts, alert_status,
                                 remarks)
    smstables.write_outbox(message=message, recipients=contacts,
                           with_mobile_id=True, table='users')
    return True
def set_static_variable(name=""):
    """Refresh cached static variables from the ``static_variables`` table.

    Each row describes a query to run and where to cache the result.
    With ``name`` given, only that variable is refreshed; otherwise all.

    :param name: optional variable name filter.
    :returns: None normally; False when no variable rows were found.
    :raises MySQLdb.OperationalError: on database connection errors.
    """
    query = ("Select name, query, data_type, ts_updated, resource from "
             "static_variables")
    if name != "":
        query += " where name = '%s'" % (name)
    refreshed_at = dt.now().strftime('%Y-%m-%d %H:%M:%S')
    try:
        variables = dbio.read(query=query, resource='common_data')
    except MySQLdb.ProgrammingError:
        print(">> static_variables table does not exist on host")
        return
    if not variables:
        print("Error getting static variable information")
        return False
    for row in variables:
        variable_info = VariableInfo(row)
        # Run the variable's own query in the format its type demands.
        if variable_info.type == 'data_frame':
            static_output = dbio.df_read(query=variable_info.query,
                                         resource=variable_info.resource)
        elif variable_info.type == 'dict':
            static_output = dict_format(variable_info.query, variable_info)
        else:
            static_output = dbio.read(query=variable_info.query,
                                      resource=variable_info.resource)
        if static_output is None:
            warnings.warn('Query error: ' + variable_info.name, stacklevel=2)
        else:
            # Cache the value and stamp the refresh time back to the table.
            memory.set(variable_info.name, static_output)
            query_ts_update = ("UPDATE static_variables SET "
                               " ts_updated ='%s' WHERE name ='%s'") % (
                                   refreshed_at, variable_info.name)
            dbio.write(query=query_ts_update, resource='common_data')
            print(variable_info.name, "success")
def create_tsm_alerts():
    """Create the ``tsm_alerts`` table: subsurface (TSM) sensor alert
    levels over time, one row per (ts, tsm_id).
    """
    ddl = [
        "CREATE TABLE `tsm_alerts` (",
        " `ta_id` INT(10) UNSIGNED NOT NULL AUTO_INCREMENT,",
        " `ts` TIMESTAMP NULL,",
        " `tsm_id` SMALLINT(5) UNSIGNED NOT NULL,",
        " `alert_level` TINYINT(2) NOT NULL,",
        " `ts_updated` TIMESTAMP NULL,",
        " PRIMARY KEY (`ta_id`),",
        " UNIQUE INDEX `uq_tsm_alerts` (`ts` ASC, `tsm_id` ASC),",
        " INDEX `fk_tsm_alerts_tsm_sensors1_idx` (`tsm_id` ASC),",
        " CONSTRAINT `fk_tsm_alerts_tsm_sensors1`",
        " FOREIGN KEY (`tsm_id`)",
        " REFERENCES `tsm_sensors` (`tsm_id`)",
        " ON DELETE NO ACTION",
        " ON UPDATE CASCADE)",
    ]
    db.write("".join(ddl))
def create_marker_alerts_table():
    """Create the ``marker_alerts`` table (surficial marker movement
    alerts) if it does not already exist.
    """
    ddl = [
        "CREATE TABLE IF NOT EXISTS `marker_alerts` (",
        " `ma_id` INT(11) UNSIGNED NOT NULL AUTO_INCREMENT, ",
        " `data_id` SMALLINT(6) UNSIGNED, ",
        " `displacement` FLOAT, ",
        " `time_delta` FLOAT, ",
        " `alert_level` TINYINT(1), ",
        " PRIMARY KEY (ma_id), ",
        " INDEX `fk_marker_alerts_marker_data_idx` (`data_id` ASC), ",
        " CONSTRAINT `fk_marker_alerts_marker_data` ",
        " FOREIGN KEY (`data_id`) ",
        " REFERENCES `marker_data` (`data_id`) ",
        " ON DELETE CASCADE ",
        # BUG FIX: the statement previously ended "ON UPDATE CASCADE"
        # without the closing parenthesis of CREATE TABLE — invalid SQL.
        " ON UPDATE CASCADE)",
    ]
    db.write("".join(ddl))
def create_rainfall_gauges():
    """Create the ``rainfall_gauges`` table: the catalogue of rain gauges
    available for rainfall alert analysis.
    """
    ddl = [
        "CREATE TABLE `rainfall_gauges` (",
        " `rain_id` SMALLINT(5) UNSIGNED NOT NULL AUTO_INCREMENT,",
        " `gauge_name` VARCHAR(5) NOT NULL,",
        " `data_source` VARCHAR(8) NOT NULL,",
        " `latitude` DECIMAL(9,6) UNSIGNED NOT NULL,",
        " `longitude` DECIMAL(9,6) UNSIGNED NOT NULL,",
        " `date_activated` DATE NOT NULL,",
        " `date_deactivated` DATE NULL,",
        " PRIMARY KEY (`rain_id`),",
        " UNIQUE INDEX `gauge_name_UNIQUE` (`gauge_name` ASC))",
    ]
    db.write("".join(ddl))
def create_node_alerts():
    """Create the ``node_alerts`` table: per-node displacement and
    velocity alert flags for TSM sensors (written to the local db).
    """
    ddl = [
        "CREATE TABLE `node_alerts` (",
        " `na_id` INT(5) UNSIGNED NOT NULL AUTO_INCREMENT,",
        " `ts` TIMESTAMP NOT NULL,",
        " `tsm_id` SMALLINT(5) UNSIGNED NOT NULL,",
        " `node_id` SMALLINT(5) UNSIGNED NOT NULL,",
        " `disp_alert` TINYINT(1) NOT NULL DEFAULT 0,",
        " `vel_alert` TINYINT(1) NOT NULL DEFAULT 0,",
        " PRIMARY KEY (`na_id`),",
        " UNIQUE INDEX `uq_node_alerts` (`ts` ASC, `tsm_id` ASC, `node_id` ASC),",
        " INDEX `fk_node_alerts_tsm_sensors1_idx` (`tsm_id` ASC),",
        " CONSTRAINT `fk_node_alerts_tsm_sensors1`",
        " FOREIGN KEY (`tsm_id`)",
        " REFERENCES `tsm_sensors` (`tsm_id`)",
        " ON DELETE NO ACTION",
        " ON UPDATE CASCADE)",
    ]
    db.write("".join(ddl), connection='local')
def send_alert_message():
    """Send each pending alert message to the alert staff via SMS and
    push its next send time (``ts_set``) 15 minutes into the future so
    unacknowledged alerts are re-sent.
    """
    # check due alert messages
    # ts_due = dt.today()
    # query = ("select alert_id, alert_msg from sms_alerts where alert_status is"
    #          " null and ts_set <= '%s'") % (ts_due.strftime("%Y-%m-%d %H:%M:%S"))
    # alertmsg = dbio.read(query,'send_alert_message')
    alert_msgs = check_alerts()
    contacts = get_alert_staff_numbers()
    if len(alert_msgs) == 0:
        print('No alertmsg set for sending')
        return
    for (stat_id, site_id, site_code, trigger_source, alert_symbol,
            ts_last_retrigger) in alert_msgs:
        tlr_str = ts_last_retrigger.strftime("%Y-%m-%d %H:%M:%S")
        message = ("As of %s\n"
                   "Alert ID %d:\n"
                   "%s:%s:%s") % (tlr_str, stat_id, site_code, alert_symbol,
                                  trigger_source)
        message += alert_details(site_id, trigger_source, ts_last_retrigger)
        # Instruct staff how to acknowledge via SMS.
        message += "\n\nText\nACK <alert_id> <validity> <remarks>"
        # send to alert staff
        recipients_list = ""
        for mobile_id, sim_num, gsm_id in contacts:
            recipients_list += "%s," % (sim_num)
        recipients_list = recipients_list[:-1]
        # NOTE(review): gsm_id below is the loop variable leaking out of
        # the for above, i.e. the gsm_id of the LAST contact; mobile_id is
        # unused. Confirm this single-gsm behavior is intended. Also note
        # gsm_id is undefined here if contacts is empty.
        smstables.write_outbox(message=message, recipients=recipients_list,
                               gsm_id=gsm_id, table='users')
        # set alert to 15 mins later
        ts_due = dt.now() + td(seconds=60 * 15)
        query = ("update alert_status set ts_set = '%s' where "
                 "stat_id = %s") % (ts_due.strftime("%Y-%m-%d %H:%M:%S"),
                                    stat_id)
        dbio.write(query, 'checkalertmsg')
def create_NOAH_table(gauge_name):
    """Create the per-gauge NOAH rainfall table named ``gauge_name``.

    :param gauge_name: name of the rain gauge; used as the table name.
    """
    ddl = [
        "CREATE TABLE `{}` (".format(gauge_name),
        " `data_id` INT(10) UNSIGNED NOT NULL AUTO_INCREMENT,",
        " `ts` TIMESTAMP NOT NULL,",
        " `rain` DECIMAL(4,1) NOT NULL,",
        " `temperature` DECIMAL(3,1) NULL DEFAULT NULL,",
        " `humidity` DECIMAL(3,1) NULL DEFAULT NULL,",
        " `battery1` DECIMAL(4,3) NULL DEFAULT NULL,",
        " `battery2` DECIMAL(4,3) NULL DEFAULT NULL,",
        " `csq` TINYINT(3) NULL DEFAULT NULL,",
        " PRIMARY KEY (`data_id`),",
        " UNIQUE INDEX `ts_UNIQUE` (`ts` ASC))",
        " ENGINE = InnoDB",
        " DEFAULT CHARACTER SET = utf8;",
    ]
    print_out("Creating table: {}...".format(gauge_name))
    db.write("".join(ddl))
def set_send_status(table='', status_list='', host='local',
                    resource="sms_data"):
    """Bulk insert/update send statuses into ``smsoutbox_<x>_status``.

    :param table: 'loggers' or 'users' (the trailing 's' is dropped to
        form the status table name).
    :param status_list: iterable of 6-tuples
        (stat_id, send_status, ts_sent, outbox_id, gsm_id, mobile_id).
    :param host: database host key.
    :param resource: database resource key.
    :raises ValueError: when table or status_list is empty/missing.
    """
    if not table:
        raise ValueError("No table definition")
    if not status_list:
        raise ValueError("No status list definition")
    head = ("insert into smsoutbox_%s_status (stat_id,send_status,ts_sent,"
            "outbox_id,gsm_id,mobile_id) values ") % (table[:-1])
    rows = ",".join("(%d,%d,'%s',%d,%d,%d)" % tuple(entry)
                    for entry in status_list)
    # On conflict, accumulate send_status and refresh ts_sent.
    tail = (" on duplicate key update stat_id=values(stat_id), "
            "send_status=send_status+values(send_status),ts_sent=values(ts_sent)")
    dbio.write(query=head + rows + tail, last_insert=False, host=host,
               resource=resource)
def set_read_status(sms_id_list='', read_status=0, table='', host='local',
                    resource="sms_data"):
    """Set the ``read_status`` flag on rows of ``smsinbox_<table>``.

    :param sms_id_list: a list of inbox ids or a single numeric id.
    :param read_status: value to write (0 unread, 1 read).
    :param table: 'loggers' or 'users'.
    :raises ValueError: when table is empty or sms_id_list has an
        unsupported type.
    """
    if table == '':
        raise ValueError("No table definition")
    if type(sms_id_list) is list:
        if len(sms_id_list) == 0:
            print(">> Nothing to do here")
            return
        # str(list)[1:-1] yields "1, 2, 3"; strip the py2 long 'L' suffix.
        id_csv = str(sms_id_list)[1:-1].replace("L", "")
        where_clause = "where inbox_id in (%s)" % (id_csv)
    elif type(sms_id_list) in (int, float):
        where_clause = "where inbox_id = %d" % (sms_id_list)
    else:
        raise ValueError("Unknown sms_id_list type")
    query = "update smsinbox_%s set read_status = %d %s" % (
        table, read_status, where_clause)
    dbio.write(query=query, host=host, resource=resource)
def create_NOAH_table(gauge_name, connection='analysis'):
    """Create the per-gauge NOAH rainfall table named ``gauge_name`` on
    the given connection, if it does not exist yet.

    :param gauge_name: name of the rain gauge; used as the table name.
    :param connection: db connection key (default ``'analysis'``).
    """
    if does_table_exist(gauge_name, connection=connection):
        return
    ddl = [
        "CREATE TABLE `%s` (" % gauge_name,
        " `data_id` INT(10) UNSIGNED NOT NULL AUTO_INCREMENT,",
        " `ts` TIMESTAMP NOT NULL,",
        " `rain` DECIMAL(4,1) NOT NULL,",
        " `temperature` DECIMAL(3,1) NULL DEFAULT NULL,",
        " `humidity` DECIMAL(3,1) NULL DEFAULT NULL,",
        " `battery1` DECIMAL(4,3) NULL DEFAULT NULL,",
        " `battery2` DECIMAL(4,3) NULL DEFAULT NULL,",
        " `csq` TINYINT(3) NULL DEFAULT NULL,",
        " PRIMARY KEY (`data_id`),",
        " UNIQUE INDEX `ts_UNIQUE` (`ts` ASC))",
        " ENGINE = InnoDB",
        " DEFAULT CHARACTER SET = utf8;",
    ]
    print_out("Creating table: %s..." % gauge_name)
    db.write("".join(ddl), connection=connection)
def logger_response(sms, log_type, log=False):
    """Log the inbox id of a matched expression to ``logger_response``.

    :param sms: sms object; reads ``sms.sim_num`` and ``sms.inbox_id``.
    :param log_type: type tag stored with the log entry.
    :param log: switch logging on/off. BUG FIX: the old default was the
        *string* ``'False'``, which is truthy, so logging silently ran by
        default despite the docstring saying it was off.
    :returns: False when logging is disabled; None after logging.
    """
    # Back-compat: still treat an explicit legacy 'False' string as off.
    if not log or log == 'False':
        return False
    conn = mem.get('DICT_DB_CONNECTIONS')
    # BUG FIX: sim_num is a string and must be quoted in the SQL literal.
    query = ("INSERT INTO %s.logger_response (`logger_Id`, `inbox_id`, `log_type`)"
             " values((Select logger_id from %s.logger_mobile where sim_num = '%s' order by"
             " date_activated desc limit 1),'%s','%s')"
             % (conn['analysis']['schema'], conn['common']['schema'],
                sms.sim_num, sms.inbox_id, log_type))
    dbio.write(query, resource="sensor_analysis")
    print('>> Log response')
def logger_response(sms, log_type, log=False):
    """Log the inbox id of a matched expression to ``logger_response``.

    :param sms: sms object; reads ``sms.sim_num`` and ``sms.inbox_id``.
    :param log_type: type tag stored with the log entry.
    :param log: switch logging on/off. BUG FIX: the old default was the
        *string* ``'False'``, which is truthy, so logging silently ran by
        default despite the docstring saying it was off.
    :returns: False when logging is disabled; None after logging.
    """
    # Back-compat: still treat an explicit legacy 'False' string as off.
    if not log or log == 'False':
        return False
    # BUG FIX: sim_num is a string and must be quoted in the SQL literal.
    query = (
        "INSERT INTO logger_response (`logger_Id`, `inbox_id`, `log_type`)"
        " values((Select logger_id from logger_mobile where sim_num = '%s' order by"
        " date_activated desc limit 1),'%s','%s')"
        % (sms.sim_num, sms.inbox_id, log_type))
    dbio.write(query, resource="sensor_data")
    print('>> Log response')
def create_public_alerts():
    """Create the ``public_alerts`` table: the site-level public alert
    record, one row per (ts, site_id, pub_sym_id).
    """
    ddl = [
        "CREATE TABLE `public_alerts` (",
        " `public_id` INT(5) UNSIGNED NOT NULL AUTO_INCREMENT,",
        " `ts` TIMESTAMP NULL,",
        " `site_id` TINYINT(3) UNSIGNED NOT NULL,",
        " `pub_sym_id` TINYINT(1) UNSIGNED NOT NULL,",
        " `ts_updated` TIMESTAMP NULL,",
        " PRIMARY KEY (`public_id`),",
        " UNIQUE INDEX `uq_public_alerts` (`ts` ASC, `site_id` ASC, `pub_sym_id` ASC),",
        " INDEX `fk_public_alerts_sites1_idx` (`site_id` ASC),",
        " CONSTRAINT `fk_public_alerts_sites1`",
        " FOREIGN KEY (`site_id`)",
        " REFERENCES `sites` (`site_id`)",
        " ON DELETE NO ACTION",
        " ON UPDATE CASCADE,",
        " INDEX `fk_public_alerts_public_alert_symbols1_idx` (`pub_sym_id` ASC),",
        " CONSTRAINT `fk_public_alerts_public_alert_symbols1`",
        " FOREIGN KEY (`pub_sym_id`)",
        " REFERENCES `public_alert_symbols` (`pub_sym_id`)",
        " ON DELETE NO ACTION",
        " ON UPDATE CASCADE)",
    ]
    db.write("".join(ddl))
def create_operational_triggers():
    """Create the ``operational_triggers`` table: raw trigger events per
    site, keyed by (ts, site_id, trigger_sym_id).
    """
    ddl = [
        "CREATE TABLE `operational_triggers` (",
        " `trigger_id` INT(10) UNSIGNED NOT NULL AUTO_INCREMENT,",
        " `ts` TIMESTAMP NULL,",
        " `site_id` TINYINT(3) UNSIGNED NOT NULL,",
        " `trigger_sym_id` TINYINT(2) UNSIGNED NOT NULL,",
        " `ts_updated` TIMESTAMP NULL,",
        " PRIMARY KEY (`trigger_id`),",
        " UNIQUE INDEX `uq_operational_triggers` (`ts` ASC, `site_id` ASC, `trigger_sym_id` ASC),",
        " INDEX `fk_operational_triggers_sites1_idx` (`site_id` ASC),",
        " CONSTRAINT `fk_operational_triggers_sites1`",
        " FOREIGN KEY (`site_id`)",
        " REFERENCES `sites` (`site_id`)",
        " ON DELETE NO ACTION",
        " ON UPDATE CASCADE,",
        " INDEX `fk_operational_triggers_operational_trigger_symbols1_idx` (`trigger_sym_id` ASC),",
        " CONSTRAINT `fk_operational_triggers_operational_trigger_symbols1`",
        " FOREIGN KEY (`trigger_sym_id`)",
        " REFERENCES `operational_trigger_symbols` (`trigger_sym_id`)",
        " ON DELETE NO ACTION",
        " ON UPDATE CASCADE)",
    ]
    db.write("".join(ddl))
def log_csq(gsm, gsm_id):
    """Record the GSM module's current signal quality in ``gsm_csq_logs``.

    :param gsm: gsm handler object providing ``csq()``.
    :param gsm_id: id of the gsm module in the database table.
    :type gsm_id: int

    Returns:
        int: signal (CSQ) value reported by the gsm module.
    """
    logged_at = dt.today().strftime('%Y-%m-%d %H:%M:%S')
    csq_val = gsm.csq()
    insert_sql = ("insert into gsm_csq_logs (`ts`,`gsm_id`,`csq_val`) "
                  "values ('%s', %d, %d)") % (logged_at, gsm_id, csq_val)
    db.write(query=insert_sql, identifier="", last_insert=False, host="local")
    return csq_val
def main():
    """Upsert this month's and next month's monitoring shift schedules
    into ``monshiftsched``; '-' cells are stored as NULL.
    """
    this_month = get_values("local_server_interface")
    next_month = get_values("local_server_interface_2")
    query = ("insert into monshiftsched (ts, iompmt, iompct, oomps, "
             "oompmt,oompct) values ")
    value_rows = []
    for row in this_month + next_month:
        cells = ["NULL" if item == '-' else "'%s'" % (item) for item in row]
        value_rows.append("(%s)" % ",".join(cells))
    query += ",".join(value_rows)
    # BUG FIX: the last column previously read "oompct = values(iompct)",
    # overwriting the OOMP-CT assignment with the IOMP-CT person on every
    # duplicate-key update.
    query += (" on duplicate key update iompmt = values(iompmt), "
              "iompct = values(iompct), oomps = values(oomps), "
              "oompmt = values(oompmt), oompct = values(oompct)")
    db.write(query=query, resource="sensor_data")
def main():
    """Process unprocessed earthquake events: for each event of magnitude
    >= 4, compute the critical distance and raise earthquake operational
    triggers for every site within it, marking the event processed.
    """
    eq_events = get_unprocessed()
    sym = get_alert_symbol()
    sites = get_sites()
    dfg = sites.groupby('site_id')
    eq_a = pd.DataFrame(columns=['site_id', 'eq_id', 'distance'])
    EVENTS_TABLE = 'earthquake_events'
    for i in eq_events.index:
        cur = eq_events.loc[i]
        mag = cur.magnitude
        eq_lat = cur.latitude
        eq_lon = cur.longitude
        ts = cur.ts
        critdist = get_crit_dist(mag)
        print(critdist)
        if False in np.isfinite([mag, eq_lat, eq_lon]):
            # has NaN value in mag, lat, or lon: flag as unprocessable (-1)
            query = "UPDATE %s SET processed = -1 where eq_id = %s" % (
                EVENTS_TABLE, i)
            dynadb.write(query=query, resource="sensor_data")
            continue
        if mag < 4:
            # below trigger threshold: mark processed, no alert
            print("> Magnitude too small: %d" % (mag))
            query = "UPDATE %s SET processed = 1 where eq_id = %s" % (
                EVENTS_TABLE, i)
            dynadb.write(query=query, resource="sensor_data")
            continue
        else:
            print("> Magnitude reached threshold: %d" % (mag))
            # magnitude is big enough to consider
            sites = dfg.apply(get_distance_to_eq, eq_lat=eq_lat,
                              eq_lon=eq_lon)
            print(sites)
            # sites within the critical distance of the epicenter
            crits = sites.loc[sites.distance <= critdist, :]
            if len(crits) == 0:
                print("> No affected sites. ")
                query = "UPDATE %s SET processed = 1, critical_distance = %s where eq_id = %s" % (
                    EVENTS_TABLE, critdist, i)
                dynadb.write(query=query, resource="sensor_data")
                continue
            else:
                # there may be a trigger
                print(">> Possible sites affected: %d" % (
                    len(crits.site_id.values)))
                crits.loc[:, 'ts'] = ts
                crits.loc[:, 'source'] = 'earthquake'
                crits.loc[:, 'trigger_sym_id'] = sym
                crits.loc[:, 'ts_updated'] = ts
                crits.loc[:, 'eq_id'] = i
                eq_a = crits.loc[:, ['eq_id', 'site_id', 'distance']]
                op_trig = crits.loc[:, ['ts', 'site_id', 'trigger_sym_id',
                                        'ts_updated']]
                # write to tables
                data_table = sms.DataTable("operational_triggers", op_trig)
                dynadb.df_write(data_table)
                data_table = sms.DataTable("earthquake_alerts", eq_a)
                dynadb.df_write(data_table)
                query = "UPDATE %s SET processed = 1, critical_distance = %s where eq_id = %s " % (
                    EVENTS_TABLE, critdist, i)
                dynadb.write(query=query, resource="sensor_data")
                print(">> Alert iniated.\n")
def delete_public_alert(site_id, public_ts_start, connection='analysis'):
    """Delete the ``public_alerts`` row matching the given site and event
    start timestamp.

    :param site_id: numeric site id.
    :param public_ts_start: event start timestamp (``ts`` column value).
    :param connection: db connection key (default ``'analysis'``).
    """
    query = ("DELETE FROM public_alerts "
             "WHERE ts = '{}'  AND site_id = {}").format(public_ts_start,
                                                         site_id)
    db.write(query, connection=connection)
def alert_to_db(df, table_name):
    """Write an alert row to ``table_name``, merging with any alert of the
    same kind active within the previous 30 minutes (extends its
    ``ts_updated`` instead of inserting a duplicate).

    Args:
        df (dataframe): single-row dataframe to be written to database.
        table_name (str): 'tsm_alerts', 'public_alerts' or
            'operational_triggers'.
    """
    if does_table_exist(table_name) == False:
        # Create a tsm_alerts table if it doesn't exist yet
        if table_name == 'tsm_alerts':
            create_tsm_alerts()
        # Create a public_alerts table if it doesn't exist yet
        elif table_name == 'public_alerts':
            create_public_alerts()
        # Create an operational_triggers table if it doesn't exist yet
        elif table_name == 'operational_triggers':
            create_operational_triggers()
        else:
            print_out('unrecognized table : ' + table_name)
            return
    if table_name == 'operational_triggers':
        # checks trigger source
        query = "SELECT * FROM "
        query += " operational_trigger_symbols AS op "
        query += "INNER JOIN "
        query += " trigger_hierarchies AS trig "
        query += "ON op.source_id = trig.source_id "
        all_trig = db.df_read(query)
        trigger_source = all_trig[all_trig.trigger_sym_id == \
            df['trigger_sym_id'].values[0]]['trigger_source'].values[0]
        # does not write nd (no-data, alert_level -1) subsurface alerts
        if trigger_source == 'subsurface':
            alert_level = all_trig[all_trig.trigger_sym_id == \
                df['trigger_sym_id'].values[0]]['alert_level'].values[0]
            if alert_level == -1:
                return
        # if ts does not exist, writes alert; else: updates alert level
        elif trigger_source == 'surficial':
            query = "SELECT trigger_id, trig.trigger_sym_id FROM "
            query += " (SELECT trigger_sym_id, alert_level, alert_symbol, "
            query += " op.source_id, trigger_source FROM "
            query += " operational_trigger_symbols AS op "
            query += " INNER JOIN "
            query += " (SELECT * FROM trigger_hierarchies "
            query += " WHERE trigger_source = '%s' " % trigger_source
            query += " ) AS trig "
            query += " ON op.source_id = trig.source_id "
            query += " ) AS sym "
            query += "INNER JOIN "
            query += " (SELECT * FROM operational_triggers "
            query += " WHERE site_id = %s " % df['site_id'].values[0]
            query += " AND ts = '%s' " % df['ts'].values[0]
            query += " ) AS trig "
            query += "ON trig.trigger_sym_id = sym.trigger_sym_id"
            surficial = db.df_read(query)
            if len(surficial) == 0:
                # no surficial trigger at this exact ts yet: insert
                data_table = sms.DataTable(table_name, df)
                db.df_write(data_table)
            else:
                # same ts already present: update symbol if it changed
                trigger_id = surficial['trigger_id'].values[0]
                trigger_sym_id = df['trigger_sym_id'].values[0]
                if trigger_sym_id != surficial['trigger_sym_id'].values[0]:
                    query = "UPDATE %s " % table_name
                    query += "SET trigger_sym_id = '%s' " % trigger_sym_id
                    query += "WHERE trigger_id = %s" % trigger_id
                    db.write(query)
            return
        # For the remaining trigger sources, start building the
        # previous-alert lookup joined to the symbol/hierarchy info.
        query = "SELECT * FROM "
        query += " (SELECT trigger_sym_id, alert_level, alert_symbol, "
        query += " op.source_id, trigger_source FROM "
        query += " operational_trigger_symbols AS op "
        query += " INNER JOIN "
        query += " (SELECT * FROM trigger_hierarchies "
        query += " WHERE trigger_source = '%s' " % trigger_source
        query += " ) AS trig "
        query += " ON op.source_id = trig.source_id "
        query += " ) AS sym "
        query += "INNER JOIN "
        query += " ( "
    else:
        query = ""
    if table_name == 'tsm_alerts':
        where_id = 'tsm_id'
    else:
        where_id = 'site_id'
    # look back 30 minutes for an overlapping alert window
    ts_updated = pd.to_datetime(
        df['ts_updated'].values[0]) - timedelta(hours=0.5)
    # previous alert
    query += " SELECT * FROM %s " % table_name
    query += " WHERE %s = %s " % (where_id, df[where_id].values[0])
    query += " AND ((ts <= '%s' " % df['ts_updated'].values[0]
    query += " AND ts_updated >= '%s') " % df['ts_updated'].values[0]
    query += " OR (ts_updated <= '%s' " % df['ts_updated'].values[0]
    query += " AND ts_updated >= '%s')) " % ts_updated
    if table_name == 'operational_triggers':
        query += " ) AS trig "
        query += "ON trig.trigger_sym_id = sym.trigger_sym_id "
    query += "ORDER BY ts DESC LIMIT 1"
    df2 = db.df_read(query)
    if table_name == 'public_alerts':
        # also match an identical public alert at the exact same ts
        query = "SELECT * FROM %s " % table_name
        query += "WHERE site_id = %s " % df['site_id'].values[0]
        query += "AND ts = '%s' " % df['ts'].values[0]
        query += "AND pub_sym_id = %s" % df['pub_sym_id'].values[0]
        # NOTE(review): DataFrame.append was removed in pandas 2.0;
        # this code presumably runs on pandas < 2 — confirm.
        df2 = df2.append(db.df_read(query))
    # writes alert if no alerts within the past 30mins
    if len(df2) == 0:
        data_table = sms.DataTable(table_name, df)
        db.df_write(data_table)
    # does not update ts_updated if ts in written ts to ts_updated range
    elif pd.to_datetime(df2['ts_updated'].values[0]) >= \
            pd.to_datetime(df['ts_updated'].values[0]):
        pass
    # if diff prev alert, writes to db; else: updates ts_updated
    else:
        if table_name == 'tsm_alerts':
            alert_comp = 'alert_level'
            pk_id = 'ta_id'
        elif table_name == 'public_alerts':
            alert_comp = 'pub_sym_id'
            pk_id = 'public_id'
        else:
            alert_comp = 'trigger_sym_id'
            pk_id = 'trigger_id'
        same_alert = df2[alert_comp].values[0] == df[alert_comp].values[0]
        # comparison may yield an array; reduce to a scalar when it does
        try:
            same_alert = same_alert[0]
        except:
            pass
        if not same_alert:
            data_table = sms.DataTable(table_name, df)
            db.df_write(data_table)
        else:
            query = "UPDATE %s " % table_name
            query += "SET ts_updated = '%s' " % df['ts_updated'].values[0]
            query += "WHERE %s = %s" % (pk_id, df2[pk_id].values[0])
            db.write(query)
def write_inbox(msglist='', gsm_info='', resource="sms_data"):
    """
    **Description:**
    -Write raw SMS messages received by a GSM module into the database,
    routing each message to ``smsinbox_loggers`` or ``smsinbox_users``
    by its sender's SIM number; unknown senders get a placeholder user
    and mobile-number record created on the fly.

    :param msglist: The message list.
    :param gsm_info: The gsm_info that is being used.
    :type msglist: obj
    :type gsm_info: obj
    :returns: N/A
    """
    if not msglist:
        raise ValueError("No msglist definition")
    if not gsm_info:
        raise ValueError("No gsm_info definition")
    sc = mem.server_config()
    mobile_nums_db = sc["resource"]["mobile_nums_db"]
    # sim -> mobile_id lookup tables for known loggers and users
    logger_mobile_sim_nums = static.get_mobiles(table='loggers',
        host=mobile_nums_db, resource="sms_data")
    user_mobile_sim_nums = static.get_mobiles(table='users',
        host=mobile_nums_db, resource="sms_data")
    # gsm_ids = get_gsm_modules()
    ts_stored = dt.today().strftime("%Y-%m-%d %H:%M:%S")
    gsm_id = gsm_info['id']
    loggers_count = 0
    users_count = 0
    query_loggers = ("insert into smsinbox_loggers (ts_sms, ts_stored, mobile_id, "
        "sms_msg,read_status,gsm_id) values ")
    query_users = ("insert into smsinbox_users (ts_sms, ts_stored, mobile_id, "
        "sms_msg,read_status,gsm_id) values ")
    sms_id_ok = []
    sms_id_unk = []
    ts_sms = 0
    # ltr_mobile_id= 0
    for m in msglist:
        # print m.simnum, m.data, m.dt, m.num
        ts_sms = m.dt
        sms_msg = m.data
        read_status = 0
        if m.simnum in logger_mobile_sim_nums.keys():
            query_loggers += "('%s','%s',%d,'%s',%d,%d)," % (ts_sms, ts_stored,
                logger_mobile_sim_nums[m.simnum], sms_msg, read_status, gsm_id)
            # ltr_mobile_id= logger_mobile_sim_nums[m.simnum]
            loggers_count += 1
        elif m.simnum in user_mobile_sim_nums.keys():
            query_users += "('%s','%s',%d,'%s',%d,%d)," % (ts_sms, ts_stored,
                user_mobile_sim_nums[m.simnum], sms_msg, read_status, gsm_id)
            users_count += 1
        else:
            # sender not in either lookup: handle after the bulk inserts
            print ('Unknown number', m.simnum)
            sms_id_unk.append(m)
            continue
        sms_id_ok.append(m.num)
    # drop the trailing comma from each VALUES list
    query_loggers = query_loggers[:-1]
    query_users = query_users[:-1]
    sc = mem.server_config()
    sms_host = sc["resource"]["smsdb"]
    if len(sms_id_ok)>0:
        if loggers_count > 0:
            dbio.write(query=query_loggers, host=sms_host, resource=resource)
        if users_count > 0:
            dbio.write(query=query_users, host=sms_host, resource=resource)
    if len(sms_id_unk)>0:
        for msg_details in sms_id_unk:
            check_if_existing = "SELECT * FROM mobile_numbers where sim_num = '%s'" % msg_details.simnum
            is_exist = dbio.read(query=check_if_existing, host=sms_host,
                resource=resource)
            if len(is_exist) == 0:
                # brand-new number: create a placeholder UNKNOWN_<n> user
                random_id = random.randint(200,999999)*5
                new_unknown_query = 'INSERT INTO users VALUES (0,"UN",'\
                    '"UNKNOWN_%d","UNKNOWN","UNKNOWN_%d","UNKNOWN",' \
                    '"1994-08-16","M","1")' % (random_id, random_id)
                dbio.write(query=new_unknown_query, host=sms_host,
                    resource=resource)
                query_insert_mobile_details = 'insert into mobile_numbers (sim_num,gsm_id) values ' \
                    '("%s","%s")' % (msg_details.simnum,gsm_id)
                mobile_id = dbio.write(query=query_insert_mobile_details,
                    host=sms_host, resource=resource, last_insert=True)
                query_insert_mobile_details = 'insert into user_mobiles (user_id,' \
                    'mobile_id,priority,status) values ' \
                    '((SELECT user_id FROM users WHERE firstname = "UNKNOWN_%s"),"%s","%s","%s")' % (random_id,mobile_id,'1','1')
                dbio.write(query=query_insert_mobile_details, host=sms_host,
                    resource=resource)
                # NOTE(review): this maps mobile_id -> sim_num, the inverse
                # of how user_mobile_sim_nums is used above (sim -> id);
                # confirm the intended key/value order.
                user_mobile_sim_nums[mobile_id] = msg_details.simnum
                # NOTE(review): query_users was already stripped of its
                # trailing comma above; appending another tuple here with
                # no separating comma looks malformed when users_count > 0
                # — confirm. mobile_id here is the raw dbio.write()
                # last-insert return, while the else-branch uses
                # mobile_id[0][0]; confirm both shapes.
                query_users += "('%s','%s','%s','%s',%d,%d)" \
                    % (msg_details.dt, ts_stored, mobile_id,
                        msg_details.data, 0, gsm_id)
                dbio.write(query=query_users, host=sms_host, resource=resource)
            else:
                get_mobile_id_query = "SELECT mobile_id FROM mobile_numbers WHERE sim_num = '%s'" % msg_details.simnum
                mobile_id = dbio.read(query=get_mobile_id_query, host=sms_host,
                    resource=resource)
                query_users += "('%s','%s','%s','%s',%d,%d)" \
                    % (msg_details.dt, ts_stored, mobile_id[0][0],
                        msg_details.data, 0, gsm_id)
                dbio.write(query=query_users, host=sms_host, resource=resource)
def write_outbox(message=None, recipients=None, table=None, resource="sms_data", with_mobile_id=False): """ **Description:** -The write outbox message to database is a function that insert message to smsoutbox with timestamp written,message source and mobile id. :param message: The message that will be sent to the recipients. :param recipients: The number of the recipients. :param gsm_id: The gsm id . :param table: table use of the number. :type message: str :type recipients: str :type recipients: int :type table: str :returns: N/A """ # if table == '': # print "Error: No table indicated" # raise ValueError # return sc = mem.server_config() mc = mem.get_handle() host = sc['resource']['smsdb'] tsw = dt.today().strftime("%Y-%m-%d %H:%M:%S") if not message: raise ValueError("No message specified for sending") if type(recipients) == type(None): raise ValueError("No recipients specified for sending") elif type(recipients).__name__ == 'str': recipients = recipients.split(",") if not table: table_name = check_number_in_table(recipients[0]) if not table_name: print("No record for '%s" % (recipients[0])) return else: table_name = table query = ("insert into smsoutbox_%s (ts_written,sms_msg,source) VALUES " "('%s','%s','central')") % (table_name,tsw,message) outbox_id = dbio.write(query=query, identifier="womtdb", last_insert=True, host=host, resource=resource)[0][0] query = ("INSERT INTO smsoutbox_%s_status (outbox_id,mobile_id,gsm_id)" " VALUES ") % (table_name[:-1]) if with_mobile_id: recipients.loc[:, 'outbox_id'] = outbox_id query += str(list(recipients[['outbox_id', 'mobile_id', 'gsm_id']].to_records(index=False)))[1:-1] else: table_mobile = static.get_mobiles(table_name, host) def_gsm_id = mc.get(table_name[:-1] + "_mobile_def_gsm_id") for r in recipients: try: mobile_id = table_mobile[r] gsm_id = def_gsm_id[mobile_id] query += "(%d, %d, %d)," % (outbox_id, mobile_id, gsm_id) except KeyError: print (">> Error: Possible key error for", r) continue query = query[:-1] 
dbio.write(query=query, identifier="womtdb", last_insert=False, host=host, resource=resource)
def process_gateway_msg(sms): """ - The process of processing the gateway message parser for sms data and save data to database. :param sms: list data info of sms message . :type msg: list Returns: bool: True output for success parsing and return False if fails. """ print(">> Coordinator message received") print(sms.msg) # dbio.create_table("coordrssi","coordrssi") routers = get_router_ids() sms.msg = re.sub("(?<=,)(?=(,|$))", "NULL", sms.msg) try: datafield = sms.msg.split('*')[1] timefield = sms.msg.split('*')[2] timestamp = dt.strptime(timefield, "%y%m%d%H%M%S").strftime("%Y-%m-%d %H:%M:%S") smstype = datafield.split(',')[0] # process rssi parameters if smstype == "RSSI": # site_name = datafield.split(',')[1 rssi_string = datafield.split(',', 2)[2] print(rssi_string) # format is # <router name>,<rssi value>,... query = ("INSERT IGNORE INTO router_rssi " "(ts, logger_id, rssi_val) VALUES ") tuples = re.findall("[A-Z]+,\d+", rssi_string) count = 0 for item in tuples: try: query += "('%s',%d,%s)," % (timestamp, routers[item.split( ',')[0].lower()], item.split(',')[1]) count += 1 except KeyError: print('Key error for', item) continue query = query[:-1] # print query if count != 0: print('count', count) dbio.write(query, resource="sensor_data") else: print('>> no data to commit') return True else: print(">> Processing coordinator weather") except IndexError: print("IndexError: list index out of range") logger_response(sms, 14, True) except: print(">> Unknown Error", sms.msg) return False
def process_piezometer(sms): """ - The process of parsing process_piezometer data. :param sms: list data info of sms message . :type sms: list Returns: bool: True output for success parsing and return False if fails. """ #msg = message line = sms.msg print('Piezometer data: ' + line) line = re.sub("\*\*", "*", line) try: #PUGBPZ*13173214*1511091800 linesplit = line.split('*') msgname = linesplit[0].lower() msgname = re.sub("due", "", msgname) msgname = re.sub("pz", "", msgname) msgname = re.sub("ff", "", msgname) if len(msgname) == 3: msgname = msgname + 'pz' print('msg_name: ' + msgname) data = linesplit[1] data = re.sub("F", "", data) print("data:", data) # msgid = int(('0x'+data[:4]), 16) # p1 = int(('0x'+data[4:6]), 16)*100 # p2 = int(('0x'+data[6:8]), 16) # p3 = int(('0x'+data[8:10]), 16)*.01 p1 = int(('0x' + data[:2]), 16) * 100 p2 = int(('0x' + data[2:4]), 16) p3 = int(('0x' + data[4:6]), 16) * .01 piezodata = p1 + p2 + p3 t1 = int(('0x' + data[6:8]), 16) t2 = int(('0x' + data[8:10]), 16) * .01 tempdata = t1 + t2 try: txtdatetime = dt.strptime( linesplit[2], '%y%m%d%H%M%S').strftime('%Y-%m-%d %H:%M:00') except ValueError: txtdatetime = dt.strptime( linesplit[2], '%y%m%d%H%M').strftime('%Y-%m-%d %H:%M:00') if int(txtdatetime[0:4]) < 2009: txtdatetime = sms.ts except (IndexError, AttributeError): print('\n>> Error: Piezometer message format is not recognized') print(line) return except ValueError: print('>> Error: Possible conversion mismatch ' + line) return # try: # dbio.create_table(str(msgname), "piezo") try: query = ( "INSERT INTO piezo_%s (ts, frequency_shift, temperature ) VALUES" " ('%s', %s, %s)") % (msgname, txtdatetime, str(piezodata), str(tempdata)) # print query # print query except ValueError: print('>> Error writing query string.', ) return False try: dbio.write(query, resource="sensor_data") except MySQLdb.ProgrammingError: print('>> Unexpected programing error') return False print('End of Process Piezometer data') return True