def update_narratives_on_db(narrative_id, site_id, timestamp, narrative, type_id, user_id, event_id=None):
    """
    Update an existing row of the narratives table in place.

    Args:
        narrative_id (Integer) - primary key of the row to update
        site_id (Integer)
        timestamp (DateTime)
        narrative (String)
        type_id (Integer)
        user_id (Integer)
        event_id (Integer) - optional; None for non-event narratives

    Returns "Success". Raises Exception when no row matches narrative_id.
    NOTE(review): does not commit or flush — presumably the caller owns
    the session commit; confirm against callers.
    """
    print(get_process_status_log("update_narratives_on_db", "start"))

    try:
        narrative_row = Narratives.query.filter_by(id=narrative_id).first()
        if narrative_row:
            # Overwrite every mutable column with the provided values.
            narrative_row.site_id = site_id
            narrative_row.timestamp = timestamp
            narrative_row.narrative = narrative
            narrative_row.type_id = type_id
            narrative_row.user_id = user_id
            narrative_row.event_id = event_id
        else:
            print(get_process_status_log("Narrative not found!", "fail"))
            raise Exception("Narrative not found!")
    except Exception as err:
        print(err)
        raise

    return "Success"
def get_issues_and_reminders(offset=None, limit=None, start=None, end=None, site_ids=None, include_count=None, search=None, event_id=None, include_expired=None):
    """
    Returns one or more row/s of unresolved issues and reminders.

    Args:
        offset (Integer) - pagination offset
        limit (Integer) - pagination page size
        start (DateTime) - with end, bounds ts_posted
        end (DateTime)
        site_ids - NOTE(review): accepted but never used in this body
        include_count (Boolean) - when truthy, returns [rows, count]
        search (String) - substring match against detail (ignored when
            event_id is given)
        event_id (Integer) - when given, returns only that event's rows
        include_expired (Boolean) - when falsy, hides expired entries
    """
    print(get_process_status_log("get_issues_and_reminders", "start"))
    iar = IssuesAndReminders
    irp = IssuesRemindersSitePostings
    # base = DB.session.query(iar)
    # Eager-load postings and their events; "== None" (not "is None") is
    # required by SQLAlchemy to emit "IS NULL".
    base = iar.query.options(joinedload(iar.postings).joinedload(
        irp.event)).filter(iar.resolution == None)
    return_data = None

    if start and end:
        base = base.filter(iar.ts_posted.between(start, end))

    if not event_id:
        if search:
            base = base.filter(iar.detail.ilike("%" + search + "%"))

        if not include_expired:
            # Keep entries that have no expiration or have not expired yet.
            base = base.filter(
                DB.or_(iar.ts_expiration > datetime.now(),
                       iar.ts_expiration == None))

        issues_and_reminders = base.order_by(DB.desc(
            iar.ts_posted)).limit(limit).offset(offset).all()
        DB.session.commit()

        if include_count:
            count = get_issues_count(base)
            return_data = [issues_and_reminders, count]
        else:
            return_data = issues_and_reminders
    else:
        # NOTE(review): this branch orders by iar.timestamp while the other
        # orders by iar.ts_posted, and filters iar.event_id — confirm both
        # attributes exist on the IssuesAndReminders model.
        issues_and_reminders = base.order_by(DB.desc(
            iar.timestamp)).filter(iar.event_id == event_id).all()
        DB.session.commit()
        return_data = issues_and_reminders

    print(get_process_status_log("get_issues_and_reminders", "end"))

    return return_data
def update_alert_gen(site_code=None):
    """
    May be used to update all alert_gen related data when a change
    was made either by validating triggers or an insert was made.
    This function handles all three important data for the dashboard:

    1. generated alerts - current trigger and alert status of sites
    2. candidate alerts - potential releases for sites
    3. alerts from db - current validated/released status of the sites

    Args:
        site_code (String) - may be provided if only one site
            is affected by the changes you did.

    No return. Websocket emit_data handles all returns.
    """
    print(get_process_status_log("Update Alert Generation", "start"))

    try:
        generated_alerts = retrieve_data_from_memcache("GENERATED_ALERTS")
        site_gen_alert = generate_alerts(site_code)

        if site_code:
            # generate_alerts() output is JSON; pop() takes the last
            # element — presumably the entry for the requested site.
            load_site_gen_alert = json.loads(site_gen_alert)
            site_gen_alert = load_site_gen_alert.pop()

            # Find the current entry for the site provided
            json_generated_alerts = json.loads(generated_alerts)
            gen_alert_row = next(
                filter(lambda x: x["site_code"] == site_code,
                       json_generated_alerts), None)

            if gen_alert_row:
                # Replace rather than update the alertgen entry in place.
                gen_alert_index = json_generated_alerts.index(gen_alert_row)
                json_generated_alerts[gen_alert_index] = site_gen_alert

            set_data_to_memcache(name="GENERATED_ALERTS",
                                 data=json.dumps(json_generated_alerts))

        # NOTE(review): when site_code is None, GENERATED_ALERTS is not
        # refreshed here even though generate_alerts(None) ran above —
        # confirm whether generate_alerts persists its own result.
        set_data_to_memcache(name="ALERTS_FROM_DB",
                             data=wrap_get_ongoing_extended_overdue_events())
        set_data_to_memcache(name="CANDIDATE_ALERTS",
                             data=candidate_alerts_generator.main())
    except Exception as err:
        print(err)
        raise

    print(get_process_status_log("emitting updated alert gen data", "start"))
    emit_data("receive_generated_alerts")
    emit_data("receive_alerts_from_db")
    emit_data("receive_candidate_alerts")
    print(get_process_status_log("emitting updated alert gen data", "end"))

    print(get_process_status_log("update alert gen", "end"))
def delete_narratives_from_db(narrative_id):
    """
    Delete the narratives row matching narrative_id.

    Args:
        narrative_id (Integer) - primary key of the row to delete

    Returns "Success". Raises when the row does not exist or when the
    delete fails. NOTE: does not commit; the caller owns the commit
    (see the commented-out commit kept from the original).
    """
    print(get_process_status_log("delete_narratives_from_db", "start"))

    try:
        narrative_for_delete = Narratives.query.filter(
            Narratives.id == narrative_id).first()

        # Fail loudly on a missing row instead of passing None to
        # session.delete(), which raises an obscure error.
        if narrative_for_delete is None:
            raise Exception("Narrative not found!")

        DB.session.delete(narrative_for_delete)
        # DB.session.commit()
        print(get_process_status_log("delete_narratives_from_db", "end"))
    except Exception:
        # Narrowed from a bare "except:" so that KeyboardInterrupt and
        # SystemExit are no longer swallowed into the fail log.
        print(get_process_status_log("delete_narratives_from_db", "fail"))
        raise

    return "Success"
def execute_insert_ewi(insert_details):
    """
    Run the full insert_ewi flow: write the release, refresh the cached
    dashboard data, then notify connected clients.
    """
    # Insert the ewi release and log its status.
    insert_status = insert_ewi(insert_details)
    status_log = get_process_status_log("insert_ewi", insert_status)

    # Refresh the memcache entries the dashboard reads from.
    set_data_to_memcache(
        name="ALERTS_FROM_DB",
        data=wrap_get_ongoing_extended_overdue_events())
    set_data_to_memcache(
        name="CANDIDATE_ALERTS",
        data=candidate_alerts_generator.main())

    # Push the refreshed data over the websocket.
    for channel in ("receive_alerts_from_db", "receive_candidate_alerts"):
        emit_data(channel)

    return status_log
def execute_write_issues_reminders(issues_and_reminders_details):
    """
    Insert or update an issue/reminder entry and commit the session.

    Args:
        issues_and_reminders_details (Dictionary) - websocket payload;
            the "postings" key is optional.

    Returns a process status log string. Never raises: any failure rolls
    back the session, and the error is now printed instead of being
    silently swallowed (the original used a bare "except:" with no log).
    """
    data = issues_and_reminders_details
    try:
        # "postings" is optional in the payload.
        postings = data.get("postings")

        result = write_issue_reminder_to_db(
            iar_id=data["iar_id"],
            detail=data["detail"],
            user_id=data["user_id"],
            ts_posted=data["ts_posted"],
            ts_expiration=data["ts_expiration"],
            resolved_by=data["resolved_by"],
            resolution=data["resolution"],
            ts_resolved=data["ts_resolved"],
            site_id_list=data["site_id_list"],
            is_event_entry=data["is_event_entry"],
            postings=postings
        )

        if result == "success":
            DB.session.commit()
        else:
            DB.session.rollback()
    except Exception as err:
        # Log the cause before rolling back; intentionally not re-raised
        # to preserve the original best-effort behavior.
        print(err)
        DB.session.rollback()

    # Prepare process status log
    status_log = get_process_status_log("request_to_handle_iar", "end")
    return status_log
def process_transaction_logs(site_id_list, postings, is_event_entry, new_issue_and_reminder_id=None):
    """
    Sync issues-and-reminders site postings with site_id_list.

    Writes a transaction entry for every site in site_id_list that has
    no existing posting, then deletes postings whose site was removed
    from site_id_list. Commits on success; rolls back and re-raises on
    any error.
    """
    # Write the transaction log
    try:
        print(get_process_status_log("process_transaction_logs", "start"))

        # Sites that already have a posting (empty when postings is falsy).
        posted_site_ids = {post["site_id"] for post in postings} if postings else set()

        # Create entries for sites without an existing posting.
        for site_id in site_id_list or []:
            if site_id not in posted_site_ids:
                write_iar_transaction_entry(
                    new_issue_and_reminder_id,
                    is_event_entry=is_event_entry,
                    site_id=site_id)

        # Delete postings for sites no longer in site_id_list.
        for post in postings or []:
            if site_id_list and post["site_id"] not in site_id_list:
                delete_issues_reminders_site_postings(
                    post["site_id"], post["event_id"])

        DB.session.commit()
    except Exception as err:
        DB.session.rollback()
        print("Problem in process transaction log")
        print(err)
        raise

    print(get_process_status_log("process_transaction_logs", "end"))
def handle_message(payload):
    """
    This handles all messages and connects per message to its
    corresponding functions.

    Args:
        payload (Dictionary) - must contain "key" (message type) and
            "data" (message-specific payload; may be None for some keys).

    Raises Exception when the key is not recognized.
    """
    key = payload["key"]
    data = payload["data"]

    if key == "insert_ewi":
        # Insert an early-warning release and refresh cached alert data.
        print(get_process_status_log("insert_ewi", "request"))
        var_checker("insert data", data, True)
        status = execute_insert_ewi(data)
        print(status)
    elif key == "validate_trigger":
        # Validate/invalidate a trigger, then rebuild alert gen data.
        print(get_process_status_log("validate_trigger", "request"))
        status = execute_alert_status_validation(data)
        print(status)
    elif key == "write_issues_and_reminders":
        # Persist the issue/reminder, refresh its cache, notify clients.
        print(get_process_status_log("write_issue_reminder_to_db", "request"))
        var_checker("data", data, True)
        status = execute_write_issues_reminders(data)
        set_data_to_memcache("ISSUES_AND_REMINDERS", wrap_get_issue_reminder())
        emit_data("receive_issues_and_reminders")
        print(status)
    elif key == "write_monitoring_moms_to_db":
        print(get_process_status_log("write_monitoring_moms_to_db", "request"))
        status = execute_write_monitoring_moms_to_db(data)
        print(status)
    elif key == "update_monitoring_tables":
        print(get_process_status_log("update_monitoring_tables", "request"))
        # NOTE: UNFINISHED BUSINESS
    elif key == "run_alert_generation":
        # data (and site_code) is optional for a full-site regeneration.
        print(get_process_status_log("run_alert_generation", "request"))
        site_code = None
        if data:
            site_code = data["site_code"]
        update_alert_gen(site_code=site_code)
    elif key == "update_db_alert_ewi_sent_status":
        print(get_process_status_log("update_db_alert_ewi_sent_status",
                                     "request"))
        execute_update_db_alert_ewi_sent_status(
            data["alert_db_group"], data["site_id"], data["ewi_group"])
    else:
        print("ERROR: Key provided not found.")
        raise Exception("WEBSOCKET MESSAGE: KEY NOT FOUND")
def write_narratives_to_db(site_id, timestamp, narrative, type_id, user_id, event_id=None):
    """
    Insert a row into the narratives table.

    Args:
        site_id (Integer)
        timestamp (DateTime)
        narrative (String)
        type_id (Integer)
        user_id (Integer)
        event_id (Integer) - optional

    Returns the new narrative ID. Rolls back and re-raises on failure.
    """
    print(get_process_status_log("write_narratives_to_db", "start"))

    try:
        # Distinct name for the ORM row so it does not shadow the
        # `narrative` text argument.
        narrative_row = Narratives(
            site_id=site_id, event_id=event_id, timestamp=timestamp,
            narrative=narrative, type_id=type_id, user_id=user_id)
        DB.session.add(narrative_row)
        # Flush (not commit) so the autoincrement ID is populated while
        # leaving the commit decision to the caller.
        DB.session.flush()
        new_narrative_id = narrative_row.id
    except Exception as err:
        print(err)
        DB.session.rollback()
        raise

    print(get_process_status_log("write_narratives_to_db", "end"))
    return new_narrative_id
def execute_alert_status_validation(as_details):
    """
    Run the whole trigger-validation flow: set the trigger validity,
    then refresh the alert generation data for the affected site.

    Args:
        as_details (Dictionary) - alert status details; must include
            "site_code" alongside the update_alert_status fields.

    Returns a process status log string.
    """
    # Update the trigger validity and log the outcome.
    validation_status = update_alert_status(as_details)
    status_log = get_process_status_log("update_alert_status", validation_status)

    # Rebuild generated/candidate/db alerts for this site only.
    update_alert_gen(site_code=as_details["site_code"].lower())

    return status_log
def wrap_write_narratives_to_db():
    """
    Request-handling wrapper: reads JSON from the request and inserts
    or updates narratives for every site in "site_list".

    Expected JSON keys: site_list (mandatory), narrative, type_id,
    user_id, timestamp, event_id; narrative_id is optional — when
    present, existing narratives are updated instead of inserted.

    Returns "success"; re-raises on any failure. The session is only
    committed after every site succeeds.
    """
    try:
        json_data = request.get_json()
        var_checker("json_data", json_data, True)

        site_list = []
        try:
            site_list = json_data["site_list"]
            print(get_process_status_log("Multiple Site Narrative", "start"))
            is_multiple_insert = True  # NOTE(review): assigned but never read
        except KeyError:
            # site_list is mandatory; surface the KeyError to the caller.
            raise

        narrative = str(json_data["narrative"])
        type_id = json_data["type_id"]
        event_id = None
        user_id = json_data["user_id"]

        # Accept either a datetime or a "%Y-%m-%d %H:%M:%S" string.
        timestamp = json_data["timestamp"]
        if not isinstance(timestamp, datetime):
            timestamp = datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S")

        try:
            narrative_id = json_data["narrative_id"]
        except KeyError:
            narrative_id = None

        # UPDATING OF NARRATIVE
        if narrative_id:
            for site_id in site_list:
                has_event_id = bool(json_data["event_id"])
                if has_event_id:
                    event_id = json_data["event_id"]
                else:
                    # Derive the event from the timestamp when none given.
                    event = find_narrative_event_id(timestamp, site_id)
                    if event:
                        event_id = event.event_id
                    else:
                        raise Exception(
                            get_process_status_log("INSERT NARRATIVES", "fail"))

                var_checker("narrative_id", narrative_id, True)

                status = update_narratives_on_db(narrative_id=narrative_id,
                                                 site_id=site_id,
                                                 timestamp=timestamp,
                                                 narrative=narrative,
                                                 type_id=type_id,
                                                 user_id=user_id,
                                                 event_id=event_id)
                print(
                    get_process_status_log(
                        f"{status} updated narrative with ID: {narrative_id}",
                        "end"))
        # INSERT OF NARRATIVE
        else:
            for site_id in site_list:
                has_event_id = bool(json_data["event_id"])
                if has_event_id:
                    event_id = json_data["event_id"]
                else:
                    # NOTE(review): unlike the update branch, this assigns the
                    # raw find_narrative_event_id() result instead of its
                    # .event_id attribute — confirm which value is intended.
                    event_id = find_narrative_event_id(timestamp, site_id)
                    # if event:

                narrative_id = write_narratives_to_db(site_id=site_id,
                                                      timestamp=timestamp,
                                                      narrative=narrative,
                                                      type_id=type_id,
                                                      user_id=user_id,
                                                      event_id=event_id)
                print(
                    get_process_status_log(
                        f"New narrative with ID {narrative_id}", "end"))
                # else:
                #     print(get_process_status_log(f"No event found in specified timestamp on site {site_id} | {timestamp}", "fail"))
                #     raise Exception(get_process_status_log("NO EVENT IN SPECIFIED TIMESTAMP", "fail"))

        # If nothing goes wrong:
        DB.session.commit()
    except Exception as err:
        print("MAIN")
        print(err)
        raise

    return "success"
def write_issue_reminder_to_db(iar_id, detail, user_id, ts_posted, ts_expiration, resolved_by, resolution, ts_resolved, site_id_list, is_event_entry, postings=None):
    """
    Insert or update a row of the issues_and_reminders table and sync
    its site postings. Returns the new issues_and_reminder ID holder.

    Args:
        iar_id - primary key; when it matches an existing row, that row
            is updated, otherwise a new row is inserted
        detail (String)
        user_id (Integer)
        ts_posted - DateTime or "%Y-%m-%d %H:%M:%S" string; defaults to
            now when falsy
        ts_expiration - DateTime, "%Y-%m-%d %H:%M:%S" string, or the
            literal "Invalid date" (treated as no expiration)
        resolved_by
        resolution
        ts_resolved
        site_id_list - sites whose postings should be synced
        is_event_entry
        postings - existing site postings, if any

    Returns "success"; re-raises database errors.
    """
    # Normalize the timestamp inputs to datetime objects.
    try:
        if not ts_posted:
            ts_posted = datetime.now()
        else:
            if not isinstance(ts_posted, datetime):
                ts_posted = datetime.strptime(ts_posted, "%Y-%m-%d %H:%M:%S")

        if not isinstance(ts_expiration, datetime):
            if ts_expiration != "Invalid date":
                ts_expiration = datetime.strptime(ts_expiration,
                                                  "%Y-%m-%d %H:%M:%S")
            else:
                ts_expiration = None
    except Exception as err:
        # NOTE(review): parse failures are printed and swallowed, leaving
        # the raw input values in place — confirm this is intentional.
        print(err)
        pass

    try:
        issue_reminder_row = IssuesAndReminders.query.filter_by(
            iar_id=iar_id).first()
        issue_and_reminder_id = iar_id

        if issue_reminder_row:
            # UPDATE path: mutate the existing row in place.
            print(
                get_process_status_log("update_issue_reminder_on_db", "start"))
            issue_reminder_row.detail = detail
            issue_reminder_row.user_id = user_id
            issue_reminder_row.ts_posted = ts_posted
            issue_reminder_row.ts_expiration = ts_expiration
            issue_reminder_row.resolution = resolution
            issue_reminder_row.resolved_by = resolved_by
            issue_reminder_row.ts_resolved = ts_resolved
            # NOTE(review): commits here while the insert path only
            # flushes — confirm the asymmetry is intended.
            DB.session.commit()

            # issue_reminder_row.site_id_list = site_id_list
            # issue_reminder_row.is_event_entry = is_event_entry
            print(get_process_status_log("update_issue_reminder_on_db", "end"))
        else:
            # INSERT path: create the row and flush to obtain its ID.
            print(get_process_status_log("write_issue_reminder_to_db", "start"))
            issue_and_reminder = IssuesAndReminders(
                detail=detail,
                user_id=user_id,
                ts_posted=ts_posted,
                ts_expiration=ts_expiration,
                resolved_by=resolved_by,
                resolution=resolution,
                ts_resolved=ts_resolved)
            DB.session.add(issue_and_reminder)
            DB.session.flush()
            issue_and_reminder_id = issue_and_reminder.iar_id
            print(get_process_status_log("write_issue_reminder_to_db", "end"))

        # Sync site postings against the (possibly new) ID.
        process_transaction_logs(site_id_list, postings,
                                 is_event_entry, issue_and_reminder_id)
    except Exception as err:
        print(err)
        raise

    return "success"
def main(ts=None, generated_alerts_list=None, db_alerts_dict=None):
    """
    Generate the candidate alerts JSON and persist it to disk.

    Args:
        ts (Str Datetime) - optional "%Y-%m-%d %H:%M:%S" cutoff;
            defaults to now
        generated_alerts_list (List) - JSON string provided thru
            websocket; when omitted, read from generated_alerts.json
        db_alerts_dict - JSON string of current db alerts; when omitted,
            queried directly from the database

    Returns the candidate alerts as a JSON string.
    """
    print(get_process_status_log("Candidate Alerts Processing", "start"))
    run_started = datetime.now()

    cutoff_ts = datetime.strptime(ts, "%Y-%m-%d %H:%M:%S") if ts else datetime.now()

    ####################
    # START OF PROCESS #
    ####################

    # Fall back to the on-disk generated alerts when none were passed in.
    if not generated_alerts_list:
        generated_alerts_list = get_generated_alerts_list_from_file(
            APP_CONFIG["generated_alerts_path"], "generated_alerts.json")
    generated_alerts = json.loads(generated_alerts_list)

    db_alerts = (json.loads(db_alerts_dict) if db_alerts_dict
                 else get_ongoing_extended_overdue_events(cutoff_ts))

    # Split sites with alerts from sites with no alerts.
    with_alerts, without_alerts = separate_with_alerts_wo_alerts(
        generated_alerts)

    # PROCESS CANDIDATES
    candidate_alerts = process_candidate_alerts(
        with_alerts, without_alerts, db_alerts, cutoff_ts)

    # NOTE: TAG LOWERING CANDIDATES
    # candidate_alerts = remove_for_lowering_sites(
    #     candidate_alerts, db_alerts)

    json_data = json.dumps(candidate_alerts)

    # Persist for consumers that read the file instead of the socket.
    directory = APP_CONFIG["generated_alerts_path"]
    if not os.path.exists(directory):
        os.makedirs(directory)
    with open(directory + "/candidate_alerts.json", "w") as out_file:
        out_file.write(json_data)

    run_time = datetime.now() - run_started
    print(f"RUNTIME: {run_time} | Done generating Candidate Alerts!")
    print("")

    return json_data
def handle_update_insert_tags():
    """
    Request handler: inserts or updates data tags from the request JSON,
    and for ground-measurement/observation/EWI-response tags also writes
    a narrative for each site with an ongoing (status == 2) event.

    Expected JSON keys: contact_person, message, tag_type, tag_details
    (which carries tag_id_list, site_id_list, user_id).

    Returns a JSON {"message": "success", "status": True} response.
    """
    tag_data = request.get_json()
    contact_person = tag_data["contact_person"]
    message = tag_data["message"]
    tag_type = tag_data["tag_type"]
    tag_details = tag_data["tag_details"]
    tag_id_list = tag_details["tag_id_list"]
    site_id_list = tag_details["site_id_list"]

    for tag_id in tag_id_list:
        # Update when the tag row exists; insert otherwise.
        tag_row = get_tag_by_type(tag_type, tag_details, tag_id)
        user_id = tag_details["user_id"]
        if tag_row:
            response = update_data_tag(row_to_update=tag_row,
                                       tag_details=tag_details,
                                       tag_id=tag_id)
        else:
            response = insert_data_tag(tag_type=tag_type,
                                       tag_details=tag_details,
                                       tag_id=tag_id)

        # Get tag description
        # tag_description = get_tag_description(tag_id=tag_id, tag_type=tag_type)
        tag_description = get_tag_description(tag_id=tag_id)
        var_checker("tag_description", tag_description, True)

        # TODO: change tags when new tags came or use tag_ids
        if tag_description in ["#GroundMeas", "#GroundObs", "#EwiResponse"]:
            # NOTE(review): return value discarded — presumably meant to
            # be printed; confirm.
            get_process_status_log(key="Writing narratives", status="request")
            additional_data = contact_person
            if tag_description in ["#GroundObs", "#EwiResponse"]:
                additional_data += f" - {message}"

            narrative = get_narrative_text(narrative_type="sms_tagging",
                                           details={
                                               "tag": tag_description,
                                               "additional_data": additional_data
                                           })
            var_checker("narrative", narrative, True)

            get_process_status_log(
                "inserting narratives with provided site_id_list", "request")
            try:
                for site_id in site_id_list:
                    # TODO: Make sure that this would handle routine in the future.
                    event = get_latest_monitoring_event_per_site(site_id)
                    var_checker("event", event, True)
                    # Only write narratives for ongoing events (status == 2).
                    if event.status == 2:
                        event_id = event.event_id
                        narrative_id = write_narratives_to_db(
                            site_id=site_id, timestamp=datetime.now(),
                            narrative=narrative, type_id=1,
                            user_id=user_id, event_id=event_id)
                        print("narrative_id", narrative_id)
            except Exception as err:
                var_checker("error in writing narrative in insert tag api",
                            err, True)
                get_process_status_log(
                    "inserting narratives with provided site_id_list", "fail")
                raise

            get_process_status_log(
                "inserting narratives with provided site_id_list", "success")

        var_checker("response of insert", response, True)

    # Single Commit for all
    DB.session.commit()

    return jsonify({"message": "success", "status": True})
def release_scenario_two(part_number, ts=None, ts_updated=None):
    """
        Scenario Two: Rainfall alert and operational trigger and subsurface

        Test/demo helper: inserts the alert data for one numbered part of
        the scenario, then commits. Returns "success" or, after rollback,
        "scenario insert failed".

        Args:
            part_number (Integer) - which scenario step (1-5) to run
            ts (String) - optional "%Y-%m-%d %H:%M:%S" trigger timestamp
            ts_updated (String) - optional "%Y-%m-%d %H:%M:%S" ts_updated
    """
    try:
        get_process_status_log("", "request")
        # Default both timestamps to now; override from the string args.
        input_ts = datetime.now()
        input_ts_u = datetime.now()
        if ts:
            input_ts = datetime.strptime(ts, "%Y-%m-%d %H:%M:%S")
        if ts_updated:
            input_ts_u = datetime.strptime(ts_updated, "%Y-%m-%d %H:%M:%S")

        if part_number == 1:
            """
                PART 1 -> Insert subsurface alert THEN invalidate sa WEB UI
            """
            # (Part 1: insert a subsurface alert, then invalidate it via
            # the web UI.)
            trigger_id = add_subsurface_alert(site_id=42, ts=input_ts,
                                              alert_level=2)
            alert_details = {
                "trigger_id": trigger_id,
                "alert_status": None,  # -1 -> invalid, 0 -> validating, 1 - valid, None -> System generated
                "remarks": "",
                "user_id": 1
            }
            update_alert_status(alert_details)
            print("SUBSURFACE ALERT OK!")
        elif part_number == 2:
            """
                PART 2 -> Once invalidated, after 30mins, may RAINFALL alert.
                tapos validate sa WEB UI
            """
            # (Part 2: once invalidated, a rainfall alert arrives after
            # 30 mins; validate it via the web UI.)
            trigger_id = add_rainfall_alert(site_id=42, is_triggering=1,
                                            ts=input_ts,
                                            ts_updated=input_ts_u)
            alert_details = {
                "trigger_id": trigger_id,
                "alert_status": None,  # -1 -> invalid, 0 -> validating, 1 - valid, None -> System generated
                "remarks": "",
                "user_id": 1
            }
            update_alert_status(alert_details)
            print("RAINFALL ALERT OK!")
        elif part_number == 3:
            """
                PART 3 -> Once validated, RETRIGGER rainfall alert 1 HOUR before release time
                PERO HINDI isasama sa release
            """
            # (Part 3: once validated, retrigger a rainfall alert 1 hour
            # before release time, but it is NOT included in the release.)
            trigger_id = add_rainfall_alert(site_id=42, is_triggering=1,
                                            ts=input_ts,
                                            ts_updated=input_ts_u)
            print("RAINFALL ALERT OK!")
        elif part_number == 4:
            """
                PART 4 -> END OF VALIDITY, lowering dapat PERO NO GROUND DATA
                so extend release.
            """
            # (Part 4: end of validity — should be lowering, but there is
            # no ground data, so the release is extended.)
            trigger_id = add_rainfall_alert(site_id=42, is_triggering=0,
                                            ts=input_ts,
                                            ts_updated=input_ts_u)
            print("RAINFALL ALERT OK!")
        elif part_number == 5:
            """
                PART 5 -> END OF VALIDITY, may ground data. Lowering na talaga.
                NOTE: MAKE SURE NA MAY SAME VALUE UNG LAST OBSERVATION AT 30mins before ang data
            """
            # (Part 5: end of validity with ground data — actual lowering.
            # Make sure the last observation and the data 30 mins before
            # share the same value.)
            # Release rainfall
            trigger_id_rain = add_rainfall_alert(site_id=42, is_triggering=0,
                                                 ts=input_ts,
                                                 ts_updated=input_ts_u)
            trigger_id_surf = add_surficial_alert(site_id=42, ts=input_ts,
                                                  alert_level=0,
                                                  measurement=50)
            print("LOWERING DATA OK!")

        # TODO: check whether scenario 2 is OK now
        DB.session.commit()
        return "success"
    except Exception as err:
        DB.session.rollback()
        print(err)
        return "scenario insert failed"
def release_scenario_one(part_number, ts=None, ts_updated=None):
    """
        Scenario One: Rainfall alert and operational trigger

        Test/demo helper: inserts the alert data for one numbered part of
        the scenario, then commits. Returns "success" or, after rollback,
        "scenario insert failed".

        Args:
            part_number (Integer) - which scenario step (1 or 2) to run
            ts (String) - optional "%Y-%m-%d %H:%M:%S" trigger timestamp
            ts_updated (String) - optional "%Y-%m-%d %H:%M:%S" ts_updated
    """
    try:
        get_process_status_log(f"release_scenario_one {part_number}",
                               "request")
        # Default both timestamps to now; override from the string args.
        input_ts = datetime.now()
        input_ts_u = datetime.now()
        if ts:
            input_ts = datetime.strptime(ts, "%Y-%m-%d %H:%M:%S")
        if ts_updated:
            input_ts_u = datetime.strptime(ts_updated, "%Y-%m-%d %H:%M:%S")

        if part_number == 1:
            print("PART 1")
            # Rainfall trigger on site 1, then a system-generated status.
            trigger_id = add_rainfall_alert(site_id=1, is_triggering=1,
                                            ts=input_ts,
                                            ts_updated=input_ts_u)
            alert_details = {
                "trigger_id": trigger_id,
                "alert_status": None,  # -1 -> invalid, 0 -> validating, 1 - valid, None -> System generated
                "remarks": "",
                "user_id": 1
            }
            update_alert_status(alert_details)
            print("RAINFALL ALERT OK!")
        elif part_number == 2:
            print("PART 2")
            # NOTE(review): surficial trigger uses site 41, not site 1 as
            # in part 1 — confirm the site mismatch is intended.
            trigger_id = add_surficial_alert(site_id=41, ts=input_ts,
                                             alert_level=2, measurement=50)
            alert_details = {
                "trigger_id": trigger_id,
                "alert_status": None,  # -1 -> invalid, 0 -> validating, 1 - valid, None -> System generated
                "remarks": "",
                "user_id": 1
            }
            update_alert_status(alert_details)
            print("SURFICIAL ALERT OK!")
        else:
            print("invalid part number")

        # WHEN ALL THINGS ARE OVER
        DB.session.commit()
        get_process_status_log(f"release_scenario_one {part_number}", "done")
        # ("UMABOT DITO" = "it reached here" — debug marker.)
        print("UMABOT DITO")
        return "success"
    except Exception as err:
        DB.session.rollback()
        print(err)
        return "scenario insert failed"