def get_community_users(include_relationships=False, include_mobile_nums=False,
                        include_orgs=False, include_hierarchy=False,
                        include_team=False, return_schema_format=False,
                        filter_by_site=None, filter_by_org=None,
                        filter_by_mobile_id=None):
    """
    Return all community users (and optionally related data) by delegating
    to get_users() with user_group="community".

    Args:
        filter_by_site (list): site codes to filter by; defaults to all sites
        filter_by_org (list): organizations to filter by (i.e. LEWC, BLGU, etc)
        filter_by_mobile_id (list): mobile IDs to filter by
    """
    var_checker("filter_by_site", filter_by_site, True)

    # Normalize None filters to empty lists before delegating
    site_filter = filter_by_site or []
    org_filter = filter_by_org or []
    mobile_filter = filter_by_mobile_id or []

    return get_users(
        include_relationships=include_relationships,
        include_mobile_nums=include_mobile_nums,
        include_orgs=include_orgs,
        include_hierarchy=include_hierarchy,
        include_team=include_team,
        return_schema_format=return_schema_format,
        user_group="community",
        filter_by_site=site_filter,
        filter_by_org=org_filter,
        filter_by_mobile_id=mobile_filter)
def get_routine_ewi_template():
    """Return the generic routine EWI message built without a release ID."""
    routine_template = create_ewi_message(release_id=None)
    var_checker("template", routine_template, True)
    return routine_template
def wrap_get_shift_data():
    """
    Flask endpoint: return monitoring releases for a shift window.

    Expects a JSON body with "ts_start" and "ts_end"; "user_id" is optional.
    When "user_id" is present the result is grouped by date, otherwise by
    alert level.
    """
    json_input = request.get_json()
    if not json_input:
        # NOTE(review): returns None (empty 200 body in Flask) — presumably
        # callers always send a body; confirm before tightening.
        return

    # FIX: releases_list was previously unbound when ts_start/ts_end were
    # missing, causing a NameError in the grouping calls below.
    releases_list = []

    # Search releases by timestamp window
    if "ts_start" in json_input and "ts_end" in json_input:
        user_id = json_input.get("user_id")
        ts_start = json_input["ts_start"]
        ts_end = json_input["ts_end"]
        releases_list = get_monitoring_releases(
            ts_start=ts_start, ts_end=ts_end,
            user_id=user_id, exclude_routine=True)
    else:
        var_checker("NO SHIFTS", "no shifts", True)

    if "user_id" in json_input:
        releases_data = group_by_date(releases_list)
    else:
        releases_data = group_by_alert(releases_list)

    return jsonify(releases_data)
def wrap_write_issue_reminder_to_db():
    """
    Flask endpoint: create or update an issue-and-reminder entry from the
    JSON request body, then commit.

    Returns:
        The status value produced by write_issue_reminder_to_db().

    Raises:
        Re-raises any failure after rolling back the session.
    """
    try:
        json_data = request.get_json()
        var_checker("json_data", json_data, True)
        detail = json_data["detail"]
        user_id = int(json_data["user_id"])
        ts_posted = json_data["ts_posted"]
        ts_expiration = json_data["ts_expiration"]
        resolved_by = int(json_data["resolved_by"])
        resolution = json_data["resolution"]
        ts_resolved = json_data["ts_resolved"]
        site_id_list = json_data["site_id_list"]
        is_event_entry = json_data["is_event_entry"]
        # NOTE: a "postings" field was previously read here but never used;
        # the dead lookup has been removed.

        status = write_issue_reminder_to_db(
            json_data["iar_id"], detail, user_id, ts_posted,
            ts_expiration, resolved_by, resolution, ts_resolved,
            site_id_list, is_event_entry)
        DB.session.commit()
    except Exception:
        DB.session.rollback()
        raise

    return status
def wrap_write_monitoring_moms_to_db(internal_json=None):
    """
    Handles moms. Make sure you pass lists to this function.

    Args:
        internal_json (dict): optional payload; when None, the JSON request
            body is used instead. Expected keys: "site_code", "moms_list".

    Returns:
        JSON response with "status" (bool) and "message".
    """
    try:
        if internal_json:
            json_data = internal_json
        else:
            json_data = request.get_json()
        var_checker("json_data", json_data, True)

        site_code = json_data["site_code"]
        # raiseload("*") blocks lazy-loading of relationships; only site_id
        # is needed here.
        site_id = DB.session.query(Sites).options(
            raiseload("*")).filter_by(site_code=site_code).first().site_id
        var_checker("site_id", site_id, True)

        moms_list = json_data["moms_list"]
        for moms_obs in moms_list:
            write_monitoring_moms_to_db(moms_details=moms_obs, site_id=site_id)
        DB.session.commit()
    except Exception as err:
        print("YOWO", err)
        DB.session.rollback()
        # FIX: err was passed to jsonify directly; Exception objects are not
        # JSON-serializable, so the error response itself used to fail.
        return jsonify({"status": False, "message": str(err)})

    return jsonify({"status": True, "message": "success"})
def handle_message(payload):
    """
    Websocket message router: dispatch payload["data"] to the server-side
    action matching payload["key"]. Raises when the key is unknown.
    """
    key = payload["key"]
    data = payload["data"]

    def _insert_ewi(data):
        print(get_process_status_log("insert_ewi", "request"))
        var_checker("insert data", data, True)
        print(execute_insert_ewi(data))

    def _validate_trigger(data):
        print(get_process_status_log("validate_trigger", "request"))
        print(execute_alert_status_validation(data))

    def _write_issues_and_reminders(data):
        print(get_process_status_log("write_issue_reminder_to_db", "request"))
        var_checker("data", data, True)
        result = execute_write_issues_reminders(data)
        set_data_to_memcache("ISSUES_AND_REMINDERS", wrap_get_issue_reminder())
        emit_data("receive_issues_and_reminders")
        print(result)

    def _write_monitoring_moms(data):
        print(get_process_status_log("write_monitoring_moms_to_db", "request"))
        print(execute_write_monitoring_moms_to_db(data))

    def _update_monitoring_tables(data):
        print(get_process_status_log("update_monitoring_tables", "request"))
        # NOTE: UNFINISHED BUSINESS

    def _run_alert_generation(data):
        print(get_process_status_log("run_alert_generation", "request"))
        site_code = data["site_code"] if data else None
        update_alert_gen(site_code=site_code)

    def _update_db_alert_ewi_sent_status(data):
        print(get_process_status_log("update_db_alert_ewi_sent_status", "request"))
        execute_update_db_alert_ewi_sent_status(
            data["alert_db_group"], data["site_id"], data["ewi_group"])

    handlers = {
        "insert_ewi": _insert_ewi,
        "validate_trigger": _validate_trigger,
        "write_issues_and_reminders": _write_issues_and_reminders,
        "write_monitoring_moms_to_db": _write_monitoring_moms,
        "update_monitoring_tables": _update_monitoring_tables,
        "run_alert_generation": _run_alert_generation,
        "update_db_alert_ewi_sent_status": _update_db_alert_ewi_sent_status,
    }

    handler = handlers.get(key)
    if handler is None:
        print("ERROR: Key provided not found.")
        raise Exception("WEBSOCKET MESSAGE: KEY NOT FOUND")
    handler(data)
def render_monitoring_bulletin(release_id):
    """
    Render the bulletin for a release and return the generated PDF path,
    or the driver's error message on failure.

    NOTE: Still dont know if this works
    """
    result = BROWSER_DRIVER.render_bulletin(release_id)
    if not result["success"]:
        return result["error"]
    var_checker("RET IN RENDER", result["pdf_path"], True)
    return result["pdf_path"]
def wrap_delete_narratives_from_db():
    """
    Flask endpoint: delete the narrative identified by "narrative_id" in
    the JSON request body and commit.

    Returns:
        The status value from delete_narratives_from_db().

    Raises:
        Re-raises any failure after rolling back the session.
    """
    try:
        json_data = request.get_json()
        var_checker("json_data", json_data, True)
        status = delete_narratives_from_db(json_data["narrative_id"])
        DB.session.commit()
    except Exception as err:
        # FIX: the old handler only printed the error, which left the
        # session dirty and `status` unbound (NameError at return).
        # Roll back and re-raise so the caller sees the real failure.
        print(err)
        DB.session.rollback()
        raise
    return status
def wrap_get_community_users_by_site(site_code):
    """
    Route function that returns all community users of a site as JSON.

    Args:
        site_code (str): site code to filter by; falsy values yield [].
    """
    community_users_data = []
    if site_code:
        # NOTE: removed dead debug leftovers (`temp = [site_code]` plus its
        # var_checker call) — they had no effect on the response.
        community_users = get_community_users_simple(site_code=site_code)
        # marshmallow 2.x API: .dump(...).data holds the serialized payload
        community_users_data = UsersSchema(
            many=True).dump(community_users).data
    return jsonify(community_users_data)
def download_monitoring_bulletin(release_id):
    """
    Render the bulletin for a release and stream the resulting PDF as a
    download attachment; on failure, return the driver's error message.
    """
    result = BROWSER_DRIVER.render_bulletin(release_id)
    var_checker("ret", result, True)
    if not result["success"]:
        return result["error"]
    return send_file(result["pdf_path"], as_attachment=True,
                     attachment_filename=APP_CONFIG["bulletin_save_path"])
def insert_data_tag(tag_type, tag_details, tag_id):
    """
    Writes tags to respective tables.

    Args:
        tag_type (String) - table name acting as discriminator
            ("smsinbox_user_tags" or "smsoutbox_user_tags"). NOTE(review):
            any other value is silently ignored and "data" comes back None.
        tag_details (Dictionary) - column values exclusive to each tag table
        tag_id - ID of the tag to attach

    Returns:
        dict with "message", "status" and "data" (the new row's primary key).
    """
    new_tag_pk = None
    try:
        if tag_type == "smsinbox_user_tags":
            new_row = SmsInboxUserTags(
                inbox_id=tag_details["inbox_id"],
                tag_id=tag_id,
                user_id=tag_details["user_id"],
                ts=tag_details["ts"]
            )
            DB.session.add(new_row)
            DB.session.flush()  # populate the autoincrement PK before commit
            new_tag_pk = new_row.siu_tag_id
        elif tag_type == "smsoutbox_user_tags":
            new_row = SmsOutboxUserTags(
                outbox_id=tag_details["outbox_id"],
                tag_id=tag_id,
                user_id=tag_details["user_id"],
                ts=tag_details["ts"]
            )
            DB.session.add(new_row)
            DB.session.flush()
            new_tag_pk = new_row.sou_tag_id

        var_checker(f"New {tag_type} tag saved to DB with ID", new_tag_pk, True)
        DB.session.commit()
    except Exception as err:
        DB.session.rollback()
        var_checker(f"Error in saving tags for {tag_type} tag table", err, True)
        raise
    return {"message": "success", "status": True, "data": new_tag_pk}
def wrap_send_email():
    """
    Function that sends emails. When "release_id" and "file_name" are
    present in the JSON body, the matching bulletin is attached.
    """
    json_data = request.get_json()
    status = True
    try:
        subject = json_data["subject"]
        recipients = json_data["recipients"]
        mail_body = json_data["mail_body"]

        release_id = None
        file_name = None
        if "release_id" in json_data:
            release_id = json_data["release_id"]
            file_name = json_data.get("file_name")

        send_mail(
            recipients=recipients, subject=subject,
            message=mail_body, file_name=file_name,
            bulletin_release_id=release_id
        )
        response_msg = "Bulletin email sent!" if release_id else "Email sent!"
    except KeyError:
        response_msg = "Bulletin email NOT sent... problem with keys."
        status = False
    except Exception as err:
        response_msg = "Bulletin email NOT sent... system/network issues."
        status = False
        var_checker("PROBLEM with Sending Bulletin", err, True)

    return jsonify({
        "message": response_msg,
        "status": status
    })
def update_data_tag(row_to_update, tag_details, tag_id):
    """
    Updates tags to respective tables.

    Args:
        row_to_update - ORM row returned by your query (SmsInboxUserTags
            or SmsOutboxUserTags); its type selects which columns are set
        tag_details (Dictionary) - column values exclusive to each tag table
        tag_id - ID of the tag to attach

    Returns:
        dict with "message", "status" and "data" (the updated row's PK).
    """
    row_type = type(row_to_update).__name__
    id_to_return = None
    try:
        if row_type == "SmsInboxUserTags":
            row_to_update.inbox_id = tag_details["inbox_id"]
            row_to_update.tag_id = tag_id
            row_to_update.user_id = tag_details["user_id"]
            row_to_update.ts = tag_details["ts"]
            id_to_return = row_to_update.siu_tag_id
        elif row_type == "SmsOutboxUserTags":
            # FIX: copy-paste bug — this branch previously assigned
            # row_to_update.inbox_id = tag_details["inbox_id"] on an OUTBOX
            # row (cf. insert_data_tag, which uses outbox_id for this table).
            row_to_update.outbox_id = tag_details["outbox_id"]
            row_to_update.tag_id = tag_id
            row_to_update.user_id = tag_details["user_id"]
            row_to_update.ts = tag_details["ts"]
            id_to_return = row_to_update.sou_tag_id
    except Exception as err:
        DB.session.rollback()
        var_checker(f"Error in updating tags for {row_type} tag table", err, True)
        raise

    var_checker(f"New {row_type} tag saved to DB with ID", id_to_return, True)
    DB.session.commit()
    return {"message": "success", "status": True, "data": id_to_return}
def get_tag_description(tag_id):
    """
    Return the human-readable tag text for a tag ID, or a placeholder
    string when no matching row exists.

    TODO: Revive the code above for it is the right code. Please check
    the function getting the tag_options in the front end
    """
    var_checker("tag_id", tag_id, True)
    tag_query = SmsTags.query.filter(SmsTags.tag_id == tag_id)
    var_checker("query", tag_query, True)
    matched_row = tag_query.first()
    var_checker("sms_tag_row", matched_row, True)
    return matched_row.tag if matched_row else "System Data Issue: No tag found on DB"
def communication_background_task():
    """
    Long-running SocketIO worker: every 0.5 s, drain pending SMS user
    updates, refresh the in-memory MESSAGES cache per update source
    ("inbox", "outbox", "blocked_numbers", "inbox_tag"), and emit the
    refreshed data to connected clients.

    NOTE(review): this reconstruction's nesting was inferred from a
    whitespace-collapsed source — confirm block boundaries against VCS.
    """
    global MESSAGES
    global ROOM_MOBILE_IDS
    # Local alias to the shared inbox cache; mutations below are visible
    # through MESSAGES["inbox"] as well.
    inbox_messages_arr = MESSAGES["inbox"]
    is_first_run = False
    ground_meas_run = False
    while True:
        try:
            # Ground-measurement reminder carries its own schedule state
            # across iterations via these two flags.
            is_first_run, ground_meas_run = process_ground_measurement_reminder(
                is_first_run, ground_meas_run)
            updates = get_sms_user_updates()
            update_process_start = datetime.now()
            for row in updates:
                query_start = datetime.now()
                mobile_id = row.mobile_id
                update_source = row.update_source
                # Position of this mobile number in the cached inbox list,
                # or -1 when it is not cached yet.
                inbox_index = next(
                    (index for (index, row_arr) in enumerate(inbox_messages_arr)
                     if row_arr["mobile_details"]["mobile_id"] == mobile_id), -1)

                if update_source == "inbox":
                    # New incoming SMS: move (or insert) this conversation
                    # to the front of the inbox cache with fresh messages.
                    msgs = get_latest_messages(mobile_id)
                    msgs_schema = get_messages_schema_dict(msgs)
                    if inbox_index > -1:
                        message_row = inbox_messages_arr[inbox_index]
                        del inbox_messages_arr[inbox_index]
                    else:
                        message_row = {
                            "mobile_details": get_user_mobile_details(mobile_id)
                        }
                    message_row["messages"] = msgs_schema
                    inbox_messages_arr.insert(0, message_row)
                    MESSAGES["inbox"] = inbox_messages_arr
                elif update_source == "outbox":
                    # Outgoing SMS: refresh the cached conversation (if any)
                    # and rebuild the unsent-messages summary.
                    if inbox_index > -1:
                        msgs = get_latest_messages(mobile_id)
                        msgs_schema = get_messages_schema_dict(msgs)
                        MESSAGES["inbox"][inbox_index][
                            "messages"] = msgs_schema
                    unsent_messages_arr = get_unsent_messages(duration=1)
                    unsent_messages = format_unsent_messages(
                        unsent_messages_arr)
                    MESSAGES["unsent"] = unsent_messages

                    # CHECK FOR UPDATES IN MOBILE ID ROOM
                    if mobile_id in ROOM_MOBILE_IDS.keys():
                        if inbox_index > -1:
                            msgs_schema = MESSAGES["inbox"][inbox_index][
                                "messages"]
                        else:
                            msgs = get_latest_messages(mobile_id)
                            msgs_schema = get_messages_schema_dict(msgs)
                        ROOM_MOBILE_IDS[mobile_id]["details"][
                            "messages"] = msgs_schema
                        SOCKETIO.emit("receive_mobile_id_room_update",
                                      ROOM_MOBILE_IDS[mobile_id]["details"],
                                      room=mobile_id,
                                      namespace="/communications")
                elif update_source == "blocked_numbers":
                    # Number was blocked: drop its conversation from cache.
                    if inbox_index > -1:
                        del MESSAGES["inbox"][inbox_index]
                elif update_source == "inbox_tag":
                    # Tag changed on an inbox message: refresh that entry.
                    if inbox_index > -1:
                        msgs = get_latest_messages(mobile_id)
                        msgs_schema = get_messages_schema_dict(msgs)
                        inbox_messages_arr[inbox_index][
                            "messages"] = msgs_schema

                query_end = datetime.now()
                # Mark the update consumed, then notify all clients.
                delete_sms_user_update(row)
                emit_data("receive_latest_messages")
                print("")
                print("GET MESSAGE ON MEMCACHE (WS)",
                      (query_end - query_start).total_seconds())
                print("")
            update_process_end = datetime.now()
            if updates:
                print("")
                print(f"COMMS UPDATE PROCESS LOOP (WS) {len(updates)} updates",
                      (update_process_end - update_process_start).total_seconds())
                print("")
        except Exception as err:
            # Keep the worker alive on any failure; log and roll back.
            print("")
            print("Communication Thread Exception")
            var_checker("Exception Detail", err, True)
            print(traceback.format_exc())
            DB.session.rollback()
            pass
        SOCKETIO.sleep(0.5)
def wrap_write_narratives_to_db():
    """
    Writes narratives to database.

    Expects a JSON body with "site_list", "narrative", "type_id",
    "user_id", "timestamp" and "event_id"; an optional "narrative_id"
    switches the endpoint from insert to update mode.

    Returns:
        "success" when all sites were processed; re-raises on failure.
    """
    try:
        json_data = request.get_json()
        var_checker("json_data", json_data, True)

        site_list = []
        try:
            site_list = json_data["site_list"]
            print(get_process_status_log("Multiple Site Narrative", "start"))
            is_multiple_insert = True  # NOTE(review): never read afterwards
        except KeyError:
            # NOTE(review): bare re-raise — equivalent to not catching at all.
            raise

        narrative = str(json_data["narrative"])
        type_id = json_data["type_id"]
        event_id = None
        user_id = json_data["user_id"]
        timestamp = json_data["timestamp"]
        if not isinstance(timestamp, datetime):
            timestamp = datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S")
        try:
            narrative_id = json_data["narrative_id"]
        except KeyError:
            narrative_id = None

        # UPDATING OF NARRATIVE
        if narrative_id:
            for site_id in site_list:
                has_event_id = bool(json_data["event_id"])
                if has_event_id:
                    event_id = json_data["event_id"]
                else:
                    # Here the helper's return is treated as an event ROW
                    # (".event_id" is read off it)...
                    event = find_narrative_event_id(timestamp, site_id)
                    if event:
                        event_id = event.event_id
                    else:
                        raise Exception(
                            get_process_status_log("INSERT NARRATIVES", "fail"))
                var_checker("narrative_id", narrative_id, True)
                status = update_narratives_on_db(narrative_id=narrative_id,
                                                 site_id=site_id,
                                                 timestamp=timestamp,
                                                 narrative=narrative,
                                                 type_id=type_id,
                                                 user_id=user_id,
                                                 event_id=event_id)
                print(
                    get_process_status_log(
                        f"{status} updated narrative with ID: {narrative_id}",
                        "end"))
        # INSERT OF NARRATIVE
        else:
            for site_id in site_list:
                has_event_id = bool(json_data["event_id"])
                if has_event_id:
                    event_id = json_data["event_id"]
                else:
                    # NOTE(review): ...but here the same helper's return is
                    # used directly as event_id. One of the two branches is
                    # almost certainly wrong — check find_narrative_event_id's
                    # return type and align both branches.
                    event_id = find_narrative_event_id(timestamp, site_id)

                # if event:
                narrative_id = write_narratives_to_db(site_id=site_id,
                                                      timestamp=timestamp,
                                                      narrative=narrative,
                                                      type_id=type_id,
                                                      user_id=user_id,
                                                      event_id=event_id)
                print(
                    get_process_status_log(
                        f"New narrative with ID {narrative_id}", "end"))
                # else:
                #     print(get_process_status_log(f"No event found in specified timestamp on site {site_id} | {timestamp}", "fail"))
                #     raise Exception(get_process_status_log("NO EVENT IN SPECIFIED TIMESTAMP", "fail"))

        # If nothing goes wrong:
        DB.session.commit()
    except Exception as err:
        print("MAIN")
        print(err)
        raise
    return "success"
def handle_update_insert_tags():
    """
    Function that inserts or updates SMS tags, then writes monitoring
    narratives for ground-measurement/observation/EWI-response tags.

    NOTE(review): nesting reconstructed from a whitespace-collapsed
    source — confirm block boundaries against VCS. Also note `response`
    and `user_id` stay unbound when tag_id_list is empty.
    """
    tag_data = request.get_json()
    contact_person = tag_data["contact_person"]
    message = tag_data["message"]
    tag_type = tag_data["tag_type"]
    tag_details = tag_data["tag_details"]
    tag_id_list = tag_details["tag_id_list"]
    site_id_list = tag_details["site_id_list"]

    for tag_id in tag_id_list:
        # Existing row means update; otherwise insert a new tag row.
        tag_row = get_tag_by_type(tag_type, tag_details, tag_id)
        user_id = tag_details["user_id"]
        if tag_row:
            response = update_data_tag(row_to_update=tag_row,
                                       tag_details=tag_details,
                                       tag_id=tag_id)
        else:
            response = insert_data_tag(tag_type=tag_type,
                                       tag_details=tag_details,
                                       tag_id=tag_id)

        # Get tag description
        # tag_description = get_tag_description(tag_id=tag_id, tag_type=tag_type)
        tag_description = get_tag_description(tag_id=tag_id)
        var_checker("tag_description", tag_description, True)

        # TODO: change tags when new tags came or use tag_ids
        if tag_description in ["#GroundMeas", "#GroundObs", "#EwiResponse"]:
            # NOTE(review): get_process_status_log's return is discarded
            # here and below — presumably these were meant to be printed.
            get_process_status_log(key="Writing narratives", status="request")
            additional_data = contact_person
            if tag_description in ["#GroundObs", "#EwiResponse"]:
                additional_data += f" - {message}"
            narrative = get_narrative_text(narrative_type="sms_tagging",
                                           details={
                                               "tag": tag_description,
                                               "additional_data": additional_data
                                           })
            var_checker("narrative", narrative, True)
            get_process_status_log(
                "inserting narratives with provided site_id_list", "request")
            try:
                for site_id in site_id_list:
                    # TODO: Make sure that this would handle routine in the future.
                    event = get_latest_monitoring_event_per_site(site_id)
                    var_checker("event", event, True)
                    # NOTE(review): raises AttributeError when no event
                    # exists for the site (event is None) — caught below.
                    if event.status == 2:
                        event_id = event.event_id
                        narrative_id = write_narratives_to_db(
                            site_id=site_id, timestamp=datetime.now(),
                            narrative=narrative, type_id=1,
                            user_id=user_id, event_id=event_id)
                        print("narrative_id", narrative_id)
            except Exception as err:
                var_checker("error in writing narrative in insert tag api",
                            err, True)
                get_process_status_log(
                    "inserting narratives with provided site_id_list", "fail")
                raise
            get_process_status_log(
                "inserting narratives with provided site_id_list", "success")
        var_checker("response of insert", response, True)

    # Single Commit for all
    DB.session.commit()
    return jsonify({"message": "success", "status": True})
def monitoring_background_task():
    """
    Long-running SocketIO worker that keeps memcache entries and clients
    up to date: full refresh on first run, alert regeneration when the
    minute mod 5 equals 1, and rainfall refresh at minutes 15 and 45.
    Sleeps 60 s between iterations.

    NOTE(review): reconstructed from a whitespace-collapsed source —
    confirm nesting against VCS.
    """
    generated_alerts = []
    while True:
        try:
            if not generated_alerts:
                # First iteration (or after a failed generation): refresh
                # everything and push all channels to connected clients.
                generated_alerts = generate_alerts()
                set_data_to_memcache(name="GENERATED_ALERTS",
                                     data=generated_alerts)
                alerts_from_db = wrap_get_ongoing_extended_overdue_events()
                set_data_to_memcache(name="ALERTS_FROM_DB",
                                     data=alerts_from_db)
                candidate_alerts = candidate_alerts_generator.main(
                    generated_alerts_list=generated_alerts,
                    db_alerts_dict=alerts_from_db)
                set_data_to_memcache(name="CANDIDATE_ALERTS",
                                     data=candidate_alerts)
                set_data_to_memcache(name="ISSUES_AND_REMINDERS",
                                     data=wrap_get_issue_reminder())
                rainfall_data = execute_get_all_site_rainfall_data()
                set_data_to_memcache(name="RAINFALL_DATA", data=rainfall_data)
                emit_data("receive_generated_alerts")
                emit_data("receive_alerts_from_db")
                emit_data("receive_candidate_alerts")
                emit_data("receive_issues_and_reminders")
                emit_data("receive_rainfall_data")
            elif datetime.now().minute % 5 == 1:
                # Periodic alert regeneration (roughly every 5 minutes).
                print()
                system_time = datetime.strftime(
                    datetime.now(), "%Y-%m-%d %H:%M:%S")
                print(f"{system_time} | Websocket running...")
                try:
                    generated_alerts = generate_alerts()
                    set_data_to_memcache(
                        name="GENERATED_ALERTS", data=generated_alerts)
                    alerts_from_db = wrap_get_ongoing_extended_overdue_events()
                    set_data_to_memcache(
                        name="ALERTS_FROM_DB", data=alerts_from_db)
                    set_data_to_memcache(name="CANDIDATE_ALERTS",
                                         data=candidate_alerts_generator.main(
                                             generated_alerts_list=generated_alerts,
                                             db_alerts_dict=alerts_from_db)
                                         )
                    print(f"{system_time} | Done processing Candidate Alerts.")
                except Exception as err:
                    print(err)
                    raise
                emit_data("receive_generated_alerts")
                emit_data("receive_candidate_alerts")
                emit_data("receive_alerts_from_db")
            # Update rainfall summary data
            elif datetime.now().minute in [15, 45]:
                rainfall_data = execute_get_all_site_rainfall_data()
                set_data_to_memcache(name="RAINFALL_DATA", data=rainfall_data)
                emit_data("receive_rainfall_data")
        except Exception as err:
            # Keep the worker alive on any failure; log and roll back.
            print("")
            print("Monitoring Thread Exception")
            var_checker("Exception Detail", err, True)
            print(traceback.format_exc())
            DB.session.rollback()
        SOCKETIO.sleep(60)  # Every 60 seconds in production stage
def wrap_send_routine_ewi_sms():
    """
    Big function handling the preparation of EWI SMS for Routine

    Step 1. loop provided site list
    Step 2. generate message per site
    Step 3. get the recipients
    Step 4. Prep narrative
    Step 5. Tag

    Returns:
        JSON response with "message" and "status" (bool).
    """
    json_data = request.get_json()
    site_list = json_data["site_list"]
    user_id = json_data["user_id"]
    var_checker("site_list", site_list, True)
    try:
        for site in site_list:
            site_code = site["site_code"]
            release_id = site["release_id"]
            event_id = site["event_id"]

            #######################
            # PREPARE EWI MESSAGE #
            #######################
            ewi_message = create_ewi_message(release_id=release_id)

            ################################
            # PREPARE RECIPIENT MOBILE IDS #
            ################################
            org_id_list = get_org_ids(scopes=[0, 1, 2, 3])
            routine_recipients = get_contacts_per_site(
                site_codes=[site_code], org_ids=org_id_list)
            # NOTE(review): despite the name, this collects mobile NUMBERS
            mobile_id_list = [
                item["mobile_number"]
                for recip in routine_recipients
                for item in recip["mobile_numbers"]
            ]

            #############################
            # STORE MESSAGE TO DATABASE #
            #############################
            outbox_id = insert_message_on_database({
                "sms_msg": ewi_message,
                "recipient_list": mobile_id_list
            })

            #######################
            # TAG THE NEW MESSAGE #
            #######################
            tag_details = {
                "outbox_id": outbox_id,
                "user_id": user_id,
                "ts": datetime.now()
            }
            tag_id = 125  # TODO: FOR REFACTORING — hard-coded tag ID
            # FIX: tag_type was "sms_outbox_user_tags", which matches no
            # branch in insert_data_tag ("smsoutbox_user_tags"), so the tag
            # was silently never written.
            insert_data_tag("smsoutbox_user_tags", tag_details, tag_id)

            #############################
            # PREPARE ROUTINE NARRATIVE #
            #############################
            narrative = "Sent surficial ground data reminder for routine monitoring"
            write_narratives_to_db(
                site["site_id"], datetime.now(), narrative,
                1, user_id, event_id
            )

        DB.session.commit()
        response = {"message": "success", "status": True}
    except Exception as err:
        # FIX: was a bare `except:` that swallowed even SystemExit and
        # discarded the error; catch Exception and log it instead.
        print(err)
        DB.session.rollback()
        response = {"message": "failed", "status": False}
    return jsonify(response)