Code Example #1
File: bulletin.py Project: dynatech/dynaslope3
def prepare_symbols_list(internal_sym_ids):
    arr = []
    has_ground_trigger = False

    for int_sym_id in internal_sym_ids:
        temp = retrieve_data_from_memcache("bulletin_triggers",
                                           {"internal_sym_id": int_sym_id})
        trigger_symbol = temp["internal_sym"]["trigger_symbol"]
        alert_level = trigger_symbol["alert_level"]
        alert_symbol = trigger_symbol["alert_symbol"]
        trigger_origin = trigger_symbol["trigger_hierarchy"]
        is_ground = trigger_origin["is_ground"]
        trigger_source = trigger_origin["trigger_source"]
        hierarchy_id = trigger_origin["hierarchy_id"]

        obj = {
            "internal_sym_id": int_sym_id,
            "alert_level": alert_level,
            "alert_symbol": alert_symbol,
            "trigger_source":
            "manifestation" if trigger_source == "moms" else trigger_source,
            "is_ground": is_ground,
            "hierarchy_id": hierarchy_id,
            "template": temp["template"],
            "description": temp["description"],
        }

        arr.append(obj)

        if is_ground:
            has_ground_trigger = True

    arr = sorted(arr, key=lambda i: (i["hierarchy_id"], -i["alert_level"]))

    return arr, has_ground_trigger
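
The sort at the end of prepare_symbols_list orders triggers by source hierarchy first (lower hierarchy_id wins) and, within the same source, by descending alert level. Below is a minimal, self-contained sketch of that sort key using made-up rows instead of real memcache data.

# Minimal sketch (hypothetical rows) of the sort key used above:
# lower hierarchy_id first, then higher alert_level within the same hierarchy.
rows = [
    {"alert_symbol": "r1", "hierarchy_id": 3, "alert_level": 1},
    {"alert_symbol": "s2", "hierarchy_id": 1, "alert_level": 2},
    {"alert_symbol": "s3", "hierarchy_id": 1, "alert_level": 3},
]
ordered = sorted(rows, key=lambda i: (i["hierarchy_id"], -i["alert_level"]))
print([row["alert_symbol"] for row in ordered])  # ['s3', 's2', 'r1']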
Code Example #2
File: monitoring_ws.py Project: dynatech/dynaslope3
def execute_update_db_alert_ewi_sent_status(alert_db_group, site_id, ewi_group):
    """
    alert_db_group (str):    either "latest", "extended" or "overdue"
    ewi_group (str):        either "sms" or "bulletin"
    """

    alerts_from_db = retrieve_data_from_memcache("ALERTS_FROM_DB")
    json_alerts = json.loads(alerts_from_db)

    # TODO: supposed to search if the site exists in latest, extended and overdue;
    # if it does not, then don't update
    if alert_db_group:
        group = json_alerts[alert_db_group]
        alert = None
        index = None
        for i, row in enumerate(group):
            if row["event"]["site_id"] == site_id:
                alert = row
                index = i

        alert["sent_statuses"][f"is_{ewi_group}_sent"] = True
        group[index] = alert
        json_alerts[alert_db_group] = group

        set_data_to_memcache("ALERTS_FROM_DB", json.dumps(json_alerts))
        emit_data("receive_alerts_from_db")
Code Example #3
File: monitoring_ws.py Project: dynatech/dynaslope3
def emit_data(keyword, sid=None):
    data_to_emit = None
    if keyword == "receive_generated_alerts":
        data_to_emit = retrieve_data_from_memcache("GENERATED_ALERTS")
    elif keyword == "receive_candidate_alerts":
        data_to_emit = retrieve_data_from_memcache("CANDIDATE_ALERTS")
    elif keyword == "receive_alerts_from_db":
        data_to_emit = retrieve_data_from_memcache("ALERTS_FROM_DB")
    elif keyword == "receive_issues_and_reminders":
        data_to_emit = retrieve_data_from_memcache("ISSUES_AND_REMINDERS")
    elif keyword == "receive_rainfall_data":
        data_to_emit = retrieve_data_from_memcache("RAINFALL_DATA")

    # var_checker("data_list", data_list, True)
    if sid:
        SOCKETIO.emit(keyword, data_to_emit, to=sid, namespace="/monitoring")
    else:
        SOCKETIO.emit(keyword, data_to_emit, namespace="/monitoring")
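
emit_data maps each websocket keyword to a memcache key with an if/elif chain before emitting on the "/monitoring" namespace. The same keyword-to-key mapping can be expressed as a lookup table; the sketch below only shows that mapping and leaves the actual memcache retrieval and SOCKETIO emit untouched.

# Sketch: the keyword-to-cache-key pairs from emit_data as a dict lookup.
KEYWORD_TO_CACHE_KEY = {
    "receive_generated_alerts": "GENERATED_ALERTS",
    "receive_candidate_alerts": "CANDIDATE_ALERTS",
    "receive_alerts_from_db": "ALERTS_FROM_DB",
    "receive_issues_and_reminders": "ISSUES_AND_REMINDERS",
    "receive_rainfall_data": "RAINFALL_DATA",
}

print(KEYWORD_TO_CACHE_KEY.get("receive_alerts_from_db"))  # ALERTS_FROM_DB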
Code Example #4
def process_totally_invalid_sites(totally_invalid_sites_list, extended,
                                  routine_sites_list, nd_internal_alert_sym):
    """
    Process all totally invalid sites for extended or routine
    """

    a0_routine_list = []
    nd_routine_list = []
    extended_list = []

    for generated_alert in totally_invalid_sites_list:
        site_code = generated_alert["site_code"]
        site_id = generated_alert["site_id"]
        ts = datetime.strptime(generated_alert["ts"], "%Y-%m-%d %H:%M:%S")
        is_release_time = check_if_routine_extended_release_time(ts)

        is_in_extended_alerts = list(
            filter(lambda x: x["event"]["site"]["site_code"] == site_code,
                   extended))
        if is_in_extended_alerts:
            if is_release_time:
                general_status = "extended"
                has_ground_data = generated_alert["has_ground_data"]

                if has_ground_data:
                    public_alert_symbol = retrieve_data_from_memcache(
                        "public_alert_symbols", {"alert_level": 0},
                        retrieve_attr="alert_symbol")
                    trigger_list_str = ""
                    internal_alert_symbol = public_alert_symbol
                else:
                    trigger_list_str = nd_internal_alert_sym
                    internal_alert_symbol = nd_internal_alert_sym

                site_wo_alert = {
                    **generated_alert, "trigger_list_str": trigger_list_str,
                    "is_release_time": is_release_time,
                    "alert_level": 0,
                    "internal_alert_level": internal_alert_symbol
                }

                formatted_alert_entry = format_alerts_for_ewi_insert(
                    site_wo_alert, general_status)

                extended_list.append(formatted_alert_entry)
        elif site_code in routine_sites_list:
            has_ground_data = generated_alert["has_ground_data"]

            if has_ground_data:
                a0_routine_list.append(site_id)
            else:
                nd_routine_list.append(site_id)

    return extended_list, a0_routine_list, nd_routine_list
Code Example #5
File: monitoring_ws.py Project: dynatech/dynaslope3
def update_alert_gen(site_code=None):
    """
    May be used to update all alert_gen related data when
    a change was made, either by validating triggers or
    by inserting new data.
    Compared to the function above, this function handles all three
    important data sets for the dashboard, mainly the following:
        1. generated alerts - current trigger and alert status of sites
        2. candidate alerts - potential releases for sites
        3. alerts from db - current validated/released status of the sites

    Args:
        site_code (String) - may be provided if only one
                site is affected by the changes you made.

    No return. Websocket emit_data handles all returns.
    """
    print(get_process_status_log("Update Alert Generation", "start"))
    try:
        generated_alerts = retrieve_data_from_memcache("GENERATED_ALERTS")
        site_gen_alert = generate_alerts(site_code)

        if site_code:
            load_site_gen_alert = json.loads(site_gen_alert)
            site_gen_alert = load_site_gen_alert.pop()

        # Find the current entry for the site provided
        json_generated_alerts = json.loads(generated_alerts)
        gen_alert_row = next(
            filter(lambda x: x["site_code"] == site_code, json_generated_alerts), None)

        if gen_alert_row:
            # Replace rather than update the alert-gen entry
            gen_alert_index = json_generated_alerts.index(gen_alert_row)
            json_generated_alerts[gen_alert_index] = site_gen_alert

        set_data_to_memcache(name="GENERATED_ALERTS",
                             data=json.dumps(json_generated_alerts))
        set_data_to_memcache(name="ALERTS_FROM_DB",
                             data=wrap_get_ongoing_extended_overdue_events())
        set_data_to_memcache(name="CANDIDATE_ALERTS",
                             data=candidate_alerts_generator.main())
    except Exception as err:
        print(err)
        raise

    print(get_process_status_log("emitting updated alert gen data", "start"))
    emit_data("receive_generated_alerts")
    emit_data("receive_alerts_from_db")
    emit_data("receive_candidate_alerts")
    print(get_process_status_log("emitting updated alert gen data", "end"))

    print(get_process_status_log("update alert gen", "end"))
Code Example #6
File: bulletin.py Project: dynatech/dynaslope3
def process_no_data_triggers(trigger_list_str):
    sources = set()

    if trigger_list_str:  # check if not None
        for match in re.findall(r"ND|.0", trigger_list_str):
            trigger_symbol = retrieve_data_from_memcache(
                "internal_alert_symbols", {"alert_symbol": match},
                retrieve_one=True,
                retrieve_attr="trigger_symbol")
            trigger_source = trigger_symbol["trigger_hierarchy"][
                "trigger_source"]
            trigger_source = "manifestation" if trigger_source == "moms" else trigger_source
            sources.add(trigger_source)

    return sources
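
The regex r"ND|.0" picks out the bare "ND" marker and any two-character symbol ending in "0" (the no-data variants) from the trigger string. A quick stand-alone check with hypothetical trigger strings:

import re

# Hypothetical trigger strings; "g0" and "x0" stand in for no-data symbols here.
print(re.findall(r"ND|.0", "sg0x0"))  # ['g0', 'x0']
print(re.findall(r"ND|.0", "ND"))     # ['ND']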
Code Example #7
File: monitoring_ws.py Project: dynatech/dynaslope3
def connect():
    """
    Connection
    """
    sid = request.sid
    # CLIENTS.append(sid)
    clients = retrieve_data_from_memcache("CLIENTS")
    if isinstance(clients, str):
        clients = []
    clients.append(sid)
    set_data_to_memcache(name="CLIENTS", data=clients)
    print("Connected user: "******"Current connected clients: {clients}")

    emit_data("receive_generated_alerts", sid=sid)
    emit_data("receive_alerts_from_db", sid=sid)
    emit_data("receive_candidate_alerts", sid=sid)
    emit_data("receive_issues_and_reminders", sid=sid)
    emit_data("receive_rainfall_data", sid=sid)
Code Example #8
def get_surficial_data_presence():
    """
    """

    now = datetime.now()
    release_interval_hours = retrieve_data_from_memcache(
        "dynamic_variables", {"var_name": "RELEASE_INTERVAL_HOURS"},
        retrieve_attr="var_value")
    next_release_time = round_to_nearest_release_time(now,
                                                      release_interval_hours)
    prev_release_time = next_release_time - \
        timedelta(hours=release_interval_hours)

    sm = SiteMarkers
    subquery_1 = DB.session.query(
        sm.site_id, sm.site_code,
        DB.func.max(sm.in_use).label("has_surficial_markers")).group_by(
            sm.site_id).subquery()
    # NOTE: "mo" is a model imported elsewhere in the source module (not shown
    # in this snippet); it provides per-site observation timestamps (ts)
    subquery_2 = DB.session.query(
        mo.site_id,
        DB.func.max(mo.ts).label("last_ts")).group_by(mo.site_id).subquery()

    result = DB.session.query(subquery_1, subquery_2.c.last_ts) \
        .join(Sites, subquery_1.c.site_id == Sites.site_id).filter(Sites.active == 1) \
        .join(subquery_2, subquery_1.c.site_id == subquery_2.c.site_id).all()

    data_presence = []
    for row in result:
        presence = False
        if prev_release_time <= row.last_ts <= next_release_time:
            presence = True

        temp = {
            "site_id": row.site_id,
            "site_code": row.site_code,
            "has_surficial_markers": row.has_surficial_markers,
            "last_data": row.last_ts.strftime("%Y-%m-%d %H:%M:%S"),
            "presence": presence
        }

        data_presence.append(temp)

    return data_presence
Code Example #9
File: ewi.py Project: dynatech/dynaslope3
def get_highest_trigger(trigger_list_str):
    triggers_arr = re.sub(r"0|x", "", trigger_list_str)

    triggers = []
    for letter in triggers_arr:
        int_symbol = retrieve_data_from_memcache("internal_alert_symbols",
                                                 {"alert_symbol": letter})

        trigger_symbol = int_symbol["trigger_symbol"]

        sym = {
            "alert_level": trigger_symbol["alert_level"],
            "alert_symbol": int_symbol["alert_symbol"],
            "hierarchy_id":
            trigger_symbol["trigger_hierarchy"]["hierarchy_id"],
            "internal_sym_id": int_symbol["internal_sym_id"]
        }

        triggers.append(sym)

    sorted_arr = sorted(triggers,
                        key=lambda i: (i["hierarchy_id"], -i["alert_level"]))

    return sorted_arr[0]
Code Example #10
File: monitoring_ws.py Project: dynatech/dynaslope3
def disconnect():
    print("In disconnect")
    # CLIENTS.remove(request.sid)
    clients = retrieve_data_from_memcache("CLIENTS")
    clients.remove(request.sid)
    set_data_to_memcache(name="CLIENTS", data=clients)
Code Example #11
def process_candidate_alerts(with_alerts, without_alerts, db_alerts_dict,
                             query_end_ts):
    """
    """
    candidate_alerts_list = []

    latest = db_alerts_dict["latest"]
    extended = db_alerts_dict["extended"]
    overdue = db_alerts_dict["overdue"]

    totally_invalid_sites_list = []

    update_routine_extended_release_time_copy()

    global ROUTINE_EXTENDED_RELEASE_TIME
    global RELEASE_INTERVAL_HOURS
    routine_extended_release_time = ROUTINE_EXTENDED_RELEASE_TIME
    release_interval_hours = RELEASE_INTERVAL_HOURS

    routine_sites_list = []
    if query_end_ts.hour == routine_extended_release_time.hour and \
            query_end_ts.minute >= routine_extended_release_time.minute:
        ts = round_to_nearest_release_time(
            query_end_ts, release_interval_hours) - timedelta(minutes=30)
        temp_sites = get_unreleased_routine_sites(ts)
        # routine_sites_list = get_routine_sites(query_end_ts)
        routine_sites_list = temp_sites["unreleased_sites"]

    # Get all latest and overdue from db alerts
    merged_db_alerts_list = latest + overdue
    internal_source_id = retrieve_data_from_memcache(
        "trigger_hierarchies", {"trigger_source": "internal"},
        retrieve_attr="source_id")

    ots_row = retrieve_data_from_memcache("operational_trigger_symbols", {
        "alert_level": -1,
        "source_id": internal_source_id
    })
    nd_internal_alert_sym = ots_row["internal_alert_symbol"]["alert_symbol"]

    if with_alerts:
        for site_w_alert in with_alerts:
            is_new_release = True
            is_release_time = False
            release_start_range = None
            site_code = site_w_alert["site_code"]
            generated_alert_level = site_w_alert["alert_level"]
            site_db_alert = next(
                filter(lambda x: x["event"]["site"]["site_code"] == site_code,
                       merged_db_alerts_list), None)
            general_status = "onset"

            saved_event_triggers = []

            # If already existing in database, i.e. is released
            if site_db_alert:
                # Get latest release data_ts
                db_alert_level = site_db_alert["public_alert_symbol"][
                    "alert_level"]

                general_status = "on-going"
                # saved_event_triggers = get_saved_event_triggers(
                #     site_db_alert["event"]["event_id"])
                saved_event_triggers = site_db_alert["latest_event_triggers"]

                for event_trigger in site_w_alert["event_triggers"]:
                    saved_trigger = next(
                        filter(
                            lambda x: x["internal_sym"]["internal_sym_id"] ==
                            event_trigger["internal_sym_id"],
                            saved_event_triggers), None)

                    is_trigger_new = False
                    if saved_trigger:
                        if datetime.strptime(saved_trigger["ts"], "%Y-%m-%d %H:%M:%S") \
                            < datetime.strptime(
                                event_trigger["ts_updated"], "%Y-%m-%d %H:%M:%S"):
                            is_trigger_new = True
                    else:
                        is_trigger_new = True

                    event_trigger["is_trigger_new"] = is_trigger_new

                db_latest_release_ts = datetime.strptime(
                    site_db_alert["releases"][0]["data_ts"],
                    "%Y-%m-%d %H:%M:%S")

                # RELEASE TIME HANDLER
                # if can release
                site_alert_ts = datetime.strptime(site_w_alert["ts"],
                                                  "%Y-%m-%d %H:%M:%S")
                release_start_range = round_to_nearest_release_time(
                    query_end_ts,
                    release_interval_hours) - timedelta(minutes=30)
                is_release_schedule_range = site_alert_ts >= release_start_range

                # if the incoming data_ts has not yet been released:
                is_new_release = db_latest_release_ts < site_alert_ts
                if is_release_schedule_range and is_new_release:
                    is_release_time = True

                # if is_onset by comparing alert_level on db and on generated
                if generated_alert_level > db_alert_level:
                    is_release_time = True
            else:
                # is onset release
                is_release_time = True

            if is_new_release:
                highest_valid_public_alert, trigger_list_str, validity_status = fix_internal_alert(
                    site_w_alert, nd_internal_alert_sym)

                site_w_alert = {
                    **site_w_alert, "alert_level": highest_valid_public_alert,
                    "trigger_list_str": trigger_list_str,
                    "is_release_time": is_release_time,
                    "release_schedule": str(release_start_range),
                    "saved_event_triggers": saved_event_triggers
                }

                formatted_alert_entry = format_alerts_for_ewi_insert(
                    site_w_alert, general_status)

                candidate_alerts_list.append(formatted_alert_entry)

                if validity_status == "invalid":
                    totally_invalid_sites_list.append(site_w_alert)

    a0_routine_list = []
    nd_routine_list = []
    routine_non_triggering_moms = {}

    merged_db_alerts_list_copy = latest + overdue

    current_routine_data_ts = None
    if without_alerts:
        for site_wo_alert in without_alerts:
            general_status = "routine"
            site_id = site_wo_alert["site_id"]
            site_code = site_wo_alert["site_code"]
            internal_alert = site_wo_alert["internal_alert"]
            not_a0_db_alerts_list = list(
                filter(lambda x: x["public_alert_symbol"]["alert_level"] != 0,
                       merged_db_alerts_list_copy))

            is_in_raised_alerts = list(
                filter(lambda x: x["event"]["site"]["site_code"] == site_code,
                       not_a0_db_alerts_list))
            is_in_extended_alerts = list(
                filter(lambda x: x["event"]["site"]["site_code"] == site_code,
                       extended))

            is_release_time = True
            site_wo_alert["alert_level"] = 0
            if is_in_raised_alerts:
                general_status = "lowering"
                # Empty event_triggers since this is for lowering
                site_wo_alert["event_triggers"] = []
            elif is_in_extended_alerts:
                general_status = "extended"

                ts = datetime.strptime(site_wo_alert["ts"],
                                       "%Y-%m-%d %H:%M:%S")
                is_release_time = check_if_routine_extended_release_time(ts)

            if (is_in_raised_alerts
                    or is_in_extended_alerts) and is_release_time:
                if internal_alert == nd_internal_alert_sym:
                    trigger_list_str = nd_internal_alert_sym
                else:
                    trigger_list_str = ""

                site_wo_alert = {
                    **site_wo_alert, "trigger_list_str": trigger_list_str,
                    "is_release_time": is_release_time
                }

                # Add checker if released

                formatted_alert_entry = format_alerts_for_ewi_insert(
                    site_wo_alert, general_status)
                candidate_alerts_list.append(formatted_alert_entry)
            else:
                if site_code in routine_sites_list:
                    # TODO: Add an API to check whether the site has already been released or not.
                    # Get sites that have not yet made the 11:30 release
                    ts = datetime.strptime(site_wo_alert["ts"],
                                           "%Y-%m-%d %H:%M:%S")

                    # add checker if released already

                    # Check if site data entry on generated alerts is already
                    # for release time
                    if ts.time() == routine_extended_release_time:
                        current_routine_data_ts = site_wo_alert["ts"]
                        non_triggering_moms = extract_non_triggering_moms(
                            site_wo_alert["unreleased_moms_list"])

                        if internal_alert == nd_internal_alert_sym:
                            nd_routine_list.append(site_id)
                        else:
                            a0_routine_list.append(site_id)

                        if non_triggering_moms:
                            routine_non_triggering_moms[
                                site_id] = non_triggering_moms

    if totally_invalid_sites_list:
        for invalid_site in totally_invalid_sites_list:
            if invalid_site["site_code"] in routine_sites_list:
                site_code = invalid_site["site_code"]
                site_id = invalid_site["site_id"]
                internal_alert = invalid_site["internal_alert"]
                ts = datetime.strptime(invalid_site["ts"], "%Y-%m-%d %H:%M:%S")

                # Check if site data entry on generated alerts is already
                # for release time
                if ts.time() == routine_extended_release_time:
                    current_routine_data_ts = invalid_site["ts"]
                    non_triggering_moms = extract_non_triggering_moms(
                        invalid_site["unreleased_moms_list"])

                    # Since there is a probability that the site is also in site_w_alert,
                    # check the totally invalid sites.
                    invalid = next(
                        filter(lambda x: x["site_code"] == site_code,
                               totally_invalid_sites_list), None)
                    if invalid:
                        non_triggering_moms.extend(
                            extract_non_triggering_moms(
                                invalid["unreleased_moms_list"]))

                    if internal_alert == nd_internal_alert_sym:
                        nd_routine_list.append(site_id)
                    else:
                        a0_routine_list.append(site_id)

                    if non_triggering_moms:
                        routine_non_triggering_moms[
                            site_id] = non_triggering_moms

        extended_list, a0_list, nd_list = process_totally_invalid_sites(
            totally_invalid_sites_list, extended, routine_sites_list,
            nd_internal_alert_sym)
        candidate_alerts_list.extend(extended_list)
        a0_routine_list.extend(a0_list)
        nd_routine_list.extend(nd_list)

    if routine_sites_list:
        has_routine_data = a0_routine_list or nd_routine_list

        if has_routine_data:
            # try:
            #     routine_data_ts = a0_routine_list[0]["ts"]
            # except IndexError:
            #     routine_data_ts = nd_routine_list[0]["ts"]
            routine_data_ts = current_routine_data_ts

            public_alert_symbol = retrieve_data_from_memcache(
                "public_alert_symbols", {"alert_level": 0},
                retrieve_attr="alert_symbol")

            routine_candidates = {
                "public_alert_level":
                0,
                "public_alert_symbol":
                public_alert_symbol,
                "data_ts":
                routine_data_ts,
                "is_release_time":
                True,
                "general_status":
                "routine",
                "routine_details": [{
                    "site_id_list":
                    a0_routine_list,
                    "internal_alert_level":
                    build_internal_alert_level(0, None),
                    "trigger_list_str":
                    None
                }, {
                    "site_id_list":
                    nd_routine_list,
                    "internal_alert_level":
                    build_internal_alert_level(0, nd_internal_alert_sym),
                    "trigger_list_str":
                    nd_internal_alert_sym
                }],
                "non_triggering_moms":
                routine_non_triggering_moms
            }
            candidate_alerts_list.append(routine_candidates)

    return candidate_alerts_list
Code Example #12
def fix_internal_alert(alert_entry, nd_internal_alert_sym):
    """
    Changes the internal alert string of each alert entry.
    """
    event_triggers = alert_entry["event_triggers"]
    internal_alert = alert_entry["internal_alert"]
    valid_alert_levels = []
    invalid_triggers = []
    trigger_list_str = None

    for trigger in event_triggers:
        alert_symbol = trigger["alert"]
        ots_row = retrieve_data_from_memcache("operational_trigger_symbols",
                                              {"alert_symbol": alert_symbol})
        trigger["internal_sym_id"] = ots_row["internal_alert_symbol"][
            "internal_sym_id"]

        source_id = trigger["source_id"]
        alert_level = trigger["alert_level"]
        op_trig_row = retrieve_data_from_memcache(
            "operational_trigger_symbols", {
                "alert_level": int(alert_level),
                "source_id": source_id
            })
        internal_alert_symbol = op_trig_row["internal_alert_symbol"][
            "alert_symbol"]

        try:
            if trigger["invalid"]:
                invalid_triggers.append(trigger)
                internal_alert = re.sub(r"%s(0|x)?" % internal_alert_symbol,
                                        "", internal_alert)

        except KeyError:  # If valid, trigger should have no "invalid" key
            valid_a_l = retrieve_data_from_memcache(
                "operational_trigger_symbols", {"alert_symbol": alert_symbol},
                retrieve_attr="alert_level")
            valid_alert_levels.append(valid_a_l)

    highest_valid_public_alert = 0
    if valid_alert_levels:
        # Get the maximum valid alert level
        highest_valid_public_alert = max(valid_alert_levels)

        validity_status = "valid"
        if invalid_triggers:  # If there are invalid triggers, yet there are valid triggers.
            validity_status = "partially_invalid"
    else:
        trigger_list_str = "A1-"  # NOTE: just to signify invalid in dashboard at first glance
        validity_status = "invalid"

    public_alert_sym = internal_alert.split("-")[0]

    is_nd = public_alert_sym == nd_internal_alert_sym
    if is_nd:
        trigger_list_str = nd_internal_alert_sym
    elif highest_valid_public_alert != 0:
        trigger_list_str = ""

    try:
        if is_nd:
            trigger_list_str += "-"

        trigger_list_str += internal_alert.split("-")[1]
    except (IndexError, TypeError):
        # no trigger suffix after the dash (or trigger_list_str is still None)
        pass

    return highest_valid_public_alert, trigger_list_str, validity_status
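
When a trigger is invalidated, its internal alert symbol, together with an optional "0" or "x" no-data suffix, is stripped from the internal alert string via re.sub. A stand-alone illustration with hypothetical symbols:

import re

# Hypothetical values: "R" plays the role of an invalidated trigger's internal
# alert symbol and "A2-SR0" a raw internal alert string.
internal_alert_symbol = "R"
internal_alert = "A2-SR0"
print(re.sub(r"%s(0|x)?" % internal_alert_symbol, "", internal_alert))  # A2-S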
Code Example #13
import re
from datetime import date, datetime, time, timedelta

from connection import create_app
from config import APP_CONFIG
from src.utils.extra import (retrieve_data_from_memcache, var_checker,
                             get_process_status_log)
from src.api.monitoring import get_unreleased_routine_sites
from src.utils.monitoring import (build_internal_alert_level,
                                  get_ongoing_extended_overdue_events,
                                  get_routine_sites, get_saved_event_triggers,
                                  round_to_nearest_release_time)

# Number of hours between releases
RELEASE_INTERVAL_HOURS = retrieve_data_from_memcache(
    "dynamic_variables", {"var_name": "RELEASE_INTERVAL_HOURS"},
    retrieve_attr="var_value")

ROU_EXT_RELEASE_TIME = retrieve_data_from_memcache(
    "dynamic_variables", {"var_name": "ROUTINE_EXTENDED_RELEASE_TIME"},
    retrieve_attr="var_value")

# Currently 12; so data timestamp to get should be 30 minutes before
DT = datetime.combine(date.today(), time(hour=ROU_EXT_RELEASE_TIME,
                                         minute=0)) - timedelta(minutes=30)
ROUTINE_EXTENDED_RELEASE_TIME = DT.time()

##########################
# Utility functions here
##########################
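
A small, self-contained check of the module-level computation above: assuming ROU_EXT_RELEASE_TIME resolves to 12, as the comment in the snippet suggests, the derived ROUTINE_EXTENDED_RELEASE_TIME lands at 11:30.

from datetime import date, datetime, time, timedelta

ROU_EXT_RELEASE_TIME = 12  # assumed value, per the comment in the snippet above
DT = datetime.combine(date.today(), time(hour=ROU_EXT_RELEASE_TIME,
                                         minute=0)) - timedelta(minutes=30)
print(DT.time())  # 11:30:00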
Code Example #14
from datetime import datetime, timedelta, time
from connection import DB
# from run import APP
from sqlalchemy import and_
from src.models.analysis import (RainfallAlerts as ra, MarkerAlerts as ma,
                                 MarkerHistory as mh, NodeAlerts as na,
                                 TSMSensors as tsma)
from src.models.monitoring import (MonitoringMoms as mm)
from src.utils.rainfall import (get_rainfall_gauge_name)
from src.utils.extra import var_checker, retrieve_data_from_memcache
from src.utils.monitoring import round_to_nearest_release_time

# Number of hours between releases
RELEASE_INTERVAL_HOURS = retrieve_data_from_memcache(
    "dynamic_variables", {"var_name": "RELEASE_INTERVAL_HOURS"},
    retrieve_attr="var_value")

#####################################
# DATA PROCESSING CODES BEYOND HERE #
#####################################


def get_on_demand_tech_info(on_demand_details):
    """
    """
    # on_demand_details

    return on_demand_details

Code Example #15
File: ewi.py Project: dynatech/dynaslope3
def create_ewi_message(release_id=None):
    """
    Returns ewi message for event, routine monitoring.

    Args:
        release_id (int) - if no release_id is provided, a template is returned;
        in this case, the routine EWI SMS template.
    """
    today = date.today()
    data_ts = datetime(today.year, today.month, today.day, 12, 0)
    greeting = get_greeting(data_ts)
    address = "(site_location)"
    ts_str = format_timestamp_to_string(data_ts)
    alert_level = 0
    monitoring_status = 2
    is_onset = False

    if release_id:
        release_id = int(release_id)
        release = get_monitoring_releases(release_id=release_id,
                                          load_options="ewi_sms_bulletin")
        data_ts = release.data_ts

        event_alert = release.event_alert
        pub_sym_id = event_alert.pub_sym_id
        event_alert_id = event_alert.event_alert_id
        alert_level = event_alert.public_alert_symbol.alert_level

        event = event_alert.event
        site = event.site
        validity = event.validity
        monitoring_status = event.status

        is_onset = check_if_onset_release(event_alert_id, release_id, data_ts)
        updated_data_ts = data_ts
        if not is_onset:
            updated_data_ts = data_ts + timedelta(minutes=30)

        greeting = get_greeting(updated_data_ts)
        address = build_site_address(site)
        ts_str = format_timestamp_to_string(updated_data_ts)

    # No ground measurement reminder if A3
    ground_reminder = ""
    if alert_level != 3:
        if release_id:
            has_active_markers = check_if_site_has_active_surficial_markers(
                site_id=site.site_id)
            g_data = "ground data" if has_active_markers else "ground observation"
        else:
            g_data = "ground data/ground observation"
        ground_reminder = f"Inaasahan namin ang pagpapadala ng LEWC ng {g_data} "

        is_alert_0 = alert_level == 0

        reporting_ts, modifier = get_next_ground_data_reporting(
            data_ts, is_onset, is_alert_0=is_alert_0, include_modifier=True)
        reporting_time = format_timestamp_to_string(reporting_ts,
                                                    time_only=True)

        if alert_level in [1, 2]:
            ground_reminder += f"{modifier} bago mag-{reporting_time}. "
        else:
            clause = " para sa"
            reason = " susunod na routine monitoring"

            reporting_str = ""

            if release_id and monitoring_status == 2:  # if monitoring status is event
                reporting_date = format_timestamp_to_string(reporting_ts,
                                                            date_only=True)
                modifier = f"bukas, {reporting_date},"

                day = (updated_data_ts - validity).days

                if day == 0:
                    extended_day = "unang"
                elif day == 1:
                    extended_day = "ikalawang"
                elif day == 2:
                    extended_day = "huling"

                if day in [0, 1, 2]:
                    reason = f" {extended_day} araw ng 3-day extended monitoring"
                    reporting_str = f"{modifier} bago mag-{reporting_time}"

            ground_reminder += f"{reporting_str}{clause}{reason}."

    desc_and_response = ""
    next_ewi = ""
    if alert_level > 0:
        trigger_list_str = release.trigger_list
        trigger_list_str = process_trigger_list(trigger_list_str,
                                                include_ND=False)

        highest_trig = get_highest_trigger(trigger_list_str)
        ewi_trig = retrieve_data_from_memcache(
            "bulletin_triggers",
            {"internal_sym_id": highest_trig["internal_sym_id"]})
        trigger_desc = ewi_trig["sms"]

        res = [
            row for row in BULLETIN_RESPONSES
            if row["pub_sym_id"] == pub_sym_id
        ].pop()
        ewi_response = copy.deepcopy(res)
        response = ewi_response["recommended"].upper()
        desc_and_response = f" {trigger_desc}. Ang recommended response ay {response}"

        next_ewi_release_ts = get_next_ewi_release_ts(data_ts, is_onset)
        next_ts = format_timestamp_to_string(next_ewi_release_ts,
                                             time_only=True)

        next_ewi += f"Ang susunod na early warning information ay mamayang {next_ts}."

    third_line = ""
    if ground_reminder != "" or next_ewi != "":
        third_line += f"{ground_reminder}{next_ewi}\n\n"

    ewi_message = (
        f"Magandang {greeting} po.\n\n"
        f"Alert {alert_level} ang alert level sa {address} ngayong {ts_str}."
        f"{desc_and_response}\n\n"
        f"{third_line}Salamat.")

    return ewi_message
Code Example #16
File: ewi.py Project: dynatech/dynaslope3
"""
"""

import re
import copy
from datetime import timedelta, datetime, date
from src.utils.monitoring import (get_monitoring_releases,
                                  check_if_onset_release,
                                  get_next_ground_data_reporting,
                                  get_next_ewi_release_ts,
                                  process_trigger_list)
from src.utils.sites import build_site_address
from src.utils.surficial import check_if_site_has_active_surficial_markers
from src.utils.extra import retrieve_data_from_memcache, format_timestamp_to_string

BULLETIN_RESPONSES = retrieve_data_from_memcache("bulletin_responses")

RELEASE_INTERVAL_HOURS = retrieve_data_from_memcache(
    "dynamic_variables", {"var_name": "RELEASE_INTERVAL_HOURS"},
    retrieve_attr="var_value")


def get_greeting(data_ts):
    hour = data_ts.hour
    greeting = ""

    if hour == 0:
        greeting = "gabi"
    elif hour < 11:
        greeting = "umaga"
    elif hour == 12:
Code Example #17
def format_alerts_for_ewi_insert(alert_entry, general_status):
    """
    Release time will come from user entry form to be added
    to release_details
    Publisher details will come from user entry form
    """
    site_id = alert_entry["site_id"]
    site_code = alert_entry["site_code"]
    alert_level = alert_entry["alert_level"]
    data_ts = alert_entry["ts"]
    trigger_list_str = alert_entry["trigger_list_str"]
    unreleased_moms_list = alert_entry["unreleased_moms_list"]
    current_trigger_alerts = alert_entry["current_trigger_alerts"]

    site_details = {"site_id": site_id, "site_code": site_code}

    public_alert_symbol = retrieve_data_from_memcache(
        "public_alert_symbols", {"alert_level": int(alert_level)},
        retrieve_attr="alert_symbol")

    non_triggering_moms = extract_non_triggering_moms(unreleased_moms_list)

    current_triggers_status = []
    for row in current_trigger_alerts:
        trigger_type = row["type"]
        details = row["details"]

        if details["alert_level"] < 0:  # accept only nd and rx
            current_triggers_status.append({
                "trigger_source": trigger_type,
                **details
            })

    formatted_alerts_for_ewi = {
        **site_details, "internal_alert_level":
        build_internal_alert_level(alert_level, trigger_list_str),
        "public_alert_level":
        alert_level,
        "public_alert_symbol":
        public_alert_symbol,
        "release_details": {
            "data_ts": data_ts,
            "trigger_list_str": trigger_list_str
        },
        "general_status":
        general_status,
        "current_triggers_status":
        current_triggers_status,
        "non_triggering_moms":
        non_triggering_moms,
        "unresolved_moms_list":
        alert_entry["unresolved_moms_list"]
    }

    try:
        formatted_alerts_for_ewi = {
            **formatted_alerts_for_ewi, "release_schedule":
            alert_entry["release_schedule"]
        }
    except KeyError:
        pass

    if general_status not in ["routine"]:
        triggers = alert_entry["event_triggers"]
        trigger_list_arr = []

        for trigger in triggers:
            if trigger != {}:
                try:
                    is_trigger_new = trigger["is_trigger_new"]
                    del trigger["is_trigger_new"]
                except KeyError:
                    is_trigger_new = True

                if is_trigger_new:
                    trig_dict = {
                        **trigger,
                        # For UI purposes
                        "trigger_alert_level":
                        trigger["alert"]
                    }

                    if trigger["trigger_type"] == "moms":
                        moms_trig_alert_level = trigger["alert_level"]

                        # Get moms with the same alert level as the trigger
                        moms_list = list(
                            filter(
                                lambda x: x["op_trigger"] ==
                                moms_trig_alert_level, unreleased_moms_list))

                        trig_dict["moms_list"] = moms_list
                        del trig_dict["moms_list_notice"]

                    trigger_list_arr.append(trig_dict)

        # THIS IS THE BACKEND to_extend_validity.
        has_unresolved_moms = bool(
            formatted_alerts_for_ewi["unresolved_moms_list"])
        to_extend_validity = (not alert_entry["has_ground_data"]
                              or has_unresolved_moms)

        try:
            saved_event_triggers = alert_entry["saved_event_triggers"]
        except KeyError:
            saved_event_triggers = []

        try:
            has_ground_data = alert_entry["has_ground_data"]
        except KeyError:
            has_ground_data = None

        formatted_alerts_for_ewi = {
            **formatted_alerts_for_ewi, "is_release_time":
            alert_entry["is_release_time"],
            "to_extend_validity":
            to_extend_validity,
            "trigger_list_arr":
            trigger_list_arr,
            "has_ground_data":
            has_ground_data,
            "saved_event_triggers":
            saved_event_triggers
        }

    return formatted_alerts_for_ewi
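
current_triggers_status in the function above keeps only entries whose alert_level is negative (the no-data and rx-type statuses). A stand-alone sketch of that filter on made-up rows:

# Made-up current_trigger_alerts rows; only negative alert_level entries survive.
current_trigger_alerts = [
    {"type": "rainfall", "details": {"alert_level": -2}},
    {"type": "subsurface", "details": {"alert_level": 0}},
    {"type": "surficial", "details": {"alert_level": -1}},
]

current_triggers_status = []
for row in current_trigger_alerts:
    if row["details"]["alert_level"] < 0:  # accept only nd and rx, as above
        current_triggers_status.append({
            "trigger_source": row["type"],
            **row["details"]
        })

print(current_triggers_status)
# [{'trigger_source': 'rainfall', 'alert_level': -2},
#  {'trigger_source': 'surficial', 'alert_level': -1}]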
Code Example #18
File: bulletin.py Project: dynatech/dynaslope3
from selenium.common.exceptions import TimeoutException, NoSuchElementException

from config import APP_CONFIG

from src.models.monitoring import MonitoringReleasesSchema
from src.utils.monitoring import (get_monitoring_releases,
                                  get_monitoring_triggers,
                                  compute_event_validity,
                                  check_if_onset_release,
                                  get_next_ground_data_reporting,
                                  get_next_ewi_release_ts)
from src.utils.extra import retrieve_data_from_memcache, format_timestamp_to_string, var_checker

# Number of hours extended if no_data upon validity
NO_DATA_HOURS_EXTENSION = retrieve_data_from_memcache(
    "dynamic_variables", {"var_name": "NO_DATA_HOURS_EXTENSION"},
    retrieve_attr="var_value")

RELEASE_INTERVAL_HOURS = retrieve_data_from_memcache(
    "dynamic_variables", {"var_name": "RELEASE_INTERVAL_HOURS"},
    retrieve_attr="var_value")

BULLETIN_RESPONSES = retrieve_data_from_memcache("bulletin_responses")

INTERNAL_ALERT_SYMBOLS = retrieve_data_from_memcache("internal_alert_symbols")


class DriverContainer:
    root_path = APP_CONFIG["root_path"]
    path = f"{root_path}/src/drivers"
    save_path = APP_CONFIG["bulletin_save_path"]