Example #1
0
def site_alerts(curr_trig, ts, release_data_ts, connection):
    """Insert alert_status rows for a site's newly (re)triggered alerts.

    Args:
        curr_trig (dataframe): Current operational triggers of one site.
        ts (datetime): Run timestamp (unused here; kept for the
            groupby.apply interface).
        release_data_ts (datetime): Release timestamp (unused here; kept
            for the groupby.apply interface).
        connection (str): Database connection name.
    """
    trig = curr_trig.drop_duplicates(
        ['site_id', 'trigger_source', 'alert_level'])
    site_id = trig['site_id'].values[0]

    # latest alert_status entry per trigger already written to the DB
    trigger_ids = ','.join(map(str, set(trig['trigger_id'].values)))
    query = ("SELECT trigger_id, MAX(ts_last_retrigger) ts_last_retrigger "
             "FROM alert_status WHERE trigger_id IN (%s)"
             " GROUP BY trigger_id") % trigger_ids
    written = db.df_read(query, connection=connection)

    # keep triggers never written before, or retriggered more than a day
    # after the last written entry
    merged = pd.merge(trig, written, how='left')
    retriggered = (merged.ts_last_retrigger + timedelta(1)
                   < merged.ts_updated)
    merged = merged.loc[retriggered | merged.ts_last_retrigger.isnull(), :]

    if merged.empty:
        qdb.print_out('no new trigger for site_id %s' % site_id)
        return

    # write the new retrigger timestamps to alert_status
    alert_status = merged[['ts_updated', 'trigger_id']].rename(
        columns={'ts_updated': 'ts_last_retrigger'})
    alert_status['ts_set'] = datetime.now()
    db.df_write(sms.DataTable('alert_status', alert_status),
                connection=connection)
Example #2
0
def site_alerts(curr_trig, ts, release_data_ts):
    """Write alert_status entries for a site's triggers that are both new
    (not yet in alert_status) and higher than what was already sent.

    Args:
        curr_trig (dataframe): Current operational triggers of one site,
            with a ts_last_retrigger column (renamed upstream).
        ts (datetime): Run timestamp; alerts sent within the last day
            before ts count as already-sent.
        release_data_ts (datetime): Release timestamp (unused here;
            kept for the groupby.apply interface).
    """
    site_id = curr_trig['site_id'].values[0]

    # alerts already sent for this site within the last day, joined with
    # their trigger source and alert level
    query = "SELECT site_id, stat.trigger_id, trigger_source, alert_level FROM "
    query += "  (SELECT * FROM alert_status "
    query += "  WHERE ts_last_retrigger >= '%s' " % (ts - timedelta(1))
    query += "  ) as stat "
    query += "INNER JOIN "
    query += "  (SELECT trigger_id, site_id, trigger_source, alert_level FROM "
    query += "    (SELECT * FROM operational_triggers "
    query += "    WHERE site_id = %s " % site_id
    query += "    ) as op "
    query += "  INNER JOIN "
    query += "    (SELECT trigger_sym_id, trigger_source, alert_level FROM "
    query += "      operational_trigger_symbols AS trig_sym "
    query += "    INNER JOIN "
    query += "      trigger_hierarchies AS trig "
    query += "    ON trig.source_id = trig_sym.source_id "
    query += "    ) as sym "
    query += "  ON op.trigger_sym_id = sym.trigger_sym_id "
    query += "  ) as sub "
    query += "ON stat.trigger_id = sub.trigger_id"
    sent_alert = qdb.get_db_dataframe(query)

    # every alert_status row already written for the current triggers
    query = "SELECT * FROM alert_status"
    query += " WHERE trigger_id in (%s)" %(','.join(map(lambda x: str(x), \
                                         set(curr_trig['trigger_id'].values))))
    written = qdb.get_db_dataframe(query)

    # unwritten triggers only; keep the highest alert level per source
    site_curr_trig = curr_trig[~curr_trig.trigger_id.isin(written.trigger_id)]
    site_curr_trig = site_curr_trig.sort_values('alert_level', ascending=False)
    site_curr_trig = site_curr_trig.drop_duplicates('trigger_source')

    if len(site_curr_trig) == 0:
        qdb.print_out('no new trigger for site_id %s' % site_id)
        return

    if len(sent_alert) == 0:
        # nothing sent yet: everything remaining is new
        pass
    elif max(site_curr_trig.alert_level) <= max(sent_alert.alert_level):
        # current alerts do not exceed what was already sent; only a
        # surficial level-1 trigger still warrants a write
        if max(sent_alert.alert_level) > 1 or \
                    (max(site_curr_trig.alert_level) == 1 and \
                    'surficial' not in site_curr_trig['trigger_source'].values):
            qdb.print_out('no higher trigger')
            return
        site_curr_trig = site_curr_trig[site_curr_trig.trigger_source ==
                                        'surficial']
    else:
        # keep only triggers strictly higher than anything already sent
        site_curr_trig = site_curr_trig[
            site_curr_trig.alert_level > max(sent_alert.alert_level)]

    alert_status = site_curr_trig[['ts_last_retrigger', 'trigger_id']]
    # NOTE(review): there is no 'ts' column at this point (it was renamed
    # to ts_last_retrigger upstream), so this rename looks like a no-op —
    # verify against callers
    alert_status = alert_status.rename(columns={'ts': 'ts_last_retrigger'})
    alert_status['ts_set'] = datetime.now()
    qdb.push_db_dataframe(alert_status, 'alert_status', index=False)
Example #3
0
def get_last_good_data(df):
    """Return, per node_id, the row(s) carrying that node's latest ts.

    Args:
        df (dataframe): Sensor data with node_id and ts columns.

    Returns:
        dataframe: Latest row(s) per node, indexed by node_id; None when
        the input is empty.
    """
    if df.empty:
        qdb.print_out("Error: Empty dataframe inputted")
        return
    # within each node, keep every row whose ts equals the node's maximum
    per_node = df.groupby('node_id')
    latest = per_node.apply(lambda grp: grp[grp.ts == grp.ts.max()])
    # collapse the (node_id, original-row) MultiIndex down to node_id
    return latest.reset_index(level=1, drop=True)
def main():
    """Compare the latest dyna and sandbox internal alerts per site and
    record any mismatches via to_DB."""
    dyna, sandbox = json_files()
    # align dyna's column names with the sandbox schema
    dyna = dyna.rename(columns={'timestamp': 'ts', 'site': 'site_code',
                                'alert': 'public_alert',
                                'sensor_alert': 'subsurface',
                                'rain_alert': 'rainfall',
                                'ground_alert': 'surficial',
                                'retriggerTS': 'triggers'})
    # only compare when both sources cover the same latest timestamp
    if max(dyna['ts'].values) != max(sandbox['ts'].values):
        return
    dyna = dyna.set_index('site_code').sort_index()
    sandbox = sandbox.set_index('site_code').sort_index()
    comparison = pd.DataFrame(index=dyna.index)
    comparison['dyna'] = dyna['internal_alert']
    comparison['sandbox'] = sandbox['internal_alert']

    # rows where the two internal alerts disagree
    diff = comparison[comparison['dyna'] != comparison['sandbox']]
    diff = diff.reset_index()
    diff['ts'] = max(sandbox['ts'].values)
    qdb.print_out(diff)
    diff.groupby('site_code', as_index=False).apply(to_DB)
Example #5
0
def node_inst_vel(filled_smoothened, roll_window_numpts, start):
    """Compute rolling OLS velocities (vel_xz, vel_xy) per node.

    Args:
        filled_smoothened (dataframe): Smoothened tilt data with columns
            xz, xy, td and ts.
        roll_window_numpts (int): Number of points per rolling window.
        start (datetime): Only rows with ts >= start are returned.

    Returns:
        dataframe: Rows from start onwards with vel_xz and vel_xy added
        (rounded to 4 decimals; zeros when the regression fails).
    """
    try:
        # rolling ordinary least squares of displacement against time
        lr_xz = ols(y=filled_smoothened.xz, x=filled_smoothened.td,
                    window=roll_window_numpts, intercept=True)
        lr_xy = ols(y=filled_smoothened.xy, x=filled_smoothened.td,
                    window=roll_window_numpts, intercept=True)

        filled_smoothened = filled_smoothened.loc[filled_smoothened.ts >= start]

        # slope of each rolling window = instantaneous velocity
        vel_xz = lr_xz.beta.x.values[0:len(filled_smoothened)]
        vel_xy = lr_xy.beta.x.values[0:len(filled_smoothened)]
        filled_smoothened['vel_xz'] = np.round(vel_xz, 4)
        filled_smoothened['vel_xy'] = np.round(vel_xy, 4)

    except Exception:
        # BUG FIX: was a bare `except:`, which also swallows SystemExit /
        # KeyboardInterrupt; fall back to zero velocity on computation errors
        qdb.print_out("ERROR in computing velocity")
        filled_smoothened['vel_xz'] = np.zeros(len(filled_smoothened))
        filled_smoothened['vel_xy'] = np.zeros(len(filled_smoothened))

    return filled_smoothened
Example #6
0
def main(connection='analysis'):
    """Fetch the past two days of positive operational triggers and write
    new alert_status entries per site.

    Args:
        connection (str): Optional. Database connection name.
    """
    start_time = datetime.now()
    qdb.print_out(start_time)

    ts = round_data_ts(start_time)
    release_data_ts = release_time(ts) - timedelta(hours=0.5)

    # ensure prerequisite tables exist
    # FIX: `== False` replaced with `not` for idiom and consistency with
    # the alert_status check below
    if not qdb.does_table_exist('operational_triggers'):
        qdb.create_operational_triggers()

    # positive operational triggers (excluding moms / on demand) whose
    # validity window covers ts, most recently updated first
    query = "SELECT trigger_id, ts, site_id, trigger_source, "
    query += "alert_level, ts_updated FROM "
    query += "  (SELECT * FROM operational_triggers "
    query += "  WHERE ts <= '%s' " % ts
    query += "  AND ts_updated >= '%s' " % (ts - timedelta(2))
    query += "  ) AS op "
    query += "INNER JOIN "
    query += "  (SELECT trigger_sym_id, alert_level, trigger_source FROM "
    query += "    (SELECT * FROM operational_trigger_symbols "
    query += "    WHERE alert_level > 0 "
    query += "    ) AS trig_sym "
    query += "  INNER JOIN "
    query += "    (SELECT * FROM trigger_hierarchies WHERE trigger_source not in ('moms', 'on demand')) AS trig "
    query += "  USING (source_id) "
    query += "  ) AS sym "
    query += "USING (trigger_sym_id) "
    query += "ORDER BY ts_updated DESC"
    curr_trig = db.df_read(query, connection=connection)

    if len(curr_trig) == 0:
        qdb.print_out('no new trigger')
        return

    if not qdb.does_table_exist('alert_status'):
        qdb.create_alert_status()

    # process each site independently
    site_curr_trig = curr_trig.groupby('site_id', as_index=False)
    site_curr_trig.apply(site_alerts,
                         ts=ts,
                         release_data_ts=release_data_ts,
                         connection=connection)
Example #7
0
def main():
    """Fetch the past day of positive operational triggers and write new
    alert_status entries per site."""
    start_time = datetime.now()
    qdb.print_out(start_time)

    ts = pub.round_data_ts(start_time)
    release_data_ts = pub.release_time(ts) - timedelta(hours=0.5)

    # ensure prerequisite tables exist
    # FIX: `== False` replaced with `not` for idiom and consistency with
    # the alert_status check below
    if not qdb.does_table_exist('operational_triggers'):
        qdb.create_operational_triggers()

    # positive operational triggers whose validity window covers ts,
    # most recently updated first
    query = "SELECT trigger_id, ts, site_id, trigger_source, "
    query += "alert_level, ts_updated FROM "
    query += "  (SELECT * FROM operational_triggers "
    query += "  WHERE ts <= '%s' " % ts
    query += "  AND ts_updated >= '%s' " % (ts - timedelta(1))
    query += "  ) AS op "
    query += "INNER JOIN "
    query += "  (SELECT trigger_sym_id, alert_level, trigger_source FROM "
    query += "    (SELECT * FROM operational_trigger_symbols "
    query += "    WHERE alert_level > 0 "
    query += "    ) AS trig_sym "
    query += "  INNER JOIN "
    query += "    trigger_hierarchies AS trig "
    query += "  ON trig_sym.source_id = trig.source_id "
    query += "  ) AS sym "
    query += "ON op.trigger_sym_id = sym.trigger_sym_id "
    query += "ORDER BY ts_updated DESC"
    curr_trig = qdb.get_db_dataframe(query)

    if len(curr_trig) == 0:
        qdb.print_out('no new trigger')
        return

    if not qdb.does_table_exist('alert_status'):
        qdb.create_alert_status()

    # site_alerts expects a ts_last_retrigger column
    curr_trig = curr_trig.rename(columns={"ts_updated": "ts_last_retrigger"})
    site_curr_trig = curr_trig.groupby('site_id', as_index=False)
    site_curr_trig.apply(site_alerts, ts=ts, release_data_ts=release_data_ts)
Example #8
0
def main(end=None):
    """Compiles all alerts to compute for public alert and internal alert.
    Writes result to public_alert table and publicalert.json

    Args:
        end (datetime): Optional. Public alert timestamp. Defaults to the
            time main() is called.
    """
    start_time = datetime.now()
    qdb.print_out(start_time)

    # BUG FIX: the signature used to be main(end=datetime.now()), whose
    # default is evaluated once at import time; evaluate at call time.
    if end is None:
        end = datetime.now()
    end = round_data_ts(pd.to_datetime(end))

    # alert symbols
    # public alert
    public_symbols = get_public_symbols()
    pub_map = alert_map(public_symbols)
    # internal alert
    internal_symbols = get_internal_symbols()
    # operational triggers
    trig_symbols = get_trigger_symbols()
    # subsurface alert
    subsurface_map = alert_map(
        trig_symbols[trig_symbols.trigger_source == 'subsurface'])
    # surficial alert
    surficial_map = alert_map(
        trig_symbols[trig_symbols.trigger_source == 'surficial'])
    # Manifestation Of Movement
    # NOTE(review): moms_map is currently unused below — verify intent
    moms_map = alert_map(
        trig_symbols[trig_symbols.trigger_source == 'moms'])
    # rainfall alert
    rain_map = alert_map(
        trig_symbols[trig_symbols.trigger_source == 'rainfall'])

    # site id and code of active sites
    query = "SELECT site_id, site_code FROM sites WHERE active = 1"
    props = qdb.get_db_dataframe(query)
    site_props = props.groupby('site_id', as_index=False)

    # per-site public alert computation
    alerts = site_props.apply(site_public_alert, end=end,
                              public_symbols=public_symbols,
                              internal_symbols=internal_symbols,
                              start_time=start_time).reset_index(drop=True)

    alerts = alerts.sort_values(['public_alert', 'site_code'],
                                ascending=[False, True])

    # map alert level to alert symbol
    alerts['public_alert'] = alerts['public_alert'].map(pub_map)
    alerts['rainfall'] = alerts['rainfall'].map(rain_map)
    alerts['surficial'] = alerts['surficial'].map(surficial_map)
    site_alerts = alerts.groupby('site_code', as_index=False)
    alerts = site_alerts.apply(subsurface_sym,
                               sym_map=subsurface_map).reset_index(drop=True)
    # map invalid alerts
    current_events = query_current_events(end)
    current_alerts = current_events.apply(get_alert_history)

    columns = ['iomp', 'site_code', 'alert_symbol', 'ts_last_retrigger',
               'remarks', 'trigger_source', 'alert_status',
               'public_alert_symbol']
    invalid_alerts = pd.DataFrame(columns=columns)

    try:
        # collect rows flagged invalid (alert_status == -1) per site;
        # the dead `else: invalid_alerts = invalid_alerts` branch was removed
        for site in current_alerts.site_code.unique():
            site_df = current_alerts[current_alerts.site_code == site]
            for i in range(len(site_df)):
                if site_df.alert_status.values[i] == -1:
                    alert = pd.Series(site_df.values[i], index=columns)
                    invalid_alerts = invalid_alerts.append(alert,
                                                           ignore_index=True)

        invalid_alerts = invalid_alerts.drop_duplicates(
            ['alert_symbol', 'site_code'])
        invalid_alerts['ts_last_retrigger'] = \
            invalid_alerts['ts_last_retrigger'].apply(lambda x: str(x))

    except Exception:
        # BUG FIX: was a bare `except:`; invalid alerts are best-effort
        # and default to an empty frame on any failure
        invalid_alerts = pd.DataFrame()

    all_alerts = pd.DataFrame({'invalids': [invalid_alerts],
                               'alerts': [alerts]})

    public_json = all_alerts.to_json(orient="records")

    # write JSON under the configured output path, creating it if needed
    output_path = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '../..'))
    sc = qdb.memcached()
    if not os.path.exists(output_path + sc['fileio']['output_path']):
        os.makedirs(output_path + sc['fileio']['output_path'])

    with open(output_path + sc['fileio']['output_path']
              + 'PublicAlertRefDB.json', 'w') as w:
        w.write(public_json)

    qdb.print_out('runtime = %s' %(datetime.now() - start_time))

    return alerts
Example #9
0
def site_public_alert(site_props, end, public_symbols, internal_symbols,
                      start_time):  
    """Dataframe containing necessary information for public release.

    Args:
        site_props (dataframe): ID and three-letter code of each site.
        end (datetime): Public alert timestamp.
        public_symbols (dataframe): Public alert symbols and id corresponding
                                    to its alert level.
        internal_symbols (dataframe): Internal alert symbols and id
                                      corresponding to its alert level.
        start_time (datetime): Script start time; its minute value decides
                               whether the result may be written to the DB.

    Returns:
        dataframe: Contains timestamp, three-letter site code, public alert, 
                   internal alert, validity of alert, subsurface alert, 
                   surficial alert, rainfall alert, most recent timestamp of
                   alert > 0 (if any) per alert level per operational trigger.
    """
    
    # id and three-letter code per site
    site_code = site_props['site_code'].values[0]
    site_id = site_props['site_id'].values[0]
    qdb.print_out(site_code)

    # Creates a public_alerts table if it doesn't exist yet
    if qdb.does_table_exist('public_alerts') == False:
        qdb.create_public_alerts()
    
    # start of monitoring: start of event or 24 hours from "end"
    try:
        monitoring_type = get_monitoring_type(site_id, end)
    except:
        # no monitoring record available; fall back to routine monitoring
        monitoring_type = 'routine'

    if monitoring_type == 'event':
        start_monitor = event_start(site_id, end)
    else:
        start_monitor = end - timedelta(1)

    # operational triggers for monitoring at timestamp end
    op_trig = get_operational_trigger(site_id, start_monitor, end)
    # triggers updated within 4 hours of release time still count for release
    release_op_trig = op_trig[op_trig.ts_updated >= \
            release_time(end)-timedelta(hours=4)]
    release_op_trig = release_op_trig.drop_duplicates(['source_id', \
            'alert_level'])
    subsurface_id = internal_symbols[internal_symbols.trigger_source == \
            'subsurface']['source_id'].values[0]
    surficial_id = internal_symbols[internal_symbols.trigger_source == \
            'surficial']['source_id'].values[0]
    # subsurface triggers must be updated up to "end" itself to be released
    release_op_trig = release_op_trig[~((release_op_trig.source_id \
            == subsurface_id) & (release_op_trig.ts_updated < end))]
    # positive triggers; surficial at alert level 1 is excluded here
    pos_trig = op_trig[(op_trig.alert_level > 0) & ~((op_trig.alert_level == 1) \
                        & (op_trig.source_id == surficial_id))]
    last_pos_trig = pos_trig.drop_duplicates(['source_id', \
            'alert_level'])

    # public alert based on highest alert level in operational triggers
    public_alert = max(list(pos_trig['alert_level'].values) + [0])
    qdb.print_out('Public Alert %s' %public_alert)

    # subsurface alert
    subsurface = get_tsm_alert(site_id, end)

    # surficial alert
    if public_alert > 0:
        surficial_ts = release_time(end) - timedelta(hours=4)
    else:
        surficial_ts = pd.to_datetime(end.date())
    surficial_id = internal_symbols[internal_symbols.trigger_source == \
            'surficial']['source_id'].values[0]
    try:
        surficial = op_trig[(op_trig.source_id == surficial_id) & \
                 (op_trig.ts_updated >= surficial_ts)]['alert_level'].values[0]
    except:
        # no surficial measurement in the window: -1 marks "no data"
        surficial = -1
            
    # rainfall alert
    rainfall_id = internal_symbols[internal_symbols.trigger_source == \
            'rainfall']['source_id'].values[0]
    try:
        rainfall = op_trig[(op_trig.source_id == rainfall_id) & \
                           (op_trig.ts_updated >= end - \
                            timedelta(hours=0.5))]['alert_level'].values[0]
    except:
        # no rainfall data within the last half hour: -1 marks "no data"
        rainfall = -1
    
    # INTERNAL ALERT
    internal_id = internal_symbols[internal_symbols.trigger_source == \
            'internal']['source_id'].values[0]
    if public_alert > 0:
        # validity of alert
        validity = pd.to_datetime(max(pos_trig['ts_updated'].values)) \
                                 + timedelta(1)
        validity = release_time(validity)
        
        if public_alert == 3:
            # highest alert level gets an extra day of validity
            validity += timedelta(1)
            
        # internal alert based on positive triggers and data presence
        internal_df = get_internal_alert(pos_trig, release_op_trig,       
                                  internal_symbols)

        # check if rainfall > 0.75% of threshold
        rain75_id = internal_symbols[(internal_symbols.source_id == \
                        rainfall_id)&(internal_symbols.alert_level \
                        == -2)]['trigger_sym_id'].values[0]
        
        if rainfall == 0 and end >= validity - timedelta(hours=0.5):
            internal_df, is_x = replace_rainfall_alert_if_rx(internal_df, internal_symbols,
                                               site_id, end, rainfall_id,
                                               rain75_id)
            
            if is_x == True:
                rainfall = -2

        internal_df = internal_df.sort_values('hierarchy_id')
        internal_alert = ''.join(internal_df['alert_symbol'].values)

        if public_alert > 1:
            # prefix the public alert symbol for A2 and above
            internal_alert = public_symbols[public_symbols.alert_level == \
                             public_alert]['alert_symbol'].values[0] + '-' + \
                             internal_alert

    # ground data presence: subsurface, surficial, moms
    if public_alert <= 1:
        if surficial == -1 and len(subsurface[subsurface.alert_level != -1]) == 0:
            ground_alert = -1
        else:
            ground_alert = 0
        if public_alert == 0 or ground_alert == -1:
            pub_internal = internal_symbols[(internal_symbols.alert_level == \
                             ground_alert) & (internal_symbols.source_id == \
                             internal_id)]['alert_symbol'].values[0]
            if public_alert == 0:
                internal_alert = ''
                hyphen = ''
            else:
                hyphen = '-'
        else:
            pub_internal = public_symbols[public_symbols.alert_level == \
                             public_alert]['alert_symbol'].values[0]
            hyphen = '-'
        internal_alert = pub_internal + hyphen + internal_alert
    elif -1 in internal_df[internal_df.trigger_source != 'rainfall']['alert_level'].values:
        ground_alert = -1
    else:
        ground_alert = 0

    # PUBLIC ALERT
    # check if end of validity: lower alert if with data and not rain75
    if public_alert > 0:
        is_release_time_run = end.time() in [time(3, 30), time(7, 30),
                        time(11, 30), time(15, 30), time(19, 30),
                        time(23, 30)]
        is_45_minute_beyond = int(start_time.strftime('%M')) > 45
        is_not_yet_write_time = not (is_release_time_run and is_45_minute_beyond)
        
        # check if end of validity: lower alert if with data and not rain75
        if validity > end + timedelta(hours=0.5):
            pass
        elif rain75_id in internal_df['trigger_sym_id'].values \
                or validity + timedelta(3) > end + timedelta(hours=0.5) \
                    and ground_alert == -1 or is_not_yet_write_time:
            # extend validity while rain75 is active, data is missing, or
            # it is not yet the scheduled write time
            validity = release_time(end)
            
            if is_release_time_run:
                if not(is_45_minute_beyond):
                    # flag checked via NameError near the end of this function
                    do_not_write_to_db = True
        else:
            # validity lapsed: lower to A0 with the data-presence symbol
            validity = ''
            public_alert = 0
            internal_alert = internal_symbols[(internal_symbols.alert_level == \
                             ground_alert) & (internal_symbols.source_id == \
                             internal_id)]['alert_symbol'].values[0]
    else:
        validity = ''
        public_alert = 0
        internal_alert = internal_symbols[(internal_symbols.alert_level == \
                         ground_alert) & (internal_symbols.source_id == \
                         internal_id)]['alert_symbol'].values[0]

    # start of event
    if monitoring_type != 'event' and len(pos_trig) != 0:
        ts_onset = min(pos_trig['ts'].values)
        ts_onset = pd.to_datetime(ts_onset)
    
    # most recent retrigger of positive operational triggers
    try:
        #last positive retriggger/s
        triggers = last_pos_trig[['trigger_id', 'alert_symbol', 'ts_updated']]
        triggers = triggers.rename(columns = {'alert_symbol': 'alert', \
                'ts_updated': 'ts'})
        triggers['ts'] = triggers['ts'].apply(lambda x: str(x))
    except:
        # NOTE(review): presumably reached when last_pos_trig lacks the
        # expected columns (no positive triggers) — verify
        triggers = pd.DataFrame(columns=['trigger_id', 'alert', 'ts'])
     
    #technical info for bulletin release
    try:
        #tech_info = pd.DataFrame(columns=['subsurface', 'surficial', 'rainfall', \
        #  'earthquake', 'on demand'])
        pos_trig = pd.merge(pos_trig, internal_symbols, on='trigger_sym_id')
        tech_info = tech_info_maker.main(pos_trig)
    except:
        tech_info = pd.DataFrame()

    
    try:    
        # timestamp of the latest operational trigger with data
        ts = max(op_trig[op_trig.alert_level != -1]['ts_updated'].values)
        ts = round_data_ts(pd.to_datetime(ts))
    except:
        ts = end
        
    # clamp ts to end when it is in the future or outside the scheduled
    # run-minute windows
    if ts > end or (int(start_time.strftime('%M')) >= 45 \
                    or int(start_time.strftime('%M')) >= 15
                    and int(start_time.strftime('%M')) < 30) and ts != end:
        ts = end

    ts = str(ts)    
    validity = str(validity)

    public_df = pd.DataFrame({'ts': [ts], 'site_id': [site_id],
                    'site_code': [site_code], 'public_alert': [public_alert],
                    'internal_alert': [internal_alert], 'validity': [validity],
                    'subsurface': [subsurface], 'surficial': [surficial],
                    'rainfall': [rainfall], 'triggers': [triggers],
                    'tech_info': [tech_info]})

    # writes public alert to database
    pub_sym_id =  public_symbols[public_symbols.alert_level == \
                  public_alert]['pub_sym_id'].values[0]
    site_public_df = pd.DataFrame({'ts': [end], 'site_id': [site_id], \
            'pub_sym_id': [pub_sym_id], 'ts_updated': [end]})
    
    # onset trigger
    try:
        # for a newly started event, back-date ts to the first positive trigger
        site_public_df['ts'] = round_data_ts(ts_onset)
    except:
        # ts_onset undefined (ongoing event or no positive trigger)
        pass
    
    try:
        # raises NameError unless do_not_write_to_db was set above
        do_not_write_to_db
    except:
        qdb.alert_to_db(site_public_df, 'public_alerts')
    
    return public_df
Example #10
0
def main(tsm_name='', end='', end_mon=False):
    """Compute and store the subsurface (tilt) alert for one tsm sensor.

    Args:
        tsm_name (str): Optional. Sensor name; read from argv[1] if empty.
        end (str or datetime): Optional. Analysis end timestamp; read from
            argv[2] (falling back to now) if empty.
        end_mon (bool): Optional. Kept for interface compatibility; unused
            in the active code path.
    """
    run_start = datetime.now()
    qdb.print_out(run_start)
    qdb.print_out(tsm_name)

    if tsm_name == '':
        tsm_name = sys.argv[1].lower()

    if end == '':
        try:
            end = pd.to_datetime(sys.argv[2])
        except (IndexError, ValueError, TypeError):
            # BUG FIX: was a bare `except:`; a missing or unparsable argv
            # timestamp falls back to the current time
            end = datetime.now()
    else:
        end = pd.to_datetime(end)

    window, sc = rtw.get_window(end)

    tsm_props = qdb.get_tsm_list(tsm_name)[0]
    data = proc.proc_data(tsm_props, window, sc)
    tilt = data.tilt[window.start:window.end]
    lgd = data.lgd
    tilt = tilt.reset_index().sort_values('ts', ascending=True)

    if lgd.empty:
        qdb.print_out('%s: no data' % tsm_name)
        return

    # node-level alerts
    nodal_tilt = tilt.groupby('node_id', as_index=False)
    alert = nodal_tilt.apply(lib.node_alert,
                             colname=tsm_props.tsm_name,
                             num_nodes=tsm_props.nos,
                             disp=float(sc['subsurface']['disp']),
                             vel2=float(sc['subsurface']['vel2']),
                             vel3=float(sc['subsurface']['vel3']),
                             k_ac_ax=float(sc['subsurface']['k_ac_ax']),
                             lastgooddata=lgd,
                             window=window,
                             sc=sc).reset_index(drop=True)

    # column-level alerts; lib.column_alert fills alert['col_alert'] in place
    alert['col_alert'] = -1
    col_alert = pd.DataFrame({
        'node_id': range(1, tsm_props.nos + 1),
        'col_alert': [-1] * tsm_props.nos
    })
    node_col_alert = col_alert.groupby('node_id', as_index=False)
    node_col_alert.apply(lib.column_alert,
                         alert=alert,
                         num_nodes_to_check=int(
                             sc['subsurface']['num_nodes_to_check']),
                         k_ac_ax=float(sc['subsurface']['k_ac_ax']),
                         vel2=float(sc['subsurface']['vel2']),
                         vel3=float(sc['subsurface']['vel3']))

    # exclude nodes marked invalid by the data-quality step
    valid_nodes_alert = alert.loc[~alert.node_id.isin(data.inv)]

    if max(valid_nodes_alert['col_alert'].values) > 0:
        # confirm positive column alerts against the displacement trend
        pos_alert = valid_nodes_alert[valid_nodes_alert.col_alert > 0]
        site_alert = trend.main(pos_alert, tsm_props.tsm_id, window.end,
                                data.inv)
    else:
        site_alert = max(
            lib.get_mode(list(valid_nodes_alert['col_alert'].values)))

    tsm_alert = pd.DataFrame({
        'ts': [window.end],
        'tsm_id': [tsm_props.tsm_id],
        'alert_level': [site_alert],
        'ts_updated': [window.end]
    })

    qdb.alert_to_db(tsm_alert, 'tsm_alerts')

    writeOperationalTriggers(tsm_props.site_id, window.end)

    qdb.print_out(tsm_alert)

    qdb.print_out('run time = ' + str(datetime.now() - run_start))