示例#1
0
def web_plotter(site_code, end, days):
    """Compute rainfall plot data for one site and return it as JSON.

    Created by Kevin. For integration and refactoring in the future.

    Args:
        site_code (str): Site code to plot. Empty string means no site filter.
        end: End timestamp of the plot window (anything pandas can parse).
        days: Length of the real-time window, in days.

    Returns:
        str: Plot data serialized as JSON (records orient).
    """

    start_time = datetime.now()
    qdb.print_out(start_time)

    dt_end = pd.to_datetime(end)
    sc = mem.server_config()
    rtw = sc['rainfall']['roll_window_length']
    ts_end, start, offsetstart = get_rt_window(
        float(days), float(rtw), end=dt_end)

    gauges = rainfall_gauges()
    # Filter only when a site code was actually given. (Previously
    # site_code was wrapped in a list *before* this check, so the check was
    # always true and an empty site_code filtered out every gauge.)
    if site_code != '':
        gauges = gauges[gauges.site_code.isin([site_code])]

    gauges['site_id'] = gauges['site_id'].apply(lambda x: float(x))
    site_props = gauges.groupby('site_id')

    # rp.main plots per site; save_plot=False since output is JSON only
    plot_data = site_props.apply(rp.main, offsetstart=offsetstart,
                                 tsn=end, save_plot=False, sc=sc,
                                 start=start, output_path="",
                                 end=ts_end).reset_index(drop=True)

    json_plot_data = plot_data.to_json(orient="records")

    qdb.print_out("runtime = %s" % (datetime.now() - start_time))

    return json_plot_data
def main(end_ts=None):
    """Process surficial triggers raised within 2 days before ``end_ts``.

    For each surficial trigger in the window:
      * a valid (alert_status == 1) 'lt' trigger is promoted to an l2
        operational trigger;
      * an invalidated (alert_status == -1) trigger deletes the matching
        public alert, unless a valid co-trigger of equal or greater alert
        level exists at the event start.

    Args:
        end_ts (datetime): End of the processing window. Defaults to the
            current timestamp, evaluated at call time.
    """

    if end_ts is None:
        # A `datetime.now()` default argument is evaluated once at import
        # time and then frozen; resolve "now" per call instead.
        end_ts = datetime.now()

    start_ts = pd.to_datetime(end_ts) - timedelta(2)
    print(start_ts)
    surficial_triggers = qdb.get_surficial_trigger(start_ts, end_ts)

    if len(surficial_triggers) == 0:
        qdb.print_out("No surficial trigger to process")

    for index, surficial in surficial_triggers.iterrows():
        ts_updated = surficial['ts_updated']
        public_ts_start = round_data_ts(ts_updated)
        alert_level = surficial['alert_level']
        alert_symbol = surficial['alert_symbol']
        alert_status = surficial['alert_status']
        site_id = surficial['site_id']
        site_code = surficial['site_code']

        if alert_symbol == 'lt':
            if alert_status == 1:
                qdb.print_out("Found valid lt surficial trigger for "
                              "%s at %s" % (site_code.upper(), ts_updated))
                trigger_sym_id = qdb.get_trigger_sym_id(2, 'surficial')
                df = pd.DataFrame({
                    'ts': [ts_updated],
                    'site_id': [site_id],
                    'trigger_sym_id': [trigger_sym_id],
                    'ts_updated': [ts_updated]
                })
                qdb.alert_to_db(df, 'operational_triggers', lt_overwrite=False)
                qdb.print_out(" > Added l2 trigger on operational triggers")

        # Process only l2 and l3 with alert status of -1 (invalid)
        elif alert_status == -1:
            valid_cotriggers = qdb.get_valid_cotriggers(
                site_id, public_ts_start)
            dont_delete = False
            # Check if it has co-triggers on start of event
            # tho highly unlikely
            if len(valid_cotriggers) != 0:
                # distinct loop variable: the original reused `index`,
                # shadowing the outer iterrows() index
                for _, valid in valid_cotriggers.iterrows():
                    # Don't delete public alert entry if there
                    # is a co-trigger that's equal or
                    # greater of alert level
                    if valid['alert_level'] >= alert_level:
                        qdb.print_out(
                            "%s has valid co-trigger: deleting will NOT commence"
                            % (site_code.upper()))
                        dont_delete = True
                        break

            if not dont_delete:
                qdb.delete_public_alert(site_id, public_ts_start)
                qdb.print_out("Deleted {} public alert of {}".format(
                    public_ts_start, site_code))
示例#3
0
def get_last_good_data(df):
    """Return, per node_id, the row(s) carrying that node's latest timestamp.

    Args:
        df (dataframe): Data with at least 'node_id' and 'ts' columns.

    Returns:
        dataframe: Latest row(s) of each node_id (node_id kept as index);
            empty dataframe when the input is empty.
    """
    if df.empty:
        qdb.print_out("Error: Empty dataframe inputted")
        return pd.DataFrame()

    # per-node groups -> keep only the rows matching each group's max ts
    per_node = df.groupby('node_id')
    latest = per_node.apply(lambda grp: grp[grp.ts == grp.ts.max()])
    # drop the inner (original-row) index level added by apply
    return latest.reset_index(level=1, drop=True)
示例#4
0
def main():
    """Writes in rainfall_gauges information on available rain gauges
    for rainfall alert analysis.

    Inserts gauges (senslope + NOAH) not yet in the rainfall_gauges table,
    then writes gauges that have been newly marked as deactivated.
    """

    start = datetime.now()
    qdb.print_out(start)

    if qdb.does_table_exist('rainfall_gauges') == False:
        #Create a rainfall_gauges table if it doesn't exist yet
        qdb.create_rainfall_gauges()

    # senslope-owned gauges that actually measure rain
    senslope = mem.get('df_dyna_rain_gauges')
    senslope = senslope.loc[senslope.has_rain == 1, :]
    senslope.loc[:, 'data_source'] = 'senslope'

    noah = noah_gauges()

    # NOTE(review): DataFrame.append was removed in pandas 2.0; this code
    # assumes an older pandas — confirm before upgrading the dependency.
    all_gauges = senslope.append(noah, sort=False)
    all_gauges.loc[:, 'gauge_name'] = all_gauges.loc[:, 'gauge_name'].apply(
        lambda x: str(x))
    all_gauges.loc[:, 'date_activated'] = pd.to_datetime(
        all_gauges.loc[:, 'date_activated'])
    written_gauges = mem.get('df_rain_gauges')
    # gauge names present in the source lists but not yet in the database
    not_written = set(all_gauges['gauge_name']) \
                     - set(written_gauges['gauge_name'])

    # new gauges: not yet written and not deactivated
    new_gauges = all_gauges.loc[all_gauges.gauge_name.isin(not_written), :]
    new_gauges = new_gauges.loc[new_gauges.date_deactivated.isnull(), :]
    new_gauges = new_gauges.loc[:, [
        'gauge_name', 'data_source', 'longitude', 'latitude', 'date_activated'
    ]]
    if len(new_gauges) != 0:
        data_table = sms.DataTable('rainfall_gauges', new_gauges)
        db.df_write(data_table)

    # gauges already recorded as deactivated in the database
    deactivated = written_gauges.loc[
        ~written_gauges.date_deactivated.isnull(), :]

    # newly deactivated: flagged in the source lists, already written,
    # but not yet recorded as deactivated
    deactivated_gauges = all_gauges.loc[(~all_gauges.date_deactivated.isnull()) \
                                  & (~all_gauges.gauge_name.isin(not_written))\
                                  & (~all_gauges.gauge_name.isin(deactivated.gauge_name)), :]
    date_deactivated = pd.to_datetime(
        deactivated_gauges.loc[:, 'date_deactivated'])
    deactivated_gauges.loc[:, 'date_deactivated'] = date_deactivated
    # NOTE(review): the column list below omits 'date_deactivated', so the
    # parsed deactivation date just computed above is never written — looks
    # like a copy-paste of the new_gauges column list; confirm intent.
    deactivated_gauges = deactivated_gauges.loc[:, [
        'gauge_name', 'data_source', 'longitude', 'latitude', 'date_activated'
    ]]
    if len(deactivated_gauges) != 0:
        data_table = sms.DataTable('rainfall_gauges', deactivated_gauges)
        db.df_write(data_table)

    qdb.print_out('runtime = %s' % (datetime.now() - start))
示例#5
0
def plot_column_positions(df, tsm_props, window, sc, show_part_legend,
                          max_min_cml=''):
    """Plot xz and xy absolute displacements of each node (column positions).

    Args:
        df (dataframe): Column position data; must have a 'ts' column plus
            whatever per-axis columns subplot_colpos consumes.
        tsm_props: TSM sensor properties; tsm_name is used for the title.
        window: Monitoring window. Referenced only by the commented-out
            noise-envelope section below.
        sc (dict): Server config, passed through to subplot_colpos.
        show_part_legend: Legend subsampling option passed to subplot_colpos.
        max_min_cml: Noise envelope data; used only by the commented-out
            section. Optional.

    Returns:
        tuple: (ax_xz, ax_xy) matplotlib axes.
        NOTE(review): if the try block fails before the axes are created,
        the final return raises UnboundLocalError despite the broad except —
        confirm whether callers rely on always getting axes back.
    """
#==============================================================================
# 
#     DESCRIPTION
#     returns plot of xz and xy absolute displacements of each node
# 
#     INPUT
#     colname; array; list of sites
#     x; dataframe; vertical displacements
#     xz; dataframe; horizontal linear displacements along the planes defined by xa-za
#     xy; dataframe; horizontal linear displacements along the planes defined by xa-ya
#==============================================================================

    try:
        # paired subplots: xz (downslope) left, xy (across slope) right
        fig=plt.figure()
        ax_xz=fig.add_subplot(121)
        ax_xy=fig.add_subplot(122,sharex=ax_xz,sharey=ax_xz)
    
        # one distinct color per unique timestamp in the data
        ax_xz=nonrepeat_colors(ax_xz,len(set(df['ts'].values)),color='plasma')
        ax_xy=nonrepeat_colors(ax_xy,len(set(df['ts'].values)),color='plasma')
    
        # maps each timestamp to its plotting-order index
        colposTS = pd.DataFrame({'ts': sorted(set(df.ts)), 'index':
            range(len(set(df.ts)))})
        
        dfts = df.groupby('ts')
        dfts.apply(subplot_colpos, ax_xz=ax_xz, ax_xy=ax_xy,
                   show_part_legend=show_part_legend, sc=sc, colposTS=colposTS)
    
#        try:
#            max_min_cml = max_min_cml.apply(lambda x: x*1000)
#            xl = df.loc[(df.ts == window.end)&(df.node_id <= tsm_props.nos)&(df.node_id >= 1)]['x'].values[::-1]
#            ax_xz.fill_betweenx(xl, max_min_cml['xz_maxlist'].values, max_min_cml['xz_minlist'].values, where=max_min_cml['xz_maxlist'].values >= max_min_cml['xz_minlist'].values, facecolor='0.7',linewidth=0)
#            ax_xy.fill_betweenx(xl, max_min_cml['xy_maxlist'].values, max_min_cml['xy_minlist'].values, where=max_min_cml['xy_maxlist'].values >= max_min_cml['xy_minlist'].values, facecolor='0.7',linewidth=0)
#        except:
#            qdb.print_out('error in plotting noise env')
    
        # rotate every x tick label vertically on both axes
        for tick in list(ax_xz.xaxis.get_minor_ticks()) \
                + list(ax_xy.xaxis.get_minor_ticks()) \
                + list(ax_xz.xaxis.get_major_ticks()) \
                + list(ax_xy.xaxis.get_major_ticks()):          
            tick.label.set_rotation('vertical')

        plt.subplots_adjust(top=0.92, bottom=0.15, left=0.10, right=0.73)        
        plt.suptitle(tsm_props.tsm_name)
        ax_xz.grid(True)
        ax_xy.grid(True)

    except:        
        # broad catch: plotting failures are only logged, never raised
        qdb.print_out(tsm_props.tsm_name + " ERROR in plotting column position")

    return ax_xz,ax_xy
示例#6
0
def download_rainfall_noah(noah_id, fdate, tdate):
    """Downloads rainfall data of noah_id from fdate to tdate.

    Args:
        noah_id (int): Device id of noah data.
        fdate (timestamp): Timestamp start of data to be downloaded.
        tdate (timestamp): Timestamp end of data to be downloaded.

    Returns:
        dataframe: Rainfall data of noah_id from fdate to tdate if with data
                    else empty dataframe.
    """

    #Reduce latest_ts by 1 day as a work around for GMT to local conversion
    offset_date = (pd.to_datetime(fdate) - timedelta(1)).strftime("%Y-%m-%d")

    sc = mem.server_config()
    url = (sc['rainfall']['noah_data'] +
           '/%s/from/%s/to/%s') % (noah_id, offset_date, tdate)
    try:
        req = requests.get(url,
                           auth=(sc['rainfall']['noah_user'],
                                 sc['rainfall']['noah_password']))
    except requests.exceptions.RequestException:
        # narrowed from bare except: only network/HTTP failures are expected
        qdb.print_out("Can't get request. Please check internet connection")
        return pd.DataFrame()

    try:
        df = pd.DataFrame(req.json()["data"])
    except (ValueError, KeyError):
        # non-JSON response body or payload without a 'data' key
        qdb.print_out("error: %s" % noah_id)
        return pd.DataFrame()

    try:
        #rename dateTimeRead into ts and rain_value into rain
        df = df.rename(columns={'rain_value': 'rain', 'dateTimeRead': 'ts'})

        df = df.drop_duplicates('ts')
        # keep only 'YYYY-mm-dd HH:MM:SS' (first 19 chars) of the timestamp
        df['ts'] = df['ts'].apply(lambda x: pd.to_datetime(str(x)[0:19]))
        df['rain'] = df['rain'].apply(lambda x: float(x))
        df = df.sort_values('ts')

        #remove the entries that are less than fdate
        df = df[df.ts > fdate]

        return df[['ts', 'rain']]

    except (KeyError, ValueError, TypeError):
        # malformed/empty payload (e.g. expected columns missing);
        # treat as no data, matching the original best-effort behavior
        return pd.DataFrame()
示例#7
0
def main():
    """Update the data tables of all NOAH rain gauges."""

    begin = datetime.now()
    qdb.print_out(begin)

    # rain gauge properties; keep NOAH gauges only, one row per rain_id
    rain_props = mem.get('df_rain_props')
    noah_rows = rain_props[rain_props.data_source == 'noah']
    noah_rows = noah_rows.drop_duplicates('rain_id')

    # update each gauge's table in turn
    noah_rows.groupby('rain_id').apply(update_single_table)

    qdb.print_out('runtime = %s' % (datetime.now() - begin))
示例#8
0
def main(site_code=''):
    """Writes in rainfall_priorities information on nearest rain gauges
    from the project sites for rainfall alert analysis.

    Args:
        site_code (str): Comma-separated site codes to process. Optional;
            empty string processes all active sites (or takes the codes
            from the command line when available).
    """

    begin = datetime.now()
    qdb.print_out(begin)

    coord = mem.get('df_sites')
    # resolve site codes: explicit argument wins, else try the command line
    if site_code == '':
        try:
            site_code = sys.argv[1].lower().replace(' ', '').split(',')
        except:
            pass
    else:
        site_code = site_code.replace(' ', '').split(',')
    if site_code != '':
        coord = coord.loc[coord.site_code.isin(site_code), :]

    # active sites only, with their coordinates
    coord = coord.loc[coord.active == 1, ['site_id', 'latitude', 'longitude']]

    # active (not deactivated) rain gauges
    rg_coord = mem.get('df_rain_gauges')
    rg_coord = rg_coord[rg_coord.date_deactivated.isnull()]

    # nearest gauges per site, distances rounded to 2 decimals
    per_site = coord.groupby('site_id', as_index=False)
    nearest_rg = per_site.apply(get_distance, rg_coord=rg_coord)
    nearest_rg['distance'] = np.round(nearest_rg.distance, 2)
    nearest_rg = nearest_rg.reset_index(drop=True)

    if qdb.does_table_exist('rainfall_priorities') == False:
        #Create a NOAH table if it doesn't exist yet
        qdb.create_rainfall_priorities()

    to_mysql(nearest_rg)

    qdb.print_out('runtime = %s' % (datetime.now() - begin))
示例#9
0
def update_table_data(noah_id, gauge_name, fdate, tdate, noah_gauges):
    """Updates data of table gauge_name from fdate to tdate.

    Args:
        noah_id (int): Device id of noah data.
        gauge_name (str): Name of table containing rainfall data of noah_id.
        fdate (timestamp): Timestamp start of data to be downloaded.
        tdate (timestamp): Timestamp end of data to be downloaded.
        noah_gauges (dataframe): Rain gauge properties- id, name, data source.

    Note:
        Mutually recursive with update_single_table(): while tdate has not
        yet passed the current time, update_single_table is called again,
        which recomputes the next window and calls back into this function.
    """

    noah_data = download_rainfall_noah(noah_id, fdate, tdate)
    cur_ts = datetime.now()

    if noah_data.empty:
        qdb.print_out("    no data...")

        #Insert an entry with values: [timestamp, -1] as a marker
        #-1 values should not be included in computation of cml rainfall
        if pd.to_datetime(tdate) <= cur_ts:
            place_holder_data = pd.DataFrame({"ts": [tdate], "rain": [-1.0]})
            data_table = sms.DataTable(gauge_name, place_holder_data)
            db.df_write(data_table)

    else:
        #Insert the new data on the noahid table
        data_table = sms.DataTable(gauge_name, noah_data)
        db.df_write(data_table)

    #The table is already up to date
    if pd.to_datetime(tdate) > cur_ts:
        return
    else:
        #call this function again until the maximum recent timestamp is hit
        update_single_table(noah_gauges)
示例#10
0
def update_single_table(noah_gauges):
    """Updates data of table gauge_name.

    Determines the per-gauge table name, creates the table when missing,
    and downloads the next day-sized chunk of data starting from the
    latest stored timestamp.

    Args:
        noah_gauges (dataframe): Rain gauge properties- id, name, data source.
    """

    noah_id = noah_gauges['gauge_name'].values[0]
    gauge_name = 'rain_noah_%s' % noah_id

    #check if table gauge_name exists
    if not qdb.does_table_exist(gauge_name):
        #Create a NOAH table if it doesn't exist yet
        qdb.print_out("Creating NOAH table '%s'" % gauge_name)
        qdb.create_NOAH_table(gauge_name)
    else:
        qdb.print_out('%s exists' % gauge_name)

    #Find the latest timestamp for noah_id (which is also the start date)
    latest_ts = qdb.get_latest_ts(gauge_name)

    if latest_ts in ('', None):
        #assign a starting date if table is currently empty
        latest_ts = datetime.now() - timedelta(3)
    else:
        latest_ts = latest_ts.strftime("%Y-%m-%d %H:%M:%S")

    qdb.print_out("    Start timestamp: %s" % latest_ts)

    #Generate end time: one day past the latest stored timestamp
    end_ts = (pd.to_datetime(latest_ts) + timedelta(1)).strftime("%Y-%m-%d")
    qdb.print_out("    End timestamp: %s" % end_ts)

    #Download data for noah_id
    update_table_data(noah_id, gauge_name, latest_ts, end_ts, noah_gauges)
示例#11
0
def main(site_code='', end='', Print=True, write_to_db=True,
         print_plot=False, save_plot=True, days='', is_command_line_run=True):
    """Computes alert and plots rainfall data.

    Args:
        site_code (str): Comma-separated site codes to compute rainfall
                          analysis for. Optional. Defaults to empty string
                          which will compute alert and plot for all sites.
        end (datetime): Timestamp of alert and plot to be computed. Optional.
                        Defaults to current timestamp.
        Print (bool): To print plot and summary of alerts. Optional.
                      Defaults to True.
        write_to_db (bool): To write computed alerts to the database.
                            Optional. Defaults to True.
        print_plot (bool): Force plotting even if server config disables it.
                           Optional. Defaults to False.
        save_plot (bool): To save the generated plots. Optional. Defaults
                          to True.
        days (float): Monitoring window length in days. Optional. Defaults
                      to the value in server config.
        is_command_line_run (bool): Whether sys.argv may supply site_code.
                                    Optional. Defaults to True.

    Returns:
        str: Json format of cumulative rainfall and alert per site.
    """

    start_time = datetime.now()
    qdb.print_out(start_time)

    # resolve site codes: explicit argument wins, else the command line
    if site_code == '':
        if is_command_line_run:
            site_code = sys.argv[1].lower()
            site_code = site_code.replace(' ', '').split(',')
    else:
        site_code = site_code.replace(' ', '').split(',')

    if end == '':
        try:
            end = pd.to_datetime(sys.argv[2])
        except:
            end = datetime.now()
    else:
        end = pd.to_datetime(end)

    output_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                                                   '../../..'))
    
    sc = mem.server_config()

    #creates directory if it doesn't exist
    if (sc['rainfall']['print_plot'] or sc['rainfall']['print_summary_alert']) and Print:
        if not os.path.exists(output_path+sc['fileio']['rainfall_path']):
            os.makedirs(output_path+sc['fileio']['rainfall_path'])

    # setting monitoring window
    if days != '':
        sc['rainfall']['rt_window_length'] = days
    end, start, offsetstart = get_rt_window(float(sc['rainfall']['rt_window_length']),
                            float(sc['rainfall']['roll_window_length']), end=end)
    tsn=end.strftime("%Y-%m-%d_%H-%M-%S")

    # 4 nearest rain gauges of each site with threshold and distance from site
    gauges = rainfall_gauges()
    # after the split above site_code is a list; '' means no site filter
    if site_code != '':
        gauges = gauges[gauges.site_code.isin(site_code)]
    gauges['site_id'] = gauges['site_id'].apply(lambda x: float(x))

    trigger_symbol = mem.get('df_trigger_symbols')
    trigger_symbol = trigger_symbol[trigger_symbol.trigger_source == 'rainfall']
    trigger_symbol['trigger_sym_id'] = trigger_symbol['trigger_sym_id'].apply(lambda x: float(x))
    site_props = gauges.groupby('site_id')

    # per-site alert computation
    summary = site_props.apply(ra.main, end=end, sc=sc,
                                trigger_symbol=trigger_symbol, write_to_db=write_to_db)
    summary = summary.reset_index(drop=True)[['site_id', 'site_code',
                    '1D cml', 'half of 2yr max', '3D cml', '2yr max',
                    'DataSource', 'alert']]

    # idiom: `Print == True` replaced with a plain truthiness test
    if Print:
        if sc['rainfall']['print_summary_alert']:
            summary.to_csv(output_path+sc['fileio']['rainfall_path'] +
                        'SummaryOfRainfallAlertGenerationFor'+tsn+'.csv',
                        sep=',', mode='w', index=False)
        if sc['rainfall']['print_plot'] or print_plot:
            rain_data = site_props.apply(rp.main, offsetstart=offsetstart,
                                         tsn=tsn, save_plot=save_plot, sc=sc,
                                         start=start, output_path=output_path,
                                         end=end).reset_index(drop=True)
            summary = pd.merge(summary, rain_data, on='site_id',
                               validate='1:1')

    summary_json = summary.to_json(orient="records")

    qdb.print_out("runtime = %s" %(datetime.now()-start_time))

    return summary_json
示例#12
0
def main():
    """Interactively plot subsurface (tilt) data for one TSM sensor.

    Prompts for the sensor name and monitoring window, column position
    plot options (interval, legends, column fix, mirroring), velocity
    plotting, and optional csv export; processes the data and renders
    the plots.

    Returns:
        Processed sensor data (output of proc.proc_data).
    """

    # asks for tsm name
    while True:
        props = qdb.get_tsm_list(input('sensor name: '))
        if len(props) == 1:
            break
        else:
            qdb.print_out('sensor name is not in the list')
            continue

    tsm_props = props[0]

    # asks if to plot from date activated (or oldest data) to most recent data
    while True:
        input_text = 'plot from start to end of data? (Y/N): '
        plot_all_data = input(input_text).lower()
        if plot_all_data == 'y' or plot_all_data == 'n':
            break

    # asks if to specify end timestamp of monitoring window
    if plot_all_data == 'n':
        while True:
            input_text = 'specify end timestamp of monitoring window? (Y/N): '
            test_specific_time = input(input_text).lower()
            if test_specific_time == 'y' or test_specific_time == 'n':
                break

        # ask for timestamp of end of monitoring window defaults to datetime.now
        if test_specific_time == 'y':
            while True:
                try:
                    input_text = 'plot end timestamp (format: 2016-12-31 23:30): '
                    end = pd.to_datetime(input(input_text))
                    break
                except:
                    print('invalid datetime format')
                    continue
        else:
            end = datetime.now()

        # monitoring window and server configurations
        window, sc = rtw.get_window(end)

        # asks if to plot with 3-day monitoring window
        while True:
            input_text = 'plot with 3-day monitoring window? (Y/N): '
            three_day_window = input(input_text).lower()
            if three_day_window == 'y' or three_day_window == 'n':
                break

        # asks start of monitoring window defaults to values in server config
        if three_day_window == 'n':
            while True:
                input_text = 'start of monitoring window (in days) '
                input_text += 'or datetime (format: 2016-12-31 23:30): '
                start = input(input_text)
                try:
                    # integer input: window length in days before window.end
                    window.start = window.end - timedelta(int(start))
                    break
                except:
                    try:
                        # otherwise: explicit start timestamp
                        window.start = pd.to_datetime(start)
                        break
                    except:
                        print('datetime format or integer only')
                        continue

            # computes offsetstart from given start timestamp
            window.offsetstart = window.start - timedelta(
                days=(sc['subsurface']['num_roll_window_ops'] * window.numpts -
                      1) / 48.)

    else:
        # check date of activation
        query = "SELECT date_activated FROM tsm_sensors"
        query += " WHERE tsm_name = '%s'" % tsm_props.tsm_name
        try:
            date_activated = qdb.db.df_read(query).values[0][0]
        except:
            # fall back to an arbitrary early date when not recorded
            date_activated = pd.to_datetime('2010-01-01')
        #compute for start to end timestamp of data
        query = "(SELECT * FROM tilt_%s" % tsm_props.tsm_name
        query += " where ts > '%s' ORDER BY ts LIMIT 1)" % date_activated
        query += " UNION ALL"
        query += " (SELECT * FROM tilt_%s" % tsm_props.tsm_name
        query += " ORDER BY ts DESC LIMIT 1)"
        start_end = qdb.db.df_read(query)

        end = pd.to_datetime(start_end['ts'].values[1])
        window, sc = rtw.get_window(end)

        # round the start-of-data timestamp down to the half hour
        start_dataTS = pd.to_datetime(start_end['ts'].values[0])
        start_dataTS_Year = start_dataTS.year
        start_dataTS_month = start_dataTS.month
        start_dataTS_day = start_dataTS.day
        start_dataTS_hour = start_dataTS.hour
        start_dataTS_minute = start_dataTS.minute
        if start_dataTS_minute < 30: start_dataTS_minute = 0
        else: start_dataTS_minute = 30
        window.offsetstart = datetime.combine(
            date(start_dataTS_Year, start_dataTS_month, start_dataTS_day),
            time(start_dataTS_hour, start_dataTS_minute, 0))

        # computes offsetstart from given start timestamp
        window.start = window.offsetstart + timedelta(days=(
            sc['subsurface']['num_roll_window_ops'] * window.numpts - 1) / 48.)

    # asks if to plot velocity and asks for interval and legends to show in
    # column position plots if to plot all data or if monitoring window not
    # equal to 3 days
    # (short-circuit: three_day_window is bound whenever the right side runs)
    if plot_all_data == 'y' or three_day_window == 'n':
        # asks for interval between column position plots
        while True:
            try:
                input_text = 'interval between column position plots, in hours: '
                col_pos_interval = int(input(input_text))
                break
            except:
                qdb.print_out('enter an integer')
                continue

        # computes for interval and number of column position plots
        sc['subsurface']['col_pos_interval'] = str(col_pos_interval) + 'H'
        sc['subsurface']['num_col_pos'] = int(
            (window.end - window.start).total_seconds() /
            (3600 * col_pos_interval) + 1)

        # asks if to plot all legends
        while True:
            input_text = 'show all legends in column position plot? (Y/N): '
            show_all_legend = input(input_text).lower()
            if show_all_legend == 'y' or show_all_legend == 'n':
                break

        if show_all_legend == 'y':
            show_part_legend = False
        # asks which legends to show
        elif show_all_legend == 'n':
            while True:
                try:
                    show_part_legend = int(input('every nth legend to show: '))
                    if show_part_legend <= sc['subsurface']['num_col_pos']:
                        break
                    else:
                        input_text = 'integer should be less than '
                        input_text += 'the number of colpos dates to plot: '
                        input_text += '%s' % (sc['subsurface']['num_col_pos'])
                        qdb.print_out(input_text)
                        continue
                except:
                    qdb.print_out('enter an integer')
                    continue

        while True:
            plotvel = input('plot velocity? (Y/N): ').lower()
            if plotvel == 'y' or plotvel == 'n':
                break

        if plotvel == 'y':
            plotvel = True
        else:
            plotvel = False

        three_day_window = False
    else:
        # 3-day window defaults: velocity on, partial legend
        plotvel = True
        show_part_legend = True
        three_day_window = True

    # asks which point to fix in column position plots
    while True:
        input_text = 'column fix for colpos (top/bottom). '
        input_text += 'press enter to skip; '
        input_text += 'default for monitoring is fix bottom: '
        column_fix = input(input_text).lower()
        if column_fix in ['top', 'bottom', '']:
            break

    if column_fix == '':
        column_fix = 'bottom'
    sc['subsurface']['column_fix'] = column_fix

    # mirror xz and/or xy colpos
    while True:
        try:
            mirror_xz = bool(int(input('mirror image of xz colpos? (0/1): ')))
            break
        except:
            print('Invalid. 1 for mirror image of xz colpos else 0')
            continue
    while True:
        try:
            mirror_xy = bool(int(input('mirror image of xy colpos? (0/1): ')))
            break
        except:
            print('Invalid. 1 for mirror image of xy colpos else 0')
            continue

    while True:
        print_disp_vel = input(
            'print displacement and velocity to csv? (Y/N): ').lower()
        if print_disp_vel == 'y' or print_disp_vel == 'n':
            break

    # process tilt data for the chosen window and options
    data = proc.proc_data(tsm_props,
                          window,
                          sc,
                          realtime=True,
                          comp_vel=plotvel,
                          analysis=False)

    if print_disp_vel == 'y':
        # export merged tilt + accelerometer data to csv
        tilt = data.tilt.reset_index()
        accel = data.accel_data.reset_index()
        df = pd.merge(tilt,
                      accel,
                      on=['ts', 'node_id', 'tsm_name'],
                      how='outer').sort_values(['ts', 'node_id'])
        df.to_csv('{}_{}-{}.csv'.format(tsm_props.tsm_name,
                                        window.start.strftime('%Y%m%d%H%M'),
                                        window.end.strftime('%Y%m%d%H%M')),
                  index=False)

    plotter.main(data,
                 tsm_props,
                 window,
                 sc,
                 plotvel=plotvel,
                 show_part_legend=show_part_legend,
                 realtime=True,
                 plot_inc=False,
                 three_day_window=three_day_window,
                 mirror_xz=mirror_xz,
                 mirror_xy=mirror_xy)

    return data
示例#13
0
def plot_disp_vel(noise_df, df0off, cs_df, colname, window, sc, plotvel,
                  xzd_plotoffset, num_nodes, velplot, plot_inc, inc_df=''):
    """Plot xz & xy displacements per node and, optionally, velocity alerts.

    Args:
        noise_df: Noise envelope data passed to plot_noise_env.
        df0off (dataframe): Per-node displacement data with 'ts' column.
        cs_df (dataframe): Cumulative (surface) displacement, ts-indexed.
        colname: Sensor/column name used as the figure title.
        window: Monitoring window (not referenced directly in this body).
        sc (dict): Server config (not referenced directly in this body).
        plotvel (bool): Whether to plot velocity alert subplots.
        xzd_plotoffset: Per-node plotting offset for the fill baseline.
        num_nodes (int): Number of nodes in the column.
        velplot: 6-tuple (vel_xz, vel_xy, L2_xz, L2_xy, L3_xz, L3_xy);
            only unpacked when plotvel is true.
        plot_inc (bool): Flag forwarded to plot_annotation.
        inc_df: Data forwarded to plot_annotation. Optional.
    """
#==============================================================================
# 
#     DESCRIPTION:
#     returns plot of xz & xy displacements per node, xz & xy velocities per node
# 
#     INPUT:
#     xz; array of floats; linear displacements along the planes defined by xa-za
#     xy; array of floats; linear displacements along the planes defined by xa-ya
#     xz_vel; array of floats; velocity along the planes defined by xa-za
#     xy_vel; array of floats; velocity along the planes defined by xa-ya
#==============================================================================

    if plotvel:
        vel_xz, vel_xy, L2_xz, L2_xy, L3_xz, L3_xy = velplot

    df0off = df0off.set_index('ts')
    
    fig=plt.figure()

    # NOTE(review): the except below falls back to velocity-only subplots;
    # it is unclear what failure mode the try anticipates — confirm.
    try:
        if plotvel:
            #creating subplots        
            ax_xzd=fig.add_subplot(141)
            ax_xyd=fig.add_subplot(142,sharex=ax_xzd,sharey=ax_xzd)
            ax_xzd.grid(True)
            ax_xyd.grid(True)
            
            ax_xzv=fig.add_subplot(143)
            ax_xzv.invert_yaxis()
            ax_xyv=fig.add_subplot(144,sharex=ax_xzv,sharey=ax_xzv)
        else:
            #creating subplots        
            ax_xzd=fig.add_subplot(121)
            ax_xyd=fig.add_subplot(122,sharex=ax_xzd,sharey=ax_xzd)
            ax_xzd.grid(True)
            ax_xyd.grid(True)
    except:
        if plotvel:
            #creating subplots                      
            ax_xzv=fig.add_subplot(121)
            ax_xzv.invert_yaxis()
            ax_xyv=fig.add_subplot(122,sharex=ax_xzv,sharey=ax_xzv)
    
    try:
        #plotting cumulative (surface) displacments
        ax_xzd.plot(cs_df.index, cs_df['xz'].values, color='0.4', linewidth=0.5)
        ax_xyd.plot(cs_df.index, cs_df['xy'].values, color='0.4', linewidth=0.5)
        ax_xzd.fill_between(cs_df.index, cs_df.xz, xzd_plotoffset*(num_nodes),
                            color='0.8')
        ax_xyd.fill_between(cs_df.index, cs_df.xy, xzd_plotoffset*(num_nodes),
                            color='0.8')
    except:
        qdb.print_out('Error in plotting cumulative surface displacement')
        
    try:
        #assigning non-repeating colors to subplots axis
        ax_xzd=nonrepeat_colors(ax_xzd,num_nodes)
        ax_xyd=nonrepeat_colors(ax_xyd,num_nodes)
    except:
        qdb.print_out('Error in assigning non-repeating colors in displacement')
    
    if plotvel:
        ax_xzv=nonrepeat_colors(ax_xzv,num_nodes)
        ax_xyv=nonrepeat_colors(ax_xyv,num_nodes)

    #plotting displacement for xz
    curax=ax_xzd
    plot_disp(curax, 'xz', df0off)
    plot_noise_env(curax, 'xz', noise_df)
    plot_annotation(curax, 'xz', df0off, inc_df, plot_inc)
    curax.set_title('displacement\n downslope', fontsize='medium')
    curax.set_ylabel('displacement scale, m')

    #plotting displacement for xy
    curax=ax_xyd
    plot_disp(curax, 'xy', df0off)
    plot_noise_env(curax, 'xy', noise_df)
    plot_annotation(curax, 'xy', df0off, inc_df, plot_inc)
    curax.set_title('displacement\n across slope', fontsize='medium')
           
    if plotvel:
        #plotting velocity for xz
        curax=ax_xzv

        vel_xz.plot(ax=curax, marker='.', legend=False)

        # L2/L3 threshold exceedances marked per node (yellow / red)
        L2_xz = L2_xz.sort_values('ts', ascending = True).set_index('ts')
        nodal_L2_xz = L2_xz.groupby('node_id')
        nodal_L2_xz.apply(lambda x: x['node_id'].plot(marker='^', ms=8, mfc='y',
                          lw=0,ax = curax))

        L3_xz = L3_xz.sort_values('ts', ascending = True).set_index('ts')
        nodal_L3_xz = L3_xz.groupby('node_id')
        nodal_L3_xz.apply(lambda x: x['node_id'].plot(marker='^', ms=10, mfc='r',
                          lw=0,ax = curax))
        
        # annotate node IDs along the y axis
        y = sorted(range(1, num_nodes+1), reverse = True)
        x = (vel_xz.index)[1]
        z = sorted(range(1, num_nodes+1), reverse = True)
        for i,j in zip(y,z):
            curax.annotate(str(int(j)), xy=(x,i), xytext = (5,-2.5),
                           textcoords='offset points',size = 'x-small')            
        curax.set_ylabel('node ID')
        curax.set_title('velocity alerts\n downslope', fontsize='medium')  
    
        #plotting velocity for xy        
        curax=ax_xyv

        vel_xy.plot(ax=curax, marker='.', legend=False)
        
        L2_xy = L2_xy.sort_values('ts', ascending = True).set_index('ts')
        nodal_L2_xy = L2_xy.groupby('node_id')
        nodal_L2_xy.apply(lambda x: x['node_id'].plot(marker='^', ms=8, mfc='y',
                          lw=0,ax = curax))

        L3_xy = L3_xy.sort_values('ts', ascending = True).set_index('ts')
        nodal_L3_xy = L3_xy.groupby('node_id')
        nodal_L3_xy.apply(lambda x: x['node_id'].plot(marker='^', ms=10, mfc='r',
                          lw=0,ax = curax))
               
        y = range(1, num_nodes+1)
        x = (vel_xy.index)[1]
        z = range(1, num_nodes+1)
        for i,j in zip(y,z):
            curax.annotate(str(int(j)), xy=(x,i), xytext = (5,-2.5),
                           textcoords='offset points',size = 'x-small')            
        curax.set_title('velocity alerts\n across slope', fontsize='medium')                        
        
    # rotating xlabel
    for tick in list(ax_xzd.xaxis.get_minor_ticks()) \
            + list(ax_xyd.xaxis.get_minor_ticks()) \
            + list(ax_xzd.xaxis.get_major_ticks()) \
            + list(ax_xyd.xaxis.get_major_ticks()):
        tick.label.set_rotation('vertical')

    if plotvel:
        for tick in list(ax_xzv.xaxis.get_minor_ticks()) \
                + list(ax_xyv.xaxis.get_minor_ticks()) \
                + list(ax_xzv.xaxis.get_major_ticks()) \
                + list(ax_xyv.xaxis.get_major_ticks()):
            tick.label.set_rotation('vertical')

    try:
        dfmt = md.DateFormatter('%Y-%m-%d\n%H:%M')
        ax_xzd.xaxis.set_major_formatter(dfmt)
        ax_xyd.xaxis.set_major_formatter(dfmt)
    except:
        qdb.print_out('Error in setting date format of x-label in disp subplots')

    fig.set_tight_layout(True)
    
    fig.subplots_adjust(top=0.85)
    fig.suptitle(colname)
示例#14
0
def main(tsm_name='', end='', end_mon=False):
    """Compute and store the subsurface (tilt) alert for one TSM sensor.

    Processes tilt data for the analysis window ending at ``end``, derives
    per-node then column-level alerts, decides the site-level alert, writes
    it to the ``tsm_alerts`` table and writes the operational trigger.

    Args:
        tsm_name (str): TSM sensor name; falls back to ``sys.argv[1]``
            (lowercased) when empty.
        end (str or datetime): end of the processing window; falls back to
            ``sys.argv[2]``, then to the current time.
        end_mon (bool): not used in this function; kept for interface
            compatibility with existing callers.

    Returns:
        pandas.DataFrame: the processed tilt data for the window, or
        ``None`` when the sensor has no data.
    """
    run_start = datetime.now()
    qdb.print_out(run_start)
    qdb.print_out(tsm_name)

    if tsm_name == '':
        tsm_name = sys.argv[1].lower()

    if end == '':
        # Narrowed from a bare ``except:``: fall back to "now" only when
        # the CLI argument is missing or not parseable as a timestamp.
        try:
            end = pd.to_datetime(sys.argv[2])
        except (IndexError, ValueError, TypeError):
            end = datetime.now()
    else:
        end = pd.to_datetime(end)

    window, sc = rtw.get_window(end)

    tsm_props = qdb.get_tsm_list(tsm_name)[0]
    data = proc.proc_data(tsm_props, window, sc)
    tilt = data.tilt[window.start:window.end]
    lgd = data.lgd
    tilt = tilt.reset_index().sort_values('ts', ascending=True)

    if lgd.empty:
        qdb.print_out('%s: no data' % tsm_name)
        return

    # Per-node displacement/velocity alerts.
    nodal_tilt = tilt.groupby('node_id', as_index=False)
    alert = nodal_tilt.apply(lib.node_alert,
                             colname=tsm_props.tsm_name,
                             num_nodes=tsm_props.nos,
                             disp=float(sc['subsurface']['disp']),
                             vel2=float(sc['subsurface']['vel2']),
                             vel3=float(sc['subsurface']['vel3']),
                             k_ac_ax=float(sc['subsurface']['k_ac_ax']),
                             lastgooddata=lgd,
                             window=window,
                             sc=sc).reset_index(drop=True)

    # Column-level alerts. The apply() return value is intentionally
    # discarded; presumably lib.column_alert updates `alert` in place —
    # TODO(review): confirm against lib.column_alert.
    alert.loc[:, 'col_alert'] = -1
    col_alert = pd.DataFrame({
        'node_id': range(1, tsm_props.nos + 1),
        'col_alert': [-1] * tsm_props.nos
    })
    node_col_alert = col_alert.groupby('node_id', as_index=False)
    node_col_alert.apply(lib.column_alert,
                         alert=alert,
                         num_nodes_to_check=int(
                             sc['subsurface']['num_nodes_to_check']),
                         k_ac_ax=float(sc['subsurface']['k_ac_ax']),
                         vel2=float(sc['subsurface']['vel2']),
                         vel3=float(sc['subsurface']['vel3']))

    # Exclude nodes flagged as invalid before deciding the site alert.
    valid_nodes_alert = alert.loc[~alert.node_id.isin(data.inv)]

    if max(valid_nodes_alert['col_alert'].values) > 0:
        # At least one positive column alert: analyze the trend.
        pos_alert = valid_nodes_alert[valid_nodes_alert.col_alert > 0]
        site_alert = trend.main(pos_alert, tsm_props.tsm_id, window.end,
                                data.inv)
    else:
        site_alert = max(
            lib.get_mode(list(valid_nodes_alert['col_alert'].values)))

    tsm_alert = pd.DataFrame({
        'ts': [window.end],
        'tsm_id': [tsm_props.tsm_id],
        'alert_level': [site_alert],
        'ts_updated': [window.end]
    })

    qdb.alert_to_db(tsm_alert, 'tsm_alerts')

    qdb.write_op_trig(tsm_props.site_id, window.end)

    qdb.print_out(tsm_alert)

    qdb.print_out('run time = ' + str(datetime.now() - run_start))

    return tilt
            if len(valid_cotriggers) != 0:
                for index, valid in valid_cotriggers.iterrows():
                    # Don't delete public alert entry if there
                    # is a co-trigger that's equal or
                    # greater of alert level
                    if (valid['alert_level'] >= alert_level):
                        qdb.print_out(
                            "%s has valid co-trigger: deleting will NOT commence"
                            % (site_code.upper()))
                        dont_delete = True
                        break

            if dont_delete == False:
                qdb.delete_public_alert(site_id, public_ts_start)
                qdb.print_out("Deleted {} public alert of {}".format(
                    public_ts_start, site_code))


###############################################################################

if __name__ == "__main__":
    # Record when the script started so the total runtime can be reported.
    script_start = datetime.now()
    qdb.print_out(script_start)

    main()

    # test: valid lt of INA
    # pd.to_datetime('2020-05-17 15:00')

    qdb.print_out('runtime = %s' % (datetime.now() - script_start))
def main(end_ts=None):
    """Process surficial triggers (lt, l2, l3) from the last 24 hours.

    For each surficial trigger in the window ending at ``end_ts``:

    * a valid ``lt`` trigger (``alert_status == 1``) gets an l2 entry
      inserted into the operational triggers;
    * an invalid l2/l3 trigger (``alert_status == -1``) has its public
      alert entry deleted, unless a valid co-trigger of equal or greater
      alert level exists at the same event start.

    Args:
        end_ts (datetime or str, optional): end of the processing window.
            Defaults to the current time, resolved per call. (The previous
            ``end_ts=datetime.now()`` default was evaluated once at import
            time, so long-running or repeated invocations silently reused
            a stale timestamp.)
    """
    start_time = datetime.now()
    qdb.print_out(start_time)

    # Resolve "now" at call time, not at function definition time.
    if end_ts is None:
        end_ts = datetime.now()

    start_ts = pd.to_datetime(end_ts) - timedelta(1)

    surficial_triggers = get_surficial_trigger(start_ts, end_ts)

    if len(surficial_triggers) == 0:
        qdb.print_out("=================")
        qdb.print_out("No surficial trigger (lt, l2, l3) to process")

    for _, surficial in surficial_triggers.iterrows():
        ts_updated = surficial['ts_updated']
        public_ts_start = round_data_ts(ts_updated)
        alert_level = surficial['alert_level']
        alert_symbol = surficial['alert_symbol']
        alert_status = surficial['alert_status']
        site_id = surficial['site_id']
        site_code = surficial['site_code']

        if alert_symbol == 'lt':
            if alert_status == 1:
                qdb.print_out("=================")
                qdb.print_out("Found valid lt surficial trigger for " + \
                              "%s at %s" % (site_code.upper(), ts_updated))
                qdb.print_out(" > Added l2 trigger on operational triggers")
                insert_l2_operational_trigger(ts_updated, site_id)

        # Process only l2 and l3 with alert status of -1 (invalid)
        elif alert_status == -1:
            valid_cotriggers = get_valid_cotriggers(site_id, public_ts_start)
            dont_delete = False
            # Check for co-triggers on the start of the event
            # (though highly unlikely). Iterating an empty frame is a
            # no-op, so no explicit emptiness guard is needed.
            for _, valid in valid_cotriggers.iterrows():
                # Don't delete the public alert entry if there is a
                # co-trigger of equal or greater alert level.
                if valid['alert_level'] >= alert_level:
                    qdb.print_out("=================")
                    qdb.print_out("%s has valid co-trigger: deleting will NOT commence" % (site_code.upper()))
                    dont_delete = True
                    break

            if not dont_delete:
                qdb.print_out("=================")
                qdb.print_out("Deleting public alert for site " + \
                              "%s (%s) at %s" % (site_code.upper(), site_id, public_ts_start))
                delete_invalid_public_alert_entry(site_id, public_ts_start)

                # Update ts_updated of the latest entry for this site to
                # the current (rounded) time.
                update_ts_last_site_public_alert(
                    site_id, round_data_ts(datetime.now()))

    qdb.print_out("=================")
    qdb.print_out('runtime = %s' % (datetime.now() - start_time))