コード例 #1
0
def log(server,
        database,
        table,
        run_time_start,
        data_from_time,
        internal_table,
        run_result,
        comment,
        username=None,
        password=None):
    """
    Append a single run-result record to an MSSQL log table.

    Parameters
    ----------
    server : str
        The MSSQL server hosting the log table.
    database : str
        The database containing the log table.
    table : str
        The name of the log table to append to.
    run_time_start : str
        Timestamp of when the run started.
    data_from_time : str
        Timestamp of the earliest data covered by the run.
    internal_table : str
        Name of the table the run operated on.
    run_result : str
        Outcome of the run (e.g. 'pass' or 'fail').
    comment : str
        Free-text description of the outcome.
    username, password : str or None
        Optional MSSQL credentials passed through to to_mssql.

    Returns
    -------
    DataFrame
        The single-row record that was written to the log table.
    """
    # Stamp the end time at the moment the log record is created.
    run_time_end = datetime.today().strftime('%Y-%m-%d %H:%M:%S')

    record = {
        'RunTimeStart': [run_time_start],
        'RunTimeEnd': [run_time_end],
        'DataFromTime': [data_from_time],
        'InternalTable': [internal_table],
        'RunResult': [run_result],
        'Comment': [comment]
    }
    log1 = pd.DataFrame(record)

    mssql.to_mssql(log1,
                   server,
                   database,
                   table,
                   username=username,
                   password=password)

    return log1
コード例 #2
0
ファイル: test_mssql.py プロジェクト: mullenkamp/pdsql
def test_del_rows():
    """Round-trip test for mssql.del_mssql_table_rows.

    Writes df1 to a table, deletes the rows keyed by df2, then checks
    that what remains in the table equals an anti-join of the original
    data against df2's primary keys. The scratch table is dropped at
    the end.
    """
    ## Write table
    mssql.to_mssql(df1, server, database, table)

    ## Read table
    beta1 = mssql.rd_sql(server, database, table)

    ## Delete parts
    mssql.del_mssql_table_rows(server, database, table, pk_df=df2)

    ## Test
    beta2 = mssql.rd_sql(server, database, table)
    beta3 = beta1.set_index(['pk1', 'pk2'])
    # Expected result: rows whose (pk1, pk2) key was NOT in df2 (anti-join).
    # (The previous version also computed this mask as a discarded, dead
    # statement; that no-op line has been removed.)
    beta4 = beta3.loc[~beta3.index.isin(df2.set_index(['pk1', 'pk2']).index)].reset_index()

    ## Remove table
    engine = mssql.create_engine('mssql', server, database)
    conn = engine.connect()
    trans = conn.begin()
    # repr(str(table)) produces the single-quoted name literal that
    # OBJECT_ID expects (equivalent to the old str([str(table)])[1:-1]).
    conn.execute("IF OBJECT_ID(" + repr(str(table)) + ", 'U') IS NOT NULL drop table " + table)
    trans.commit()
    conn.close()

    assert all(beta2 == beta4)
コード例 #3
0
def create_site_mtype(server, database, site, ref_point, new_mtype):
    """
    Create a new mtype on a site by cloning an existing mtype's rows.

    A reference point number belonging to an existing mtype of the same
    site must be supplied; run get_sites_mtypes to find a suitable
    reference point.

    Parameters
    ----------
    server : str
        The server where the Hydrotel database lays.
    database : str
        The name of the Hydrotel database.
    site : str
        The site to create the new mtype on.
    ref_point : int
        The reference point from another mtype on the same site.
    new_mtype : str
        The new mtype name. Must be unique for the associated site.

    Returns
    -------
    DataFrame
        New object and point values extracted by the get_sites_mtypes function.
    """
    ## Validate inputs against the site's existing mtypes
    existing = get_sites_mtypes(server, database, sites=site).reset_index()

    if not (existing.Point == ref_point).any():
        raise ValueError('model_point must be a Point that exists within the mtypes of the site')
    if (existing.MType == new_mtype.lower()).any():
        raise ValueError('new_name already exists as an mtype, please use a different name')

    ## Pull the reference point row and its parent object row
    ref_point_row = rd_sql(server, database, points_tab, where_in={'Point': [ref_point]})
    ref_obj_row = rd_sql(server, database, objects_tab, where_in={'Object': ref_point_row.Object.tolist()})

    ## Determine the next TreeIndex for the site
    stmt = 'select max(TreeIndex) from {tab} where Site = {site}'.format(tab=objects_tab, site=int(ref_obj_row.Site))
    max_tree_index = int(rd_sql(server, database, stmt=stmt).iloc[0])

    ## Clone the object row under the new mtype name
    new_obj_row = ref_obj_row.drop('Object', axis=1).copy()
    new_obj_row['Name'] = new_mtype
    new_obj_row['TreeIndex'] = max_tree_index + 1

    to_mssql(new_obj_row, server, database, objects_tab)

    ## Read back the Object id the database assigned to the new row
    new_obj_id = int(rd_sql(server, database, objects_tab, where_in={'Site': ref_obj_row.Site.tolist(), 'Name': [new_mtype]}).Object)

    ## Clone the point row and attach it to the new object
    new_point_row = ref_point_row.drop('Point', axis=1).copy()
    new_point_row['Name'] = new_mtype
    new_point_row['Object'] = new_obj_id

    to_mssql(new_point_row, server, database, points_tab)

    ## Return the freshly created mtype as seen by get_sites_mtypes
    return get_sites_mtypes(server, database, sites=site, mtypes=new_mtype)
コード例 #4
0
def log(run_time_start, data_from_time, data_to_time, database_name,
        table_name, run_result, comment):
    """
    Write a single run-result record to the configured MSSQL log table.

    String fields are truncated to fit the log table's column widths
    before being written. The target server/database/table are taken
    from the module-level `param['Output']` configuration.

    Parameters
    ----------
    run_time_start : Timestamp or str
        When the run started.
    data_from_time, data_to_time : Timestamp or str
        The time span of the data covered by the run.
    database_name : str
        Name of the database the run operated on (truncated to 99 chars).
    table_name : str
        Name of the table the run operated on (truncated to 99 chars).
    run_result : str
        Outcome of the run (truncated to 9 chars).
    comment : str
        Free-text description (truncated to 299 chars).

    Returns
    -------
    DataFrame
        The single-row record that was written.
    """
    # End time is stamped at the moment the record is built.
    run_time_end = pd.Timestamp.today()

    record = {
        'RunTimeStart': [run_time_start],
        'RunTimeEnd': [run_time_end],
        'DataFromTime': [data_from_time],
        'DataToTime': [data_to_time],
        # Truncate to the log table's column widths.
        'DatabaseName': [database_name[:99]],
        'TableName': [table_name[:99]],
        'RunResult': [run_result[:9]],
        'Comment': [comment[:299]]
    }
    log1 = pd.DataFrame(record)

    mssql.to_mssql(log1, param['Output']['log_server'],
                   param['Output']['log_database'],
                   param['Output']['log_table'])

    return log1
コード例 #5
0
def main(param):
    """
    Load new DHI result txt files into the Hydrotel sample table.

    Scans the configured result folders for .txt files larger than
    ``min_file_size``, compares their modification times against the
    index saved by the previous run, parses any new or updated files,
    and writes the samples to the Hydrotel MSSQL sample table. The
    file index is then rewritten on disk so subsequent runs only pick
    up changes. Raises ValueError if no candidate files are found or
    if the database write fails.

    Parameters
    ----------
    param : dict
        Configuration with an 'Input' section (base_path,
        result_folders, file_index, min_file_size) and an 'Output'
        section (server, database, sample_table, quality_code).
    """
    logging.basicConfig(filename='dhi-to-hydrotel.log',
                        format='%(asctime)s: %(levelname)s: %(message)s',
                        level=logging.INFO)

    run_time_start = pd.Timestamp.now()

    ######################################
    ### Parameters

    base_path = param['Input']['base_path']
    result_folders = param['Input']['result_folders']
    file_index = param['Input']['file_index']
    min_file_size = param['Input']['min_file_size']

    ####################################
    ### Get the files

    logging.info('--Get the files')

    # Map each qualifying txt file path to its modification time (int
    # seconds). A plain loop replaces the previous comprehension that
    # was abused for its dict.update side effect.
    files = {}
    for fold in result_folders:
        fold_path = os.path.join(base_path, fold)
        for f in os.listdir(fold_path):
            file_path = os.path.join(fold_path, f)
            if f.endswith('.txt') and (os.path.getsize(file_path) > min_file_size):
                files[file_path] = int(os.path.getmtime(file_path))

    if not files:
        logging.error('No files found...something is wrong')
        raise ValueError('No files found...something is wrong')

    logging.info('Compare to previous run')

    # Load the mtimes recorded by the previous run, if any.
    if os.path.isfile(os.path.join(base_path, file_index)):
        with open(os.path.join(base_path, file_index)) as file:
            old_files = json.load(file)
    else:
        logging.info('First run')
        old_files = {}

    # A file is "new" if it was never seen before or has been modified
    # since the last run.
    new_files = {}
    for f in files:
        if f not in old_files or files[f] > old_files[f]:
            new_files[f] = files[f]

    combo_files = old_files.copy()
    combo_files.update(new_files)

    if new_files:
        ####################################
        ### Parse txt files and save to Hydrotel

        logging.info('Saving new data')

        # Raw string avoids the invalid '\d' escape-sequence warning.
        regex = re.compile(r'\d+')
        date_col = 'Time'

        for f in new_files:
            data1 = pd.read_table(f,
                                  skiprows=1,
                                  parse_dates=[date_col],
                                  infer_datetime_format=True)
            cols = data1.columns.copy()
            data_col = [c for c in cols if c != date_col][0]

            # The Hydrotel point number is embedded in the data
            # column's header text.
            point_num = int(regex.findall(data_col)[0])

            data2 = data1.rename(columns={
                date_col: 'DT',
                data_col: 'SampleValue'
            }).copy()
            # Align timestamps to 5-minute boundaries and keep only
            # the last 12 hours relative to this run.
            data2['DT'] = data2['DT'].dt.floor('5Min')
            data2 = data2[data2.DT >= (run_time_start -
                                       pd.DateOffset(hours=12))]
            data2['Point'] = point_num
            data2['Quality'] = param['Output']['quality_code']
            data2['BypassValidation'] = 0
            data2['BypassAlarms'] = 0
            data2['BypassScaling'] = 0
            data2['BypassTimeOffset'] = 0
            data2['Priority'] = 3

            try:
                mssql.to_mssql(data2, param['Output']['server'],
                               param['Output']['database'],
                               param['Output']['sample_table'])
            except Exception as err:
                logging.error(str(err))
                raise ValueError(str(err))

        ## Save new index so the next run skips already-loaded files

        with open(os.path.join(base_path, file_index), 'w') as outfile:
            json.dump(combo_files, outfile)

    else:
        logging.info('No new files found')

    logging.info('Success')
コード例 #6
0
        allo_site_trig = mssql.rd_sql(param['output']['server'], param['output']['database'], 'CrcAlloSite', ['CrcAlloSiteID', 'RecordNumber', 'AlloBlockID', 'SiteID'], where_in={'SiteType': ['LowFlow', 'Residual']}, username=param['output']['username'], password=param['output']['password'])

        trig_cond1 = mssql.rd_sql(param['output']['server'], param['output']['database'], 'LowFlowConditions', ['CrcAlloSiteID', 'BandNumber'], username=param['output']['username'], password=param['output']['password'])

        allo_site_trig1 = pd.merge(trig_cond1, allo_site_trig, on='CrcAlloSiteID')

        # Combine tables
        restr_ts2 = pd.merge(sites1, restr5, on='ExtSiteID').drop('ExtSiteID', axis=1)

        restr_ts3 = pd.merge(restr_ts2, allo_site_trig1, on=['RecordNumber', 'SiteID', 'BandNumber']).drop(['RecordNumber', 'SiteID', 'BandNumber', 'AlloBlockID'], axis=1).drop_duplicates(['CrcAlloSiteID', 'RestrDate'])

        restr_ts3.RestrDate = restr_ts3.RestrDate.dt.strftime('%Y-%m-%d')

        # Save results
        print('Save results')
        mssql.to_mssql(restr_ts3, param['output']['server'], param['output']['database'], table1, username=param['output']['username'], password=param['output']['password'])
    #    new_restr_ts = mssql.update_from_difference(restr_ts3, param['output']['server'], param['output']['database'], 'TSLowFlowRestr', on=['CrcAlloSiteID', 'RestrDate'], mod_date_col='ModifiedDate')

        # Log
        log1 = util.log(param['output']['server'], param['output']['database'], 'log', run_time_start, last_date1, table1, 'pass', '{} rows updated'.format(len(restr_ts3)), username=param['output']['username'], password=param['output']['password'])
    else:
        # Log
        log1 = util.log(param['output']['server'], param['output']['database'], 'log', run_time_start, last_date1, table1, 'pass', 'Todays restrictions were already saved', username=param['output']['username'], password=param['output']['password'])

    #####################################
    ### TSLowFlowSite
    print('--TSLowFlowSite')
    table1 = lowflow_site_ts_table

    ## Determine last restriction date run
コード例 #7
0
    ###############################
    ### Log

    today2 = str(datetime.today().strftime('%Y-%m-%d %H:%M:%S'))
    today3 = str(today.strftime('%Y-%m-%d %H:%M:%S'))

    log1 = pd.DataFrame([[
        today3, 'Freshwater Maps', 'pass', 'all good',
        str(map1.start_date), today2
    ]],
                        columns=[
                            'Time', 'HydroTable', 'RunResult', 'Comment',
                            'FromTime', 'RunTimeEnd'
                        ])
    to_mssql(log1, param.hydro_server, param.hydro_database, 'ExtractionLog')
except Exception as err:
    err1 = err
    print(err1)
    today2 = str(datetime.today().strftime('%Y-%m-%d %H:%M:%S'))
    log2 = pd.DataFrame([[
        today2, 'Freshwater Maps', 'fail',
        str(err1)[:299],
        str(map1.start_date), today2
    ]],
                        columns=[
                            'Time', 'HydroTable', 'RunResult', 'Comment',
                            'FromTime', 'RunTimeEnd'
                        ])
    to_mssql(log2, param.hydro_server, param.hydro_database, 'ExtractionLog')
コード例 #8
0
        combo2.rename(columns={'DateTime': 'DT'}, inplace=True)

        ## Nat flow
        nat_flow = combo2[['DT', 'nat_flow']].copy()

        nat_flow['Point'] = param['Output']['unmod_point']
        nat_flow['Quality'] = param['Output']['quality_code']
        nat_flow['BypassValidation'] = 0
        nat_flow['BypassAlarms'] = 0
        nat_flow['BypassScaling'] = 0
        nat_flow['BypassTimeOffset'] = 0
        nat_flow['Priority'] = 3
        nat_flow.rename(columns={'nat_flow': 'SampleValue'}, inplace=True)

        mssql.to_mssql(nat_flow, param['Output']['hydrotel_server'],
                       'hydrotel', 'SampleBuf')

        str1 = '{det} data points added to {mtype} (Point {point})'.format(
            det=len(combo2),
            mtype=param['Input']['unmod_mtype'],
            point=param['Output']['unmod_point'])

        print(str1)

        #        util.log(run_time_start, from_date, combo2.DT.max(), 'Hydrotel', 'SampleBuf', 'pass', '{det} data points added to {mtype} (Point {point})'.format(det=len(combo2), mtype=param['Input']['unmod_mtype'], point=param['Output']['unmod_point']))

        ## Other flow
        other_flow = combo2[['DT', 'other']].copy()

        other_flow['Point'] = param['Output']['other_point']
        other_flow['Quality'] = param['Output']['quality_code']
コード例 #9
0
    if isinstance(last_val1, pd.Timestamp):
        det1 = det1[det1.DateTime > last_val1].copy()

    #####################################
    ### Save to Hydrotel and log result

    if not det1.empty:
        det1['Point'] = param['Output']['new_point']
        det1['Quality'] = param['Output']['quality_code']
        det1.rename(columns={
            'DateTime': 'DT',
            'de-tided': 'SampleValue'
        },
                    inplace=True)

        mssql.to_mssql(det1, param['Output']['server'],
                       param['Input']['database'], 'Samples')
        util.log(
            run_time_start, from_date, det1.DT.max(), 'Hydrotel', 'Samples',
            'pass',
            '{det} data points added to {mtype} (Point {point})'.format(
                det=len(det1),
                mtype=param['Input']['new_mtype'],
                point=param['Output']['new_point']))

    else:
        util.log(run_time_start, to_date, to_date, 'Hydrotel', 'Samples',
                 'pass', 'No data needed to be added')

except Exception as err:
    err1 = err
    print(err1)
コード例 #10
0
        # Process the results
        site_summ1 = lf.site_summary_ts(str(last_date2),
                                        str(today1),
                                        only_active=only_active).reset_index()

        # Remove potential duplicate sites
        site_summ1.sort_values('MeasurementMethod',
                               ascending=False,
                               inplace=True)
        site_summ2 = site_summ1.drop_duplicates(['ExtSiteID', 'RestrDate'])

        # Save results
        print('Save results')
        mssql.to_mssql(site_summ2,
                       param['output']['server'],
                       param['output']['database'],
                       table1,
                       schema=schema1)

        # Log
        log1 = util.log(param['output']['server'], param['output']['database'],
                        'log', run_time_start, last_date1, table1, 'pass',
                        '{} rows updated'.format(len(site_summ1)))
    else:
        # Log
        log1 = util.log(param['output']['server'], param['output']['database'],
                        'log', run_time_start, last_date1, table1, 'pass',
                        'Todays restrictions were already saved')

    #####################################
    ### TSCrcBlockRestr
コード例 #11
0
    if not det1.empty:
        det1['Point'] = param['Output']['detided_point']
        det1['Quality'] = param['Output']['quality_code']
        det1['BypassValidation'] = 0
        det1['BypassAlarms'] = 0
        det1['BypassScaling'] = 0
        det1['BypassTimeOffset'] = 0
        det1['Priority'] = 3
        det1.rename(columns={
            'DateTime': 'DT',
            'de-tided': 'SampleValue'
        },
                    inplace=True)

        mssql.to_mssql(det1, param['Output']['hydrotel_server'], 'hydrotel',
                       'SampleBuf')

        str1 = '{det} data points added to {mtype} (Point {point})'.format(
            det=len(det1),
            mtype=param['Input']['detided_mtype'],
            point=param['Output']['detided_point'])

        print(str1)

#        util.log(run_time_start, from_date, det1.DT.max(), 'Hydrotel', 'SampleBuf', 'pass', '{det} data points added to {mtype} (Point {point})'.format(det=len(det1), mtype=param['Input']['detided_mtype'], point=param['Output']['detided_point']))

    else:
        print('No data needed to be added')
#        util.log(run_time_start, to_date, to_date, 'Hydrotel', 'SampleBuf', 'pass', 'No data needed to be added')

except Exception as err:
コード例 #12
0
        allo_site_trig = mssql.rd_sql(param['output']['server'], param['output']['database'], 'CrcAlloSite', ['CrcAlloSiteID', 'RecordNumber', 'AlloBlockID', 'SiteID'], where_in={'SiteType': ['LowFlow', 'Residual']})

        trig_cond1 = mssql.rd_sql(param['output']['server'], param['output']['database'], 'LowFlowConditions', ['CrcAlloSiteID', 'BandNumber'])

        allo_site_trig1 = pd.merge(trig_cond1, allo_site_trig, on='CrcAlloSiteID')

        # Combine tables
        restr_ts2 = pd.merge(sites1, restr5, on='ExtSiteID').drop('ExtSiteID', axis=1)

        restr_ts3 = pd.merge(restr_ts2, allo_site_trig1, on=['RecordNumber', 'SiteID', 'BandNumber']).drop(['RecordNumber', 'SiteID', 'BandNumber', 'AlloBlockID'], axis=1).drop_duplicates(['CrcAlloSiteID', 'RestrDate'])

        restr_ts3.RestrDate = restr_ts3.RestrDate.dt.strftime('%Y-%m-%d')

        # Save results
        print('Save results')
        mssql.to_mssql(restr_ts3, param['output']['server'], param['output']['database'], table1)
    #    new_restr_ts = mssql.update_from_difference(restr_ts3, param['output']['server'], param['output']['database'], 'TSLowFlowRestr', on=['CrcAlloSiteID', 'RestrDate'], mod_date_col='ModifiedDate')

        # Log
        log1 = util.log(param['output']['server'], param['output']['database'], 'log', run_time_start, last_date1, table1, 'pass', '{} rows updated'.format(len(restr_ts3)))
    else:
        # Log
        log1 = util.log(param['output']['server'], param['output']['database'], 'log', run_time_start, last_date1, table1, 'pass', 'Todays restrictions were already saved')

    #####################################
    ### TSLowFlowSite
    print('--TSLowFlowSite')
    table1 = lowflow_site_ts_table

    ## Determine last restriction date run
コード例 #13
0
            only_active=only_active,
            username=param['misc']['lowflows']['username'],
            password=param['misc']['lowflows']['password']).reset_index()

        # Remove potential duplicate sites
        site_summ1.sort_values('MeasurementMethod',
                               ascending=False,
                               inplace=True)
        site_summ2 = site_summ1.drop_duplicates(['ExtSiteID', 'RestrDate'])

        # Save results
        print('Save results')
        mssql.to_mssql(site_summ2,
                       param['output']['server'],
                       param['output']['database'],
                       table1,
                       schema=schema1,
                       username=param['output']['username'],
                       password=param['output']['password'])

        # Log
        log1 = util.log(param['output']['server'],
                        param['output']['database'],
                        'log',
                        run_time_start,
                        last_date1,
                        table1,
                        'pass',
                        '{} rows updated'.format(len(site_summ1)),
                        username=param['output']['username'],
                        password=param['output']['password'])