Code example #1
def import_dt_exclusions_email():
    """Import downtime exclusion data from 'Equipment Availability' emails"""
    maxdate = db.max_date_db(table='DowntimeExclusions', field='Date') + delta(days=2)
    df = combine_email_data(
        folder='Downtime',
        maxdate=maxdate,
        subject='Equipment Availability',
        header=0)

    df = process_df_exclusions(df=df)
    rowsadded = db.insert_update(a='DowntimeExclusions', df=df)
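
These snippets omit their module-level imports; `delta` above is presumably an alias for datetime.timedelta, which is an assumption and not shown in the source:

# assumed import for the delta alias used in the date arithmetic above
from datetime import timedelta as delta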
Code example #2
def import_unit_hrs_email(minesite: str) -> None:
    from smseventlog.utils.exchange import combine_email_data
    maxdate = db.max_date_db(
        table='UnitSMR', field='DateSMR', minesite=minesite) + delta(days=1)

    df = combine_email_data(folder='SMR',
                            maxdate=maxdate,
                            **m_config.get(minesite, {}))

    if df is not None and len(df) > 0:
        df = df.pipe(process_df_smr, minesite=minesite)
        rowsadded = db.insert_update(a='UnitSMR', df=df)
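
m_config itself is not shown in these snippets. The sketch below is a hypothetical per-minesite mapping, inferred only from the keys read here and in import_smr_local (subject and header); the site name and values are placeholders, not the real configuration:

# hypothetical shape of m_config (placeholder site name and values)
m_config = {
    'ExampleMine': {
        'subject': 'Unit SMR Report',  # assumed email subject / filename filter
        'header': 0,                   # assumed csv header row index
    },
}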
Code example #3
    def to_sql(self):
        """Save df to database
        - test_results is list of dicts, need to serialize
        - don't need customer in db"""

        df = self.df_samples() \
            .assign(
                test_results=lambda x: x.test_results.apply(json.dumps),
                test_flags=lambda x: x.test_flags.apply(json.dumps)) \
            .reset_index(drop=False)

        return db.insert_update(
            a='OilSamples',
            join_cols=['hist_no'],
            df=df,
            notification=True,
            # import_name='OilSamples',
            chunksize=5000)
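
The assign calls above turn list-of-dict columns into JSON strings so they fit a single text column. A minimal standalone illustration of that serialization step, using made-up sample data:

import json
import pandas as pd

# toy frame standing in for df_samples(); values are invented
df = pd.DataFrame({
    'hist_no': [1, 2],
    'test_results': [
        [{'test': 'iron', 'result': 12}],
        [{'test': 'visc', 'result': 48}]],
})

# serialize the nested column to JSON strings before writing to the database
df = df.assign(test_results=lambda x: x.test_results.apply(json.dumps))
print(df.test_results.iloc[0])  # '[{"test": "iron", "result": 12}]'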
Code example #4
def import_avail_local(p: Path = None) -> None:
    """Temp import avail data from local folder"""
    if p is None:
        p = cf.desktop / 'downtime'
        lst_paths = list(p.glob('*.csv'))
        dfs = [pd.read_csv(p_csv, header=3) for p_csv in lst_paths]
        df = pd.concat(dfs)
    else:
        df = pd.read_csv(p, header=3)
        lst_paths = []

    df = df \
        .pipe(process_df_downtime)

    rowsadded = db.insert_update(a='Downtime', df=df)

    for p in lst_paths:
        p.unlink()
Code example #5
def import_smr_local(minesite: str) -> None:
    """Temp import unit smr data from local folder"""

    p = cf.desktop / 'smr'
    m = m_config[minesite]
    subject = m.get('subject')

    lst_paths = list(p.glob(f'*{subject}*.csv'))

    dfs = [pd.read_csv(p_csv, header=m.get('header')) for p_csv in lst_paths]

    df = pd.concat(dfs) \
        .pipe(process_df_smr, minesite=minesite)

    rowsadded = db.insert_update(a='UnitSMR', df=df)

    for p in lst_paths:
        p.unlink()
Code example #6
def import_basemine_components(p=None):
    """Read OSB component db, fix cols/values, import to db"""

    # get min UID from db, decrement from there
    sql = 'SELECT Min(UID) From EventLog'
    uid_min = db.cursor.execute(sql).fetchval() - 1

    df = load_basemine_componennt_db(p=p) \
        .assign(
            UID=lambda x: np.arange(uid_min - x.shape[0], uid_min),
            DateCompleted=lambda x: x.DateAdded,
            CreatedBy='osb_import',
            StatusEvent='Complete',
            StatusWO='Closed',
            COConfirmed=True,
            ComponentCO=True)

    join_cols = ['Unit', 'Floc', 'DateAdded']
    return db.insert_update(a='EventLog', join_cols=join_cols, df=df)
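
The np.arange call above hands each imported row a UID below the smallest one already in EventLog. A small numeric illustration with made-up values:

import numpy as np

uid_min = -100  # hypothetical value of Min(UID) - 1 from EventLog
n_rows = 3      # hypothetical number of rows being imported

# n_rows consecutive ids, all below the existing minimum
print(np.arange(uid_min - n_rows, uid_min))  # [-103 -102 -101]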
Code example #7
def import_csv_df(df: pd.DataFrame, ftype: str, **kw) -> int:
    """Import fault or plm df combined from csvs"""

    df = filter_existing_records(df=df, ftype=ftype)

    if len(df) == 0:
        log.info(f'0 rows to import. ftype: {ftype}')
        return 0

    m = get_config(ftype)
    table_name = m['table_name']
    keys = dbt.get_dbtable_keys(table_name)

    return db.insert_update(
        a=table_name,
        join_cols=keys,
        df=df,
        prnt=True,
        notification=False,
        **kw)
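
db.insert_update is project code not shown here; from its usage, join_cols appears to name the key columns that decide whether an incoming row updates an existing record or is inserted as new. A rough pandas-only sketch of that idea, purely illustrative and not the library's implementation:

import pandas as pd

def upsert_sketch(existing: pd.DataFrame, incoming: pd.DataFrame, join_cols: list) -> pd.DataFrame:
    """Illustration only: incoming rows whose join_cols match an existing
    record replace it, all other incoming rows are appended as new records"""
    return pd.concat([existing, incoming]) \
        .drop_duplicates(subset=join_cols, keep='last') \
        .reset_index(drop=True)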
Code example #8
def import_downtime_email():
    """Import downtime data from 'Equipment Downtime' emails"""
    maxdate = db.max_date_db(table='Downtime', field='ShiftDate') + delta(days=2)
    df = combine_email_data(folder='Downtime', maxdate=maxdate, subject='Equipment Downtime')
    df = process_df_downtime(df=df)
    rowsadded = db.insert_update(a='Downtime', df=df)
Code example #9
def import_single(p):
    """Import downtime data from a single local csv file"""
    df = pd.read_csv(p, header=2)
    df = process_df_downtime(df=df)
    rowsadded = db.insert_update(a='Downtime', df=df)