def check_internet(self, host='8.8.8.8', port=53, timeout=3, recheck_time=60):
    """Test if internet connection exists before attempting any database operations

    Host: 8.8.8.8 (google-public-dns-a.google.com)
    OpenPort: 53/tcp
    Service: domain (DNS/TCP)

    Parameters
    ----------
    host : str
        host to attempt a TCP connection to, default 8.8.8.8
    port : int
        TCP port, default 53
    timeout : int
        socket timeout in seconds, default 3
    recheck_time : int, default 60
        only re-check every x seconds

    Returns
    -------
    bool
        True if connected (or last successful check was recent)

    Raises
    ------
    er.NoInternetError
        if the connection attempt fails
    """
    # raise er.NoInternetError() # testing

    # Kinda sketch, but just avoid re-checking too frequently.
    # NOTE fix: use total_seconds() - timedelta.seconds ignores the .days
    # component, so a success from >1 day ago could wrongly skip the check
    if (dt.now() - self.last_internet_success).total_seconds() < recheck_time:
        return True

    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.settimeout(timeout)
        s.connect((host, port))
        s.shutdown(socket.SHUT_RDWR)
        self.last_internet_success = dt.now()
        return True
    except socket.error as ex:
        raise er.NoInternetError() from ex
    finally:
        # always release the socket, even when connect/shutdown fails
        s.close()
def __init__(self, parent=None):
    """Init week/month period-select inputs.

    Parameters
    ----------
    parent : optional
        parent widget, passed through to super
    """
    super().__init__(parent=parent)
    # both period types filter on the same db date column
    col_db_startdate, col_db_enddate = 'ShiftDate', 'ShiftDate'

    df_week = qr.df_period(freq='week')
    df_month = qr.df_period(freq='month')

    # default = most recent week which started more than 6 days ago
    d = dt.now() + delta(days=-6)
    default_week = df_week[df_week.start_date < d].iloc[-1, :].name  # index name

    # default = most recent month which started more than 30 days ago
    d = dt.now() + delta(days=-30)
    default_month = df_month[df_month.start_date < d].iloc[-1, :].name  # index name

    # NOTE set_self(vars()) saves all locals above as instance attributes
    f.set_self(vars())

    self.add_input(
        field=InputField(text='Week', default=default_week),
        items=df_week.index,
        checkbox=True,
        cb_enabled=False)

    self.add_input(
        field=InputField(text='Month', default=default_month),
        items=df_month.index,
        checkbox=True,
        cb_enabled=False)

    self.add_features(['start_date', 'end_date', 'unit'])
    self.insert_linesep(i=2)
def collect_plm_files(unit: str, d_lower: dt = None, lst: list = None) -> list:
    """Collect PLM files from p drive and save to desktop

    - Used for uploading to KA PLM report system
    - TODO this could be replaced by utl.FileProcessor now?

    Parameters
    ----------
    unit : str
        unit to collect files for
    d_lower : dt, optional
        only collect files created after this date, default now - 180 days
    lst : list, optional
        pre-collected list of file paths; when None, the unit's downloads
        folder is searched

    Returns
    -------
    list
        list of source file paths copied
    """
    start = time.time()
    # NOTE fix: the original reused the name 'p' for both the search root and
    # the loop variable below - renamed to avoid shadowing
    p_search = efl.UnitFolder(unit=unit).p_dls

    if d_lower is None:
        d_lower = dt.now() + delta(days=-180)

    if lst is None:
        lst = utl.FolderSearch('plm', d_lower=d_lower).search(p_search)

    log.info(f'{f.deltasec(start)} | Found {len(lst)} files.')

    p_dst = cf.desktop / f'plm/{unit}'

    # prefix each destination filename with the file's created date
    for p_src in lst:
        fl.copy_file(p_src=p_src, p_dst=p_dst / f'{fl.date_created(p_src):%Y-%m-%d}_{p_src.name}')

    log.info(f'{f.deltasec(start)} | {len(lst)} files copied to desktop.')

    return lst
def set_allopen(self, **kw):
    """Apply the default 'all open' filter (process_date within last 6 days)."""
    a = self.a
    d_recent = dt.now() + delta(days=-6)

    fltr_args = [
        dict(vals=dict(process_date=d_recent)),
        # dict(vals=dict(component_id=component))
    ]
    self.add_fltr_args(fltr_args)
def update_time(task, duration: float = 1.0, d: dt = None, category=None, em=None, n: int = None):
    """Add row to table with new task data

    Parameters
    ----------
    task : str
        task description (first letter is capitalized for the saved row)
    duration : float
        hours spent, default 1.0
    d : dt | str, optional
        date of task, default today; 3-char strings are treated as a weekday
        abbreviation, longer strings parsed as a date
    category : str, optional
        task category, matched from existing tasks when not given
    em : ExcelModel, optional
        created when not given
    n : int, optional
        passed through to em.close
    """
    em = ExcelModel() if em is None else em

    if d is None:
        d = dt.now().date()
    elif isinstance(d, str):
        # 3 chars = weekday abbreviation (eg 'fri'), otherwise a date string
        d = f.recent_weekday(d) if len(d) == 3 else f.date_from_str(d)

    if category is None:
        category = get_matching_task(df=em.get_df(), task=task)

    row = dict(
        date=d,
        duration=duration,
        task=f'{task[0].upper()}{task[1:]}',
        task_type=category.title())

    em.add_row(m=row, name='ActLog')
    em.close(n=n, d_lower=f.convert_date(d))
def __init__(self, unit: str): """Object that represents path to unit's base folder Parameters --- Unit: string Examples ------- >>> uf = UnitFolder(unit='F301') >>> uf.p_unit '/Volumes/Public/Fort Hills/02. Equipment Files/1. 980E Trucks/F301 - A40017' """ # get unit's row from unit table, save to self attributes m = db.get_df_unit().loc[unit] f.copy_dict_attrs(m=m, target=self) modelpath = self.get_modelpath() # needs model and model_map unitpath = f'{unit} - {self.serial}' if not 'shovels' in self.minesite.lower(): p_unit = cf.p_drive / f'{self.equippath}/{modelpath}/{unitpath}' else: # shovels doesn't want modelpath. Could make this a list of exclusions or something p_unit = cf.p_drive / f'{self.equippath}/{unitpath}' p_dls = p_unit / 'Downloads' p_dls_year = p_dls / str(dt.now().year) f.set_self(vars())
def __set_default_filter(self, **kw):
    """Add default filter: mandatory (M) or not-yet-expired FCs, incomplete only.

    NOTE not used, filtering in db.df_fc currently
    """
    # super().set_default_filter(**kw)
    a = self.a

    is_mandatory = a.Classification == 'M'
    not_expired = a.ExpiryDate >= dt.now().date()

    self.fltr.add(ct=(is_mandatory | not_expired))
    self.fltr.add(vals=dict(Complete=0))
def add_defaults(d: dt = None, em=None):
    """Add default rows for all dates till current date with 0 time

    Parameters
    ----------
    d : dt | str, optional
        single date ('%Y-%m-%d' string accepted) to init defaults for;
        when None, init all blank days within the last 90 days
    em : ExcelModel, optional
        created when not given

    All-days branch:
    - get df, groupby date
    - get date_range btwn now and start
    - merge grouped data
    - filter dates = 0 or Nan
    """
    if em is None:
        em = ExcelModel()

    # idiom fix: 'd is not None' instead of 'not d is None'
    if d is not None:
        # init specific day
        if isinstance(d, str):
            d = dt.strptime(d, '%Y-%m-%d')

        day = d.strftime('%a')

        df = em.get_df(name='Default') \
            .pipe(lambda df: df[df.day == day]) \
            .drop(columns='day') \
            .assign(
                duration=lambda x: x.duration.astype(float),
                date=d.date())
    else:
        # init all blank days
        d = dt.now().date()  # type: date
        n = 90
        d_lower = d + delta(days=-n)
        rng = pd.date_range(d_lower, d)

        df_default = em.get_df(name='Default')  # defaults per day

        # get sum duration from last n days per day
        df_sum = em.get_df('ActLog') \
            .pipe(lambda df: df[df.date.dt.date >= d_lower]) \
            .groupby('date', as_index=False)[['duration']].sum() \
            .rename(columns=dict(duration='sum'))

        # merge default values for all days with 0 duration
        df = pd.DataFrame(dict(date=rng)) \
            .assign(day=lambda x: x.date.dt.strftime('%a')) \
            .merge(right=df_sum, on='date', how='left') \
            .fillna(0) \
            .merge(right=df_default, on='day', how='outer') \
            .dropna() \
            .pipe(lambda df: df[df['sum'] == 0]) \
            .drop(columns=['day', 'sum']) \
            .sort_values('date') \
            .assign(
                duration=lambda x: x.duration.astype(float))

    lst = list(df.to_dict(orient='index').values())
    em.add_row(m=lst)
    em.close()

    n_dates = df.groupby('date').size().shape[0]
    print('\n')
    log.info(f'Dates initialized: {n_dates}')
def set_lastperiod(self, days=7):
    """Add filter for date_col >= (today - days).

    Parameters
    ----------
    days : int
        number of days back, default 7

    Returns
    -------
    bool
        True if a date column was available and the filter was added
    """
    # idiom fix: getattr guard replaces 'hasattr(...) and not ... is None'
    if getattr(self, 'date_col', None) is None:
        return False

    vals = {self.date_col: dt.now().date() + delta(days=-days)}
    self.fltr.add(vals=vals, opr=op.ge)
    return True
def __init__(
        self,
        ftype: str,
        max_depth: int = 6,
        d_lower: dt = None):
    """
    Parameters
    ----------
    ftype : str
        file type to collect (dsc, fault | plm | tr3)
    max_depth : int, optional
        max depth to recurse, default 6
    d_lower : dt, optional
        date to filter file date created, default now - 180 days
    """
    if not ftype in self.keys:
        raise ValueError(f'Incorrect ftype "{ftype}", must be in {self.keys}')

    if d_lower is None:
        d_lower = dt.now() + delta(days=-180)

    cfg = self.cfg.get(ftype)
    # regex patterns: folders/files to skip vs names to match
    expr_exclude = self.make_re_exclude(lst=cfg.get('exclude'))
    expr_find = cfg.get('find')

    # NOTE set_self(vars()) saves all locals above as instance attributes
    f.set_self(vars())
def df_period(freq: str, n: int = 0, ytd: bool = False, n_years: int = 1) -> pd.DataFrame:
    """Return df of periods for specified freq

    Parameters
    ----------
    freq : str
        M or W (also accepts 'month'/'week')
    n : int, optional
        filter last n periods, default 0 (0 keeps all periods)
    ytd : bool, optional
        filter periods to start of year, default False
    n_years : int
        number of previous years

    Returns
    -------
    pd.DataFrame
        df of periods, indexed by formatted period name
    """
    freq = dict(month='M', week='W').get(freq, freq)  # convert from month/week

    d_upper = dt.now()
    d_lower = d_upper + delta(days=-365 * n_years)
    idx = pd.date_range(d_lower, d_upper, freq=freq).to_period()

    # %G-%V = iso year-week number
    # fmt_week = f'%Y-%{week_letter}'
    fmt_week = '%G-%V'

    m = dict(
        W=dict(fmt_str=fmt_week),
        M=dict(fmt_str='%Y-%m')) \
        .get(freq)

    def _rename_week(df, do=False):
        # NOTE(review): week_letter is not defined in this function - presumably
        # a module-level global, confirm it exists
        if not do:
            return df
        return df \
            .assign(name=lambda x: x.period.dt.strftime(f'Week %{week_letter}'))

    def _filter_ytd(df, do=ytd):
        # keep only periods from the current (max) year
        if not do:
            return df
        return df[df.period >= str(df.period.max().year)]

    df = pd.DataFrame(index=idx)

    return df \
        .assign(
            start_date=lambda x: pd.to_datetime(x.index.start_time.date),
            end_date=lambda x: pd.to_datetime(x.index.end_time.date),
            d_rng=lambda x: list(zip(x.start_date.dt.date, x.end_date.dt.date)),
            name=lambda x: x.index.to_timestamp(freq).strftime(m['fmt_str'])) \
        .rename_axis('period') \
        .reset_index(drop=False) \
        .set_index('name', drop=False) \
        .pipe(_filter_ytd, do=ytd) \
        .pipe(_rename_week, do=freq == 'W') \
        .rename(columns=dict(name='name_title')) \
        .iloc[-1 * n:]
def update_vals(self, e) -> None:
    """Update user row current settings before commiting to db"""
    e.LastLogin = dt.now()
    e.Ver = VERSION
    e.NumOpens = e.NumOpens + 1

    # current session/user environment
    for attr, val in dict(
            Domain=self.domain,
            UserGroup=self.usergroup,
            MineSite=self.minesite,
            odbc_driver=get_odbc_driver(),
            install_dir=str(cf.p_root)).items():
        setattr(e, attr, val)
def __init__(self, d: dt = None, d_rng: Tuple[dt] = None, minesite: str = None, mw=None, rep_type: str = 'pdf', **kw): # dict of {df_name: {func: func_definition, da: **da, df=None}} dfs, charts, sections, exec_summary, style_funcs = {}, {}, {}, {}, {} signatures = [] self.html_template = 'report_template.html' dfs_loaded = False p_rep = None if d is None: d = dt.now() + delta(days=-31) if d_rng is None: d_rng = qr.first_last_month(d=d) # make sure everything is date not datetime if isinstance(d_rng[0], dt): d_rng = (d_rng[0].date(), d_rng[1].date()) # don't use current ytd until first monthly report end of jan cur_year = dt.now().year d = dt(cur_year, 1, 1) d_end_jan = qr.first_last_month(d)[1].date() if d_rng[1] < d_end_jan: d_rng_ytd = (dt(cur_year - 1, 1, 1), dt(cur_year - 1, 12, 31)) else: d_rng_ytd = (dt(cur_year, 1, 1).date(), d_rng[1]) include_items = dict(title_page=False, truck_logo=False, exec_summary=False, table_contents=False, signature_block=False) env = Environment(loader=FileSystemLoader(str(p_reports))) f.set_self(vars())
def df_open_fc_unit(self, df=None, unit=None):
    """Filter df to open FCs per unit

    - Allow passing in df so don't need to query (if comes from cached db df)
    """
    if df is None:
        df = self.df

    cols = ['FC Number', 'Type', 'Subject', 'ReleaseDate', 'ExpiryDate']
    now = dt.now()

    # single unit only, with age/remaining in days
    df_unit = df[df.Unit == unit][cols] \
        .assign(
            Age=lambda x: (now - x.ReleaseDate).dt.days,
            Remaining=lambda x: (x.ExpiryDate - now).dt.days)

    return f.sort_df_by_list(
        df_unit,
        lst=['M', 'FAF', 'DO', 'FT'],
        lst_col='Type',
        sort_cols='FC Number') \
        .reset_index(drop=True)
def df_weeks() -> pd.DataFrame:
    """Return df of 52 weeks for the current year.

    Indexed by '{year}-{week}', columns StartDate/EndDate/Name.

    NOTE fix: the week start dates were previously computed from a hard-coded
    year (2020) while the index labels used the current year.
    """
    cols = ['StartDate', 'EndDate', 'Name']
    m = {}
    year = dt.now().year

    for wk in range(1, 53):
        # %W week numbers are 0-based, so shift wk down by one
        s = f'{year}-W{wk - 1}'
        d = dt.strptime(s + '-1', '%Y-W%W-%w').date()
        m[f'{year}-{wk}'] = (d, d + delta(days=6), f'Week {wk}')

    return pd.DataFrame.from_dict(m, columns=cols, orient='index')
def df_months():
    """Return df of 24 monthly periods starting from the month ~1 year back.

    Indexed by 'YYYY-MM', columns StartDate/EndDate/Name.
    """
    cols = ['StartDate', 'EndDate', 'Name']

    # first day of the month, approximately one year ago
    d_year_ago = dt.now() + delta(days=-365)
    d_start = dt(d_year_ago.year, d_year_ago.month, 1)

    rows = {}
    for i in range(24):
        d_cur = d_start + relativedelta(months=i)
        label = f'{d_cur:%Y-%m}'
        rows[label] = (*first_last_month(d_cur), label)

    return pd.DataFrame.from_dict(rows, columns=cols, orient='index')
def recent_weekday(d: str) -> dt:
    """Get most recent date falling on given weekday

    Parameters
    ----------
    d : str
        day of week (eg 'fri')

    Returns
    -------
    dt
        date value within the last 7 days (today included)
    """
    # NOTE fix: use a 7-day window (today-6 .. today) so each weekday appears
    # exactly once - the previous 8-day window made .loc return a 2-element
    # Series when asked for today's own weekday
    df = pd.DataFrame(
        data=pd.date_range(dt.now() + delta(days=-6), dt.now()),
        columns=['date']) \
        .assign(
            day=lambda x: x.date.dt.strftime('%a'),
            date=lambda x: x.date.dt.date) \
        .set_index('day')

    return df.date.loc[d.title()]
def fix_dls_all_units(d_lower: dt = None) -> None:
    """Process dsc files for every unit, one parallel job per unit."""
    if d_lower is None:
        d_lower = dt.now() + delta(days=-30)

    units = utl.all_units()

    # collect dsc files from all units in parallel
    job = delayed(utl.process_files)
    result = Parallel(n_jobs=-1, verbose=11)(
        job(ftype='dsc', units=unit, d_lower=d_lower, parallel=False)
        for unit in units)
def __init__(self, date=None, calendar=True, *args, **kw):
    """Date edit widget with optional calendar popup.

    Parameters
    ----------
    date : dt | date, optional
        initial date, default today
    calendar : bool
        enable calendar popup, default True
    """
    super().__init__(*args, **kw)

    self.setCalendarPopup(calendar)
    self.setDisplayFormat('yyyy-MM-dd')
    # NOTE removed dead local display_format = '%Y-%m-%d' - it was never read
    # or stored (original comment said "not sure if used")

    if date is None:
        date = dt.now().date()
    elif isinstance(date, dt):
        # datetime -> plain date
        date = date.date()

    self.setDate(date)
def df_rolling_n_months(n: int = 12):
    """Create df of n rolling months with periodindex

    Parameters
    ----------
    n : int, optional
        n months, default 12
    """
    # window ends at the last full month (end of previous month)
    d_upper = last_day_month(dt.now() + relativedelta(months=-1))
    d_lower = d_upper + relativedelta(months=-(n - 1))

    idx = pd.date_range(d_lower, d_upper, freq='M').to_period()

    df = pd.DataFrame(data=dict(period=idx.astype(str)), index=idx)
    df['d_lower'] = df.index.to_timestamp()
    df['d_upper'] = df.d_lower + pd.tseries.offsets.MonthEnd(1)

    return df
def __init__(self):
    """Init database wrapper state (no connection made yet)."""
    # NOTE local __name__ is saved onto self by set_self(vars()) below
    __name__ = 'SMS Event Log Database'
    log.info('Initializing database')
    self.reset(False)

    # lazily-loaded cached dataframes
    df_unit = None
    df_fc = None
    df_component = None
    dfs = {}

    domain_map = dict(SMS='KOMATSU', Cummins='CED', Suncor='NETWORK')
    domain_map_inv = f.inverse(m=domain_map)

    # just past the 60s recheck window, so the first check_internet call
    # performs a real connection test
    last_internet_success = dt.now() + delta(seconds=-61)
    f.set_self(vars())

    self.expected_exceptions = []
def __init__(self, days=14, use_user_settings=False, **kw):
    """Init PSN download session.

    Parameters
    ----------
    days : int
        look back this many days, default 14
    use_user_settings : bool
        passed through to super, default False
    """
    download_dir = cf.p_drive / 'Regional/SMS West Mining/PSN/PSNs'
    super().__init__(use_user_settings=use_user_settings, download_dir=download_dir, **kw)

    # mm/dd/yyyy string - presumably the format the PSN site expects, confirm
    startdate = (dt.now() + delta(days=days * -1)).strftime('%m/%d/%Y')

    self.pages.update({
        'login': '******',
    })

    # NOTE set_self(vars()) saves all locals above as instance attributes
    f.set_self(vars())
def __init__(self, unit, d_rng=None, **kw):
    """Query UnitSMR records for a single unit within a date range.

    Parameters
    ----------
    unit : str
    d_rng : tuple, optional
        (d_lower, d_upper), default last 60 days
    """
    super().__init__(**kw)
    a = T('UnitSMR')

    if d_rng is None:
        d_upper = dt.now()
        d_lower = d_upper + delta(days=-60)
        d_rng = (d_lower, d_upper)

    cols = ['Unit', 'DateSMR', 'SMR']

    # NOTE(review): no .select here - presumably cols is applied by the base
    # class after set_self, confirm
    q = Query.from_(a) \
        .where(a.Unit == unit) \
        .where(a.DateSMR.between(d_rng[0], d_rng[1]))

    f.set_self(vars())
def __init__(self, recent_days=-120, da=None):
    """Query recent oil samples, ranked newest-first per unit/component/modifier.

    Parameters
    ----------
    recent_days : int
        negative day offset for the sample_date filter, default -120
    da : optional
        passed through to super
    """
    super().__init__(da=da)
    a, b = self.a, self.b

    # subquery for ordering with row_number
    # rn=1 is the newest sample within each (unit, component_id, modifier)
    c = Query.from_(a).select(
        a.star,
        (RowNumber()
            .over(a.unit, a.component_id, a.modifier)
            .orderby(a.sample_date, order=Order.desc)).as_('rn')) \
        .left_join(b).on_field('Unit') \
        .where(a.sample_date >= dt.now() + delta(days=recent_days)) \
        .as_('sq0')

    cols = [c.star]
    sq0 = c
    f.set_self(vars())
def get_df_sum(self, n: int = 10):
    """Return df of summary durations for last n dates"""
    # one row per date with its day name and summed duration
    df_sum = self.get_df(name='ActLog') \
        .groupby('date', as_index=False)[['date', 'day', 'sum']].first() \
        .set_index('date')

    d_today = dt.now().date()
    rng = pd.date_range(d_today + delta(days=-n), d_today)

    # full date range, filled with 0 where no activity logged
    df_base = pd.DataFrame(index=rng).rename_axis('date')

    return df_base \
        .merge(right=df_sum, left_index=True, right_index=True, how='left') \
        .reset_index() \
        .assign(
            day=lambda x: x.date.dt.strftime('%a'),
            sum=lambda x: x['sum'].fillna(0).astype(float))
def max_date_plm(unit: str) -> dt:
    """Get max date in PLM database for specific unit

    Parameters
    ----------
    unit : str

    Returns
    -------
    dt
        max date (falls back to now - 731 days when unit has no records)
    """
    maxdate = PLMUnit(unit=unit).max_date()

    # no PLM rows yet - default to roughly two years back
    return maxdate if maxdate is not None else dt.now() + delta(days=-731)
def setModelData(self, editor, model, index):
    """Write the editor's date/time value back into the model."""
    editor_date = getattr(editor, self.date_type)()

    if not isinstance(self, TimeDelegate):
        # d = QDateTime(editor_date).toPyDateTime()
        d = f.convert_date(editor_date.toPyDate())
    else:
        # combine the edited time with the row's 'Date Added' date
        index_dateadded = index.siblingAtColumn(model.get_col_idx('Date Added'))
        d1 = model.data(index=index_dateadded, role=TableDataModel.RawDataRole)
        if d1 is None:
            d1 = dt.now()

        t = QTime(editor_date).toPyTime()
        d = dt(d1.year, d1.month, d1.day, t.hour, t.minute)

    model.setData(index, d)
def build_url(self, **kw):
    """Build fluidlife history export url from keyword filters.

    Parameters
    ----------
    **kw
        filters, eg d_lower/d_upper (dt), minesite, component, unit

    Returns
    -------
    str
        full query url including credentials and converted params
    """
    login = self.login
    url = 'https://mylab2.fluidlife.com/mylab/api/history/jsonExport?'

    if 'd_lower' not in kw:
        kw['d_lower'] = dt.now() + delta(days=-14)

    # convert to suncor unit names
    if 'unit' in kw:
        customer = db.get_unit_val(unit=kw['unit'], field='Customer')
        if customer == 'Suncor':
            m = {'^F': 'F0', '^3': '03', '^2': '02'}
            for expr, repl in m.items():
                kw['unit'] = re.sub(expr, repl, kw['unit'])

    # convert easier kws to fluidlife kws
    m_conv = dict(
        d_lower='startDateTime',
        d_upper='endDateTime',
        minesite='customerName',
        component='componentType',
        unit='unitId')  # NOTE unitId doesn't actually work

    kw.update(dict(username=login['username'], password=login['password']))

    for k, v in kw.items():
        if isinstance(v, dt):
            v = self.format_date(v)

        # NOTE fix: unwrap lists BEFORE the m_customer lookup - a list value
        # previously raised TypeError (unhashable) in 'v in m_customer'
        if isinstance(v, list):
            v = v[0]

        # convert MineSite to Fluidlife customer_name
        if v in m_customer:
            v = m_customer[v]

        if k in m_conv:
            k = m_conv[k]

        ampersand = '&' if url[-1] != '?' else ''
        url = f'{url}{ampersand}{k}={v}'

    return url
def get_df_fc(self, minesite=None, unit=None, default=True) -> pd.DataFrame:
    """Return df of FCs from cache, querying + caching on first access."""
    name = 'fc'
    df = self.get_df_saved(name)

    if df is None:
        from smseventlog.queries import FCOpen
        df = FCOpen().get_df(default=False)
        self.save_df(df, name)

    if minesite is not None:
        df = df[df.MineSite == minesite]

    if unit is not None:
        df = df[df.Unit == unit]

    # kinda sketch to filter here
    if default:
        is_open = (df.Type == 'M') | (df.ExpiryDate >= dt.now())
        df = df[is_open & (df.Complete == False)]  # noqa (needs to be ==False)

    return df
def highlight_expiry_dates(s, theme='light'):
    """Highlight FC Dates approaching expiry

    Parameters
    ---------
    s : pd.Series
        Only fmt single column at a time for now
    theme : str
        Dark or light theme for app or reports
    """
    bg = cf.config['color']['bg']

    s1 = pd.Series(index=s.index)  # blank series

    # days between now and the column date (positive = already expired)
    days_exp = (dt.now() - s).dt.days

    # ranges approaching expiry get progressively stronger highlights
    bands = [
        ((-90, -30), (bg['lightyellow'], 'black')),
        ((-30, 0), (bg['lightorange'], 'black'))]

    for (lower, upper), (color, text) in bands:
        s1[days_exp.between(lower, upper)] = format_cell(color, text)

    s1[days_exp > 0] = format_cell(bg['lightred'], 'white')

    # default for everything else
    s1[s1.isnull()] = format_cell(*get_defaults(theme))

    return s1