def __init__(self, **kw):
    """Build the units query: one row per unit with current SMR and
    warranty-days-remaining columns.

    NOTE(review): ``f.set_self(vars())`` appears to bind all locals
    (a, b, q, cols, ...) as attributes on self for the base class to
    consume - confirm against ``f.set_self``.
    """
    super().__init__(**kw)
    a = self.select_table
    # SQL scalar functions wrapped via cfn (custom function factory)
    isNumeric = cfn('ISNUMERIC', ['val'])
    left = cfn('LEFT', ['val', 'num'])
    c, d = pk.Tables('UnitSMR', 'EquipType')

    # days elapsed since the unit's delivery date, as of query time
    days = fn.DateDiff(PseudoColumn('day'), a.DeliveryDate, fn.CurTimestamp())

    # days left in a 365-day window from delivery, floored at 0
    remaining = Case().when(days <= 365, 365 - days).else_(0).as_('Remaining')
    # same idea over a 2-year (730-day) window
    remaining2 = Case().when(days <= 365 * 2, 365 * 2 - days).else_(0)
    # GE_Remaining only populated when the first char of Model is numeric
    # (ISNUMERIC(LEFT(Model, 1)) = 1); NULL otherwise
    ge_remaining = Case().when(isNumeric(left(a.Model, 1)) == 1, remaining2).else_(None).as_('GE_Remaining')

    # subquery b: per-unit MAX(SMR) and MAX(DateSMR) from UnitSMR
    # (note: max date overall, not necessarily the date of the max SMR)
    b = c.select(c.Unit, fn.Max(c.SMR).as_('CurrentSMR'), fn.Max(c.DateSMR).as_('DateSMR')).groupby(c.Unit).as_('b')

    cols = [a.MineSite, a.Customer, d.EquipClass, a.Model, a.Serial, a.Unit, b.CurrentSMR, b.DateSMR, a.DeliveryDate, remaining, ge_remaining]

    q = Query.from_(a) \
        .left_join(b).on_field('Unit') \
        .left_join(d).on_field('Model') \
        .orderby(a.MineSite, a.Model, a.Unit)

    f.set_self(vars())

    # NOTE lots of duplication with this pattern btwn avail/ac inspect/units/comp co
    # can't remember how everything works and don't want to dig into it
    # alternate row highlighting on the Model column when styling the df
    self.stylemap_cols |= {'Model': dict(
        cols=['Model'],
        func=st.pipe_highlight_alternating,
        da=dict(
            subset=['Model'],
            color='maroon',
            theme=self.theme))}
def get_df_unit(self, minesite=None, model=None, force=False, **kw):
    """Return df of all units in database, cached under name 'units'.

    Parameters
    ----------
    minesite : str, optional
        keep only rows where MineSite matches exactly (also avoids
        serial-number duplicates across minesites)
    model : str, optional
        substring filter applied with ``Series.str.contains``
    force : bool, optional
        reload from the database even if a cached copy exists

    Returns
    -------
    pd.DataFrame
        units indexed by Unit (Unit kept as a column too)
    """
    name = 'units'
    df = self.get_df_saved(name)

    # load if doesn't exist
    if df is None or force:
        a, b = pk.Tables('UnitID', 'EquipType')
        cols = [a.MineSite, a.Customer, a.Model, a.Unit, a.Serial, a.DeliveryDate, b.EquipClass, b.ModelBase]

        q = Query.from_(a).select(*cols) \
            .left_join(b).on_field('Model')

        df = pd.read_sql(sql=q.get_sql(), con=self.engine) \
            .set_index('Unit', drop=False) \
            .pipe(f.parse_datecols)

        self.save_df(df, name)

    # sometimes need to filter other minesites due to serial number duplicates
    # (PEP 8: use "is not None", not "not ... is None")
    if minesite is not None:
        df = df[df.MineSite == minesite].copy()

    if model is not None:
        df = df[df.Model.str.contains(model)]

    return df
def __init__(self, theme='dark'):
    """Build query of factory campaigns (FCs) per unit, with MineSite
    pulled in from UnitID."""
    super().__init__(theme=theme)
    a, b = pk.Tables('viewFactoryCampaign', 'UnitID')
    cols = [a.FCNumber, a.Unit, b.MineSite, a.Subject, a.Complete, a.Classification, a.ReleaseDate, a.ExpiryDate]
    q = Query.from_(a).select(*cols) \
        .left_join(b).on_field('Unit')
    # project helper - presumably stores locals (q, cols, ...) on self; confirm
    f.set_self(vars())
def __init__(self, da=None, **kw):
    """Build base downtime query (Downtime inner-joined to UnitID).

    No explicit .select here - columns presumably supplied by the base
    class / callers; confirm.
    """
    super().__init__(da=da, **kw)
    a, b = pk.Tables('Downtime', 'UnitID')
    q = Query.from_(a) \
        .inner_join(b).on_field('Unit')
    # downtime duration columns read back as float64
    self.default_dtypes.update(
        **f.dtypes_dict('float64', ['Total', 'SMS', 'Suncor']))
    f.set_self(vars())
def __init__(self, d: dt, minesite='FortHills', **kw):
    """Build query of SMR readings on the month boundaries around date d.

    Selects rows whose DateSMR is exactly the first of d's month or the
    first of the following month, for units at `minesite` not flagged
    ExcludeMA.
    """
    super().__init__(**kw)
    a, b = pk.Tables('UnitID', 'UnitSMR')
    # first day of d's month
    d_lower = dt(d.year, d.month, 1)
    dates = (d_lower, d_lower + relativedelta(months=1)
             )  # (2020-12-01, 2021-01-01)
    cols = [a.Unit, b.DateSMR, b.SMR]
    q = Query.from_(a).select(*cols) \
        .left_join(b).on_field('Unit') \
        .where((a.MineSite == minesite) & (b.DateSMR.isin(dates) & (a.ExcludeMA.isnull())))
    f.set_self(vars())
def __init__(self, unit=None, **kw):
    """Build query of max SMR per 'YYYY-M' period for a single unit.

    NOTE(review): `cols` is defined but `q` has no .select - presumably
    the base class applies `cols` to `q` after f.set_self; confirm.
    """
    super().__init__(**kw)
    a, b = pk.Tables('UnitSMR', 'UnitID')
    _year = cfn('YEAR', ['date'])
    _month = cfn('MONTH', ['date'])
    year = _year(a.DateSMR)
    month = _month(a.DateSMR)
    # period key like '2021-3' (month not zero-padded)
    _period = fn.Concat(year, '-', month)
    cols = [a.Unit, _period.as_('Period'), fn.Max(a.SMR).as_('SMR')]
    q = Query.from_(a) \
        .left_join(b).on_field('Unit') \
        .where(a.Unit == unit) \
        .groupby(a.Unit, _period)
    f.set_self(vars())
def __init__(self, da=None, **kw):
    """Build FC summary query joining the FC table to summary, per-minesite
    summary, unit, and equip-type tables."""
    super().__init__(da=da, **kw)
    a = self.select_table
    b, c, d, e = pk.Tables('FCSummary', 'FCSummaryMineSite', 'UnitID', 'EquipType')
    # nullable integer dtypes + thousands-separator display formats
    self.default_dtypes.update(
        **f.dtypes_dict('Int64', ['SMR', 'Pics']))
    self.formats.update({
        'SMR': '{:,.0f}',
        'Pics': '{:,.0f}'})
    # FCSummaryMineSite joined on both FCNumber and the unit's MineSite
    q = Query.from_(a) \
        .left_join(b).on_field('FCNumber') \
        .left_join(d).on(d.Unit == a.Unit) \
        .left_join(c).on((c.FCNumber == a.FCNumber) & (c.MineSite == d.MineSite)) \
        .left_join(e).on(d.Model == e.Model)
    f.set_self(vars())
def __init__(self, d_rng: Tuple[dt, dt], minesite: str):
    """Build TSI report query: closed TSIs submitted within d_rng at
    `minesite`, excluding events whose Title starts with 'fc ' (factory
    campaigns).
    """
    super().__init__()
    self.use_cached_df = True
    a, b = pk.Tables('EventLog', 'UnitID')
    date = a.DateAdded.as_('Failure Date')
    cols = [date, a.TSINumber, a.Unit, b.Model, a.Title, a.SMR, a.ComponentSMR, a.PartNumber, a.FailureCause]
    q = Query.from_(a) \
        .select(*cols) \
        .left_join(b).on_field('Unit') \
        .where(a.StatusTSI == 'Closed') \
        .where(a.DateTSISubmission.between(*d_rng)) \
        .where(b.MineSite == minesite) \
        .where(~a.Title.like('fc %')) \
        .orderby(a.DateAdded, a.Unit)
    f.set_self(vars())
def q(self):
    """Return availability/downtime summary query grouped by week or month.

    Builds, per (period, unit):
      - q_base: all periods in the date range crossed with all matching units
        (so units with zero downtime still appear)
      - q_dt:   summed downtime hours (Total/SMS/Suncor) per unit per period
      - final:  q_base LEFT JOIN q_dt, plus unit info and excluded hours
                from the tblHrsInPeriod table function
    """
    d_rng, period, model, minesite, unit = self.d_rng, self.period, self.model, self.minesite, self.unit
    a, b = pk.Tables('Downtime', 'UnitID')

    # SQL table/scalar functions wrapped for pypika
    hrs_in_period = cfn('tblHrsInPeriod', ['d_lower', 'd_upper', 'minesite', 'period'])
    period_range = cfn('period_range', ['startdate', 'enddate', 'period'])
    _month = cfn('MONTH', ['date'])
    _year = cfn('YEAR', ['date'])
    iso_year = cfn('dbo.iso_year', ['date'])
    datepart = cfn('DATEPART', ['period_type', 'date'])

    month = _month(a.ShiftDate)
    week = datepart(PseudoColumn('iso_week'), a.ShiftDate)

    # period label, e.g. '2021-3' (month) or '2021-12' (iso week)
    if period == 'month':
        year = _year(a.ShiftDate)
        _period = fn.Concat(year, '-', month)  # .as_('period')
    else:
        year = iso_year(
            a.ShiftDate)  # only use iso_year (slow custom function) when grouping by week
        _period = fn.Concat(year, '-', week)  # .as_('period')

    # Create all week/month periods in range crossed with units
    q_prd = Query.from_(period_range(d_rng[0], d_rng[1], period)).select('period')

    # NOTE(review): b.model (lowercase) relies on the SQL server matching
    # column names case-insensitively - confirm intentional
    q_base = Query.from_(b) \
        .select(q_prd.period, b.Unit) \
        .cross_join(q_prd).cross() \
        .where(Criterion.all([
            b.MineSite == minesite,
            b.model.like(model)]))

    # Unit, Total, SMS, Suncor
    cols_dt = [
        _period.as_('period'),
        a.Unit,
        fn.Sum(a.Duration).as_('Total'),
        fn.Sum(a.SMS).as_('SMS'),
        fn.Sum(a.Suncor).as_('Suncor')]

    # downtime rows in range; durations <= 0.01 hrs treated as noise
    q_dt = Query.from_(a) \
        .select(*cols_dt) \
        .where(Criterion.all([
            a.ShiftDate.between(d_rng[0], d_rng[1]),
            a.Duration > 0.01])) \
        .groupby(a.Unit, _period)

    # in case need historical data for single unit
    if not unit is None:
        q_dt = q_dt.where(a.Unit == unit)
        q_base = q_base.where(b.Unit == unit)

    cols1 = [q_base.period, q_base.Unit, q_dt.Total, q_dt.SMS, q_dt.Suncor]

    q1 = Query.from_(q_base) \
        .select(*cols1) \
        .left_join(q_dt).on_field('Unit', 'Period')

    # hours excluded from MA / PA calcs per unit per period
    q_hrs = Query.from_(hrs_in_period(d_rng[0], d_rng[1], minesite, period)).select('*')

    cols = [
        b.Model,
        b.DeliveryDate,
        q1.star,
        q_hrs.ExcludeHours_MA,
        q_hrs.ExcludeHours_PA,
        Case().when(b.AHSActive == 1, 'AHS').else_('Staffed').as_('Operation')]

    return Query.from_(q1) \
        .select(*cols) \
        .left_join(b).on_field('Unit') \
        .left_join(q_hrs).on_field('Unit', 'Period') \
        .where(b.Model.like(model))
def insert_update(
        self,
        a: str,
        df: pd.DataFrame,
        join_cols: list = None,
        b: str = 'temp_import',
        **kw) -> int:
    """Insert values from df into temp update table b and merge to a.

    Loads df into temp table `b`, then inserts into `a` only the rows
    not already present (LEFT JOIN on `join_cols` where the key is null),
    and finally drops the temp table.

    Parameters
    ----------
    a : str
        insert into table
    df : pd.DataFrame
        rows to import (may be None/empty, in which case nothing is merged)
    join_cols : list, optional
        columns to join a/b on; defaults to a's primary-key columns
    b : str
        select from table (temp table)

    Returns
    -------
    int
        rows added
    """
    if b == 'temp_import':
        kw['if_exists'] = 'replace'

    if join_cols is None:
        from smseventlog import dbtransaction as dbt
        join_cols = dbt.get_dbtable_keys(dbtable=a)

    imptable = b

    # drop duplicates
    # (fixed: was "if not df is None"; also no longer rebinds params a/b
    # to pypika Tables - str(Table) round-trips to the bare name, so the
    # DROP/log statements below behave identically)
    if df is not None and len(df) > 0:
        # sometimes df will have been converted to lower cols
        join_cols_lower = [c.lower() for c in join_cols]
        subset = join_cols if not all(c in df.columns for c in join_cols_lower) else join_cols_lower
        df = df \
            .drop_duplicates(subset=subset, keep='first')

        table_a, table_b = pk.Tables(a, b)
        cols = df.columns

        # this builds an import function from scratch, replaces stored procedures
        q = Query.into(table_a) \
            .columns(*cols) \
            .from_(table_b) \
            .left_join(table_a).on_field(*join_cols) \
            .select(*cols) \
            .where(table_a.field(join_cols[0]).isnull())
    else:
        q = ''

    rowsadded = self.import_df(df=df, imptable=imptable, impfunc=str(q), import_name=a, **kw)

    # self.cursor.execute(f'TRUNCATE TABLE {b};')
    self.cursor.execute(f'DROP TABLE {b};')
    self.cursor.commit()

    msg = f'{a}: {rowsadded}'
    log.info(msg)

    return rowsadded