def load_demand(self):
    """
    Load the materials' demand history from the database.

    Reads the pre-processed daily demand table (excluding 'SE%' centers),
    normalizes the date column and derives the CRAM code from the center
    id, then aggregates quantities per (date, material, cram).

    :return: pd.DataFrame with columns fecha, material, cram, cantidad
    """
    # Start Timer
    start = time.time()
    message = "Loading Demand Data..."
    # ------------------------------------------------------------------------------------------------------------------
    # Query: demand
    sql = """select * from p02_daily_demand_almacen where centro not like 'SE%%';"""
    demand = create_query(sql, self.conn)

    # Normalize dates and derive the CRAM code (center id minus its
    # two-character prefix).
    demand["fecha"] = pd.to_datetime(demand["fecha"])
    demand["cram"] = demand["centro"].map(lambda center: center[2:])

    # Collapse to one row per (date, material, cram).
    demand = (demand.groupby(["fecha", "material", "cram"])["cantidad"]
                    .sum()
                    .reset_index())
    # ------------------------------------------------------------------------------------------------------------------
    # Stop Timer
    elapsed_time_message(time.time() - start, message)
    return demand
def generate_total_stock(materials_by_id, first_date, last_date):
    """
    Build an exportable stock DataFrame across all materials.

    Collects every stock entry whose date falls inside the inclusive
    window [first_date, last_date].

    :param materials_by_id: {id: Material}; each material exposes a
        ``stock`` dict keyed by (date, warehouse) and a ``catalog`` id
    :param first_date: first date to include (inclusive)
    :param last_date: last date to include (inclusive)
    :return: pd.DataFrame with columns Material, Fecha, Almacén, Cantidad
    """
    start = time.time()
    message = "Generating exportable dataframe..."

    mats, dates, warehouses, amounts = [], [], [], []

    # Walk every material's stock and keep entries inside the window.
    for material in materials_by_id.values():
        for (stock_date, stock_wh), quantity in material.stock.items():
            if first_date <= stock_date <= last_date:
                mats.append(material.catalog)
                dates.append(stock_date)
                warehouses.append(stock_wh)
                amounts.append(quantity)

    # Assemble the export frame.
    df = pd.DataFrame({'Material': mats,
                       'Fecha': dates,
                       'Almacén': warehouses,
                       'Cantidad': amounts})

    elapsed_time_message(time.time() - start, message)
    return df
def process_initial_stock_teco(self, string_date):
    """
    Initialize GEU stocks for a TECO simulation run.

    Fetches the stock snapshot for ``string_date`` (centers starting with
    'C'), zeroes every GEU's stock and then applies the snapshot rows of
    in-scope materials.

    :param string_date: date string identifying the stock snapshot
    """
    # Start Timer
    start = time.time()
    message = "Set starting stock..."
    # ------------------------------------------------------------------------------------------------------------------
    # Query: initial stock (matching the string_date)
    # NOTE(review): string_date is interpolated directly into the SQL
    # text; it is assumed to come from trusted internal callers.
    snapshot_query = f"""SELECT material, fecha, centro, stock FROM p02_stock_by_date where fecha = '{string_date}' and centro like 'C%%';"""
    snapshot = create_query(snapshot_query, self.conn)

    # In unicram mode all stock is pooled under a single virtual center.
    if self.grep_type == 'unicram':
        snapshot['centro'] = 'C00'

    # Reset every GEU before applying the snapshot.
    for geu in self.geus_by_id.values():
        geu.set_zero_stock(self.warehouse_list)

    # Apply each snapshot row to the GEU that owns the material.
    for _, row in snapshot.iterrows():
        material_id = row['material']
        if material_id in self.materials_by_id:
            self.materials_by_id[material_id].geu.set_starting_stock_from_row(
                row, is_teco_simulation=True)
    # ------------------------------------------------------------------------------------------------------------------
    # Stop Timer
    elapsed_time_message(time.time() - start, message)
def load_movements(materials_by_id, conn, table_sql_movements, movements):
    """
    Load historic movements from either the processed or the raw table.

    :param materials_by_id: {id: Material}; only these materials are kept
    :param conn: DB connection
    :param table_sql_movements: 'p02' (processed table) or 'r00' (raw table)
    :param movements: optional list of movement classes to keep (falsy = keep all)
    :return: pd.DataFrame aggregated by (material, fecha_de_documento, almacen)
        with a summed cantidad column
    :raises ValueError: if table_sql_movements is not 'p02' or 'r00'
    """
    tic = time.time()
    message = "Leyendo movimientos..."

    in_scope = list(materials_by_id.keys())

    # Processed table
    if table_sql_movements == 'p02':
        df_movements = create_query("""SELECT * FROM p02_total_historic_movements""", conn)

        # Materials must be in scope
        df_movements = df_movements[df_movements["material"].isin(in_scope)]

        # If applies, use movements list to filter movements in df_movements
        if movements:
            df_movements = df_movements[df_movements["clase_de_movimiento"].isin(movements)]

        # Convert date column to datetime
        df_movements["fecha_de_documento"] = pd.to_datetime(df_movements["fecha_de_documento"])

        # Reduce movements to bare minimum
        df_movements = df_movements.groupby(["material", "fecha_de_documento", "almacen"])[
            "cantidad"].sum().reset_index()

    # Raw table
    elif table_sql_movements == 'r00':
        # Query
        df_movements = create_query("""SELECT material, centro, almacen, fe_contabilizacion, ctd_en_um_entrada, clase_de_movimiento FROM r00_total_historic_movements""", conn)
        df_movements.columns = ["material", "centro", "almacen", "fecha_de_documento",
                                "cantidad", "clase_de_movimiento"]

        # Materials must be in scope
        df_movements = df_movements[df_movements["material"].isin(in_scope)]

        # Warehouse (almacen) can't be empty
        df_movements = df_movements[df_movements["almacen"] != ""]

        # Qualify the warehouse id with its center
        df_movements["almacen"] = df_movements["centro"] + '-' + df_movements["almacen"]

        # Convert date column to datetime and quantity column to float
        df_movements["fecha_de_documento"] = pd.to_datetime(df_movements["fecha_de_documento"])
        df_movements["cantidad"] = df_movements["cantidad"].astype('float64')

        # If applies, use movements list to filter movements in df_movements
        if movements:
            df_movements = df_movements[df_movements["clase_de_movimiento"].isin(movements)]

        # Reduce movements to bare minimum
        df_movements = df_movements.groupby(["material", "fecha_de_documento", "almacen"])[
            "cantidad"].sum().reset_index()

    else:
        # Previously an unknown table id fell through to an unbound-variable
        # NameError at the return; fail fast with a clear message instead.
        raise ValueError(
            f"Unknown movements table '{table_sql_movements}'; expected 'p02' or 'r00'")

    toc = time.time()
    elapsed_time_message(toc - tic, message)
    return df_movements
def adding_movements_to_stock(materials_by_id, df_movements, first_date, last_date):
    """
    Apply movements inside the date window to their materials.

    For every movement row whose date lies in the inclusive window
    [first_date, last_date], registers the movement (warehouse, date,
    amount) on the corresponding material.

    :param materials_by_id: {id: Material}
    :param df_movements: DataFrame with material, almacen,
        fecha_de_documento and cantidad columns
    :param first_date: first date to include (inclusive)
    :param last_date: last date to include (inclusive)
    """
    tic = time.time()
    message = "Adding movements..."

    # Plain loop instead of a throwaway list comprehension:
    # set_movement is called purely for its side effect.
    columns = ["material", "almacen", "fecha_de_documento", "cantidad"]
    for material, warehouse, date, amount in df_movements[columns].values:
        if first_date <= date <= last_date:
            materials_by_id[material].set_movement(date, warehouse, amount)

    toc = time.time()
    elapsed_time_message(toc - tic, message)
def process_movements(self, df_movements):
    """
    Register historic movements on in-scope materials for the TECO
    simulation.

    :param df_movements: DataFrame whose rows are (material, date, cram,
        amount) in that column order
    """
    # Start Timer
    tic = time.time()
    message = "Set Historic Movements..."
    # ------------------------------------------------------------------------------------------------------------------
    # Plain loop instead of a side-effect list comprehension.
    for material, date, cram, amount in df_movements.values:
        if material in self.materials_by_id:
            self.materials_by_id[material].set_movements_for_teco_simulation(
                to_datetime(date), cram, amount)
    # ------------------------------------------------------------------------------------------------------------------
    # Stop Timer
    toc = time.time()
    elapsed_time_message(toc - tic, message)
def process_demand(self, df_demand):
    """
    Add demand rows to the in-scope materials.

    :param df_demand: DataFrame whose rows are (date, material, warehouse,
        demand) in that column order
    """
    # Start Timer
    message = "Processing Demand..."
    tic = time.time()
    # ------------------------------------------------------------------------------------------------------------------
    # Plain loop instead of a side-effect list comprehension.
    for date, material, warehouse, demand in df_demand.values:
        if material in self.materials_by_id:
            self.materials_by_id[material].add_demand(to_datetime(date), warehouse, demand)
    # ------------------------------------------------------------------------------------------------------------------
    # Stop Timer
    toc = time.time()
    elapsed_time_message(toc - tic, message)
def generate_stock_zero(df_zero_stock, materials_by_id, zero_stock_date=dt.datetime(2020, 1, 2)):
    """
    Assign the zero-date stock to every material.

    Aggregates df_zero_stock by (material, almacen) and stores the result
    on each material; any (date, warehouse) combination without a row gets
    an explicit stock of 0 so later lookups never miss.

    :param df_zero_stock: snapshot with material, almacen, cantidad columns
    :param materials_by_id: {id: Material}
    :param zero_stock_date: reference date of the snapshot
    """
    tic = time.time()
    message = "Assigning zero stock..."

    # Aggregate df stock by material & warehouse
    group_by_mat_alm = df_zero_stock.groupby(["material", "almacen"])["cantidad"].sum().reset_index()

    # Store each aggregated amount on its material
    for material, warehouse, amount in group_by_mat_alm[["material", "almacen", "cantidad"]].values:
        materials_by_id[material].set_stock(zero_stock_date, warehouse, amount)

    # Backfill an explicit 0 for warehouses without a snapshot row.
    # A membership test replaces the previous bare try/except, which
    # would also have swallowed unrelated errors.
    for material in materials_by_id.values():
        for warehouse in material.warehouses:
            if (zero_stock_date, warehouse) not in material.stock:
                material.set_stock(zero_stock_date, warehouse, 0)

    toc = time.time()
    elapsed_time_message(toc - tic, message)
def load_zero_stock(materials_by_id, conn):
    """
    Load the 2020-01-02 stock snapshot for in-scope materials.

    Rows with an empty warehouse are dropped and the warehouse id is
    qualified with its center ("centro-almacen").

    :param materials_by_id: {id: Material}; used to filter the snapshot
    :param conn: DB connection
    :return: pd.DataFrame with the filtered, qualified snapshot
    """
    start = time.time()
    message = "Loading zero stock..."

    # Query: load stock
    snapshot = create_query("""SELECT * FROM p01_stock_02_01_2020""", conn)

    # Keep only in-scope materials with a non-empty warehouse.
    in_scope = list(materials_by_id.keys())
    snapshot = snapshot[snapshot["material"].isin(in_scope)]
    snapshot = snapshot[snapshot["almacen"] != ""]

    # Qualify the warehouse id with its center.
    snapshot["almacen"] = snapshot["centro"] + '-' + snapshot["almacen"]

    elapsed_time_message(time.time() - start, message)
    return snapshot
def calculate_stock_per_date(materials_by_id, first_date, last_date):
    """
    Derive daily stock for every material from the reference snapshot.

    Starting from the hardcoded snapshot date (2020-01-02), rolls stock
    forward day by day up to last_date by adding each day's movements, and
    backward down to first_date by subtracting them.

    :param materials_by_id: {id: Material}; each material exposes a
        ``movements`` dict keyed by (date, warehouse), plus
        ``get_stock``/``set_stock``
    :param first_date: earliest date to materialize
    :param last_date: latest date to materialize
    """
    tic = time.time()
    message = "Generando stock por día..."

    one_day = dt.timedelta(days=1)
    # Hardcoded stock date reference (snapshot anchor)
    snapshot_date = dt.datetime(2020, 1, 2)

    # Roll forward: stock(d + 1) = stock(d) + movements(d)
    if last_date >= snapshot_date:
        date = snapshot_date
        while date != last_date:
            for material in materials_by_id.values():
                for warehouse in material.warehouses:
                    # except KeyError (not bare except) so real errors surface
                    try:
                        amount = material.movements[(date, warehouse)]
                    except KeyError:
                        amount = 0
                    date_stock = material.get_stock(date, warehouse) + amount
                    material.set_stock(date + one_day, warehouse, date_stock)
            date += one_day

    # Roll backward: stock(d - 1) = stock(d) - movements(d)
    if first_date <= snapshot_date:
        date = snapshot_date
        while date != first_date:
            for material in materials_by_id.values():
                for warehouse in material.warehouses:
                    try:
                        amount = material.movements[(date, warehouse)]
                    except KeyError:
                        amount = 0
                    date_stock = material.get_stock(date, warehouse) - amount
                    material.set_stock(date - one_day, warehouse, date_stock)
            date -= one_day

    toc = time.time()
    elapsed_time_message(toc - tic, message)
def process_movements(self, movements):
    """
    Register historic movements on in-scope materials.

    Drops rows whose center-warehouse code has no grep mapping, maps the
    remaining codes to their grep, aggregates by (material, date, grep)
    and applies every movement dated after self.starting_date.

    :param movements: DataFrame with material, fecha, ce_alm_s44 and
        cantidad columns
    """
    # Start Timer
    tic = time.time()
    message = "Set Historic Movements..."
    # ------------------------------------------------------------------------------------------------------------------
    known_greps = self.data_class.relation_grep_wh
    greps_banned = [wh for wh in movements['ce_alm_s44'].unique()
                    if wh not in known_greps]
    # Banned codes are silently dropped; a user-facing st.warning about
    # missing relacion_grep_almacen entries was intentionally disabled here.

    # .copy() avoids mutating the caller's frame and the
    # SettingWithCopyWarning from assigning into a filtered view.
    df_movements = movements[~movements['ce_alm_s44'].isin(greps_banned)].copy()
    df_movements['ce_alm_s44'] = df_movements['ce_alm_s44'].map(lambda x: known_greps[x])

    # Aggregate and keep only movements after the simulation start
    df_movements = df_movements.groupby(
        ["material", "fecha", "ce_alm_s44"])["cantidad"].sum().reset_index()
    df_movements["fecha"] = pd.to_datetime(df_movements["fecha"])
    df_movements = df_movements[df_movements["fecha"] > self.starting_date]

    # Apply each movement to its material (plain loop instead of a
    # side-effect list comprehension).
    for material, date, cram, amount in df_movements.values:
        if material in self.materials_by_id:
            self.materials_by_id[material].set_movements_for_teco_simulation(
                to_datetime(date), cram, amount)
    # ------------------------------------------------------------------------------------------------------------------
    # Stop Timer
    toc = time.time()
    elapsed_time_message(toc - tic, message)
def process_tickets_teco(self): """ Set GEU tickets for simulation tests :param engine: DB connection :param materials_by_id: {id, Material} :param starting_date: dt.date :return: void -> Complete GEU object info """ # Start Timer tic = time.time() message = "Set tickets for sim test..." # ------------------------------------------------------------------------------------------------------------------ # Load tickets query = f"""select * from p02_tickets_by_cram;""" df_tickets = create_query(query, self.conn) if self.grep_type == 'unicram': df_tickets["cram"] = 'C00' # Convert date columns to datetime df_tickets["fecha"] = pd.to_datetime(df_tickets["fecha"]) # Add tickets demands for greps without movements demands [self.materials_by_id[str(material)].add_demand(to_datetime(date), cram, int(amount)) for date, material, amount, cram in df_tickets[["fecha", "material", "cantidad", "cram"]].values if str(material) in self.materials_by_id.keys() and cram not in set([cram for date, cram, amount in self.materials_by_id[str(material)].demands])] # Create Ticket class df_tickets = df_tickets[["fecha", "material", "cantidad", "cram"]] df_tickets.columns = ["fecha", "material", "cantidad", "grep"] [Ticket(row, self.materials_by_id[str(row["material"])]) for iter, row in df_tickets.iterrows() if str(row["material"]) in self.materials_by_id.keys()] # ------------------------------------------------------------------------------------------------------------------ # Stop Timer toc = time.time() elapsed_time_message(toc - tic, message)
def load_movements(self):
    """
    Load historic movements for the TECO simulation.

    Derives the CRAM code from the center id (collapsed to '00' in
    unicram mode) and aggregates quantities per (material, date, cram).

    :return: pd.DataFrame with material, fecha_de_documento, cram, cantidad
    """
    # Start Timer
    start = time.time()
    message = "Load Historic Movements..."
    # ------------------------------------------------------------------------------------------------------------------
    # Query: load movements
    moves = create_query(
        """SELECT * FROM p01_historic_movements_for_teco_simulation;""",
        self.conn)

    # CRAM code = center id without its two-character prefix.
    moves["cram"] = moves["centro"].map(lambda center: center[2:])
    if self.grep_type == 'unicram':
        moves["cram"] = '00'

    # Collapse to one row per (material, date, cram), then normalize dates.
    moves = (moves.groupby(["material", "fecha_de_documento", "cram"])["cantidad"]
                  .sum()
                  .reset_index())
    moves["fecha_de_documento"] = pd.to_datetime(moves["fecha_de_documento"])
    # ------------------------------------------------------------------------------------------------------------------
    # Stop Timer
    elapsed_time_message(time.time() - start, message)
    return moves
def process_initial_stock_teco(self, initial_stock, string_date):
    """
    Initialize GEU stocks from a provided snapshot DataFrame.

    Drops snapshot rows whose center-warehouse code has no grep mapping,
    maps the rest to their grep, zeroes every GEU and applies the rows of
    in-scope materials.

    :param initial_stock: snapshot DataFrame; its four columns are renamed
        here to (material, fecha, stock, grep) — assumed to arrive in that
        order with ce_alm_s44 last (TODO confirm against callers)
    :param string_date: snapshot date label (kept for interface
        compatibility; unused by this implementation)
    """
    # Start Timer
    tic = time.time()
    message = "Set starting stock..."
    # ------------------------------------------------------------------------------------------------------------------
    known_greps = self.data_class.relation_grep_wh
    greps_banned = [wh for wh in initial_stock['ce_alm_s44'].unique()
                    if wh not in known_greps]
    # Banned codes are silently dropped; a user-facing st.warning about
    # missing relacion_grep_almacen entries was intentionally disabled here.

    # .copy() avoids mutating the caller's frame and the
    # SettingWithCopyWarning from assigning into a filtered view.
    df_initial_stock = initial_stock[~initial_stock['ce_alm_s44'].isin(greps_banned)].copy()
    df_initial_stock['ce_alm_s44'] = df_initial_stock['ce_alm_s44'].map(lambda x: known_greps[x])
    df_initial_stock.columns = ['material', 'fecha', 'stock', 'grep']

    # Reset every GEU before applying the snapshot
    for geu in self.geus_by_id.values():
        geu.set_zero_stock(self.warehouse_list)

    # Apply each snapshot row to the GEU that owns the material
    for _, row in df_initial_stock.iterrows():
        if row['material'] in self.materials_by_id:
            self.materials_by_id[row['material']].geu.set_starting_stock_from_row(
                row, is_teco_simulation=True)
    # ------------------------------------------------------------------------------------------------------------------
    # Stop Timer
    toc = time.time()
    elapsed_time_message(toc - tic, message)
def generate_exportables_dataframes(self):
    """
    Build the exportable stock and ticket DataFrames for the simulation.

    Samples every GEU's per-warehouse stock every 15 days between
    self.starting_date and self.end_date (with one final clamped sample
    just before end_date), and flattens every GEU's service-level tickets.

    :return: {'tickets': df_tickets, 'stock': df_stocks}
    """
    # Start Timer
    message = "Generating exportable file..."
    tic = time.time()
    # ------------------------------------------------------------------------------------------------------------------
    # Initiate information lists
    today = self.starting_date
    stock = []
    dates = []
    geus = []
    cram = []
    cluster = []
    #exportables_geus = [geu for geu in self.geus_by_id.values() if geu.tickets]

    # Fill info lists: step 15 days at a time; when the step overshoots
    # end_date, take one last sample at end_date - 1 day and stop
    # (the `last` flag ends the loop after that clamped iteration).
    last = False
    while today != self.end_date and not last:
        if today >= self.end_date:
            today = self.end_date - dt.timedelta(days=1)
            last = True
        for geu in self.geus_by_id.values():
            for wh in geu.stock_by_wh.keys():
                dates.append(today)
                # stock_by_day is keyed by date, then warehouse
                stock.append(geu.stock_by_day[today][wh])
                geus.append(geu.id)
                cram.append(wh)
                cluster.append(geu.cluster)
        today += dt.timedelta(days=15)

    # Convert lists to DataFrame
    df_stocks = pd.DataFrame({
        'geu': geus,
        'fecha': dates,
        'centro': cram,
        'stock': stock,
        'cluster': cluster
    })

    # ------------------Tickets------------------
    # Initiate information lists
    is_closed = []
    is_broken = []
    service_level = []
    dates = []
    geus = []
    cram = []
    cluster = []

    # Fill info lists: one row per service-level ticket. A ticket counts
    # toward service level when it is closed and not broken.
    for geu in self.geus_by_id.values():
        for t in geu.tickets_for_service_level:
            dates.append(t.ticket_date)
            is_closed.append(int(t.is_closed))
            is_broken.append(t.is_broken)
            service_level.append(int(t.is_closed and not t.is_broken))
            geus.append(geu.id)
            cram.append(t.grep)
            cluster.append(geu.cluster)

    # Convert lists to DataFrame
    df_tickets = pd.DataFrame({
        'geu': geus,
        'fecha': dates,
        'centro': cram,
        'Quebrado': is_broken,
        'Cerrado': is_closed,
        'Nivel_de_servicio': service_level,
        'cluster': cluster
    })

    # Normalize date columns for downstream consumers
    df_tickets['fecha'] = pd.to_datetime(df_tickets['fecha'])
    df_stocks['fecha'] = pd.to_datetime(df_stocks['fecha'])

    # ------------------Exports------------------
    # Export information to csv
    #my_path = os.path.join(os.path.dirname(__file__), 'dashboard_csv/')
    # df_stocks.to_csv(f'{my_path}simulate_stock_teco.csv')
    #df_tickets.to_csv(f'{my_path}simulate_tickets_teco.csv')
    # ------------------------------------------------------------------------------------------------------------------
    # Stop Timer
    toc = time.time()
    elapsed_time_message(toc - tic, message)
    return {'tickets': df_tickets, 'stock': df_stocks}