def _os_specific_uri(action_case: str, my_os: list, type_case: str) -> str:
    """ Constructs an OS-specific database URI.

    Args:
        action_case: The action base indicates whether the connection is for retrieving from
                     or creating a database.
        my_os: The OS the user is running the library on.
        type_case: Indicates whether the URI is for the csv management database or the
                   reporting database.

    Returns:
        str: The database URI.
    """
    app_config = deserialise_config(ABS_FILE_PATH_APPLICATION_CONFIG)
    man_config = deserialise_config(ABS_FILE_PATH_CSV_MANAGEMENT_CONFIG)
    db_uri = ''
    unix, windows = [case['POSIX'], case['MACINTOSH']], [case['NT']]
    if action_case == case['RETRIEVAL'] and type_case == case['REPORTING']:
        if os.name in unix:
            db_uri = 'sqlite:///{}/reporting.db'.format(app_config['database_path'])
        elif os.name in windows:
            db_uri = 'sqlite:///{}\\reporting.db'.format(app_config['database_path'])
        return db_uri
    elif action_case == case['RETRIEVAL'] and type_case == case['MANAGEMENT']:
        if os.name in unix:
            db_uri = 'sqlite:///{}/{}'.format(man_config['database_path'], man_config['database_name'])
        elif os.name in windows:
            db_uri = 'sqlite:///{}\\{}'.format(man_config['database_path'], man_config['database_name'])
        return db_uri
    elif action_case == case['CREATE'] and type_case == case['MANAGEMENT']:
        database_name = 'csv_management-{}.db'.format(uuid.uuid1())
        if os.name in unix:
            db_uri = 'sqlite:///{}/{}'.format(ABS_FILE_PATH_ARCHIVE, database_name)
        elif os.name in windows:
            db_uri = 'sqlite:///{}\\{}'.format(ABS_FILE_PATH_ARCHIVE, database_name)
        config = {
            'database_path': ABS_FILE_PATH_ARCHIVE,
            'database_name': database_name,
            'database_connection': db_uri
        }
        serialise_config(configuration=config, file_path=ABS_FILE_PATH_CSV_MANAGEMENT_CONFIG)
        return db_uri
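# A minimal alternative sketch (not part of supplychainpy's API): sqlite URIs accept
# forward slashes on every platform, so joining and normalising the separators removes
# the need for separate POSIX/NT branches. The helper name _sqlite_uri is hypothetical.
def _sqlite_uri(directory: str, database_name: str) -> str:
    # os.path.join picks the native separator; replacing '\\' keeps the URI portable.
    return 'sqlite:///{}'.format(os.path.join(directory, database_name).replace('\\', '/'))

# e.g. _sqlite_uri('/tmp/archive', 'reporting.db') -> 'sqlite:////tmp/archive/reporting.db'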
def run(self):
    config = deserialise_config(ABS_FILE_PATH_APPLICATION_CONFIG)
    config['port'] = self.port
    config['host'] = self.host
    serialise_config(config, ABS_FILE_PATH_APPLICATION_CONFIG)
    self.app.run(host=self.host, port=int(self.port))
def run_sku_recommendation(analysed_orders: UncertainDemand, forecast: dict) -> dict:
    """ Runs the SKU recommendation state machine and generates recommendations for each SKU.

    Args:
        analysed_orders (UncertainDemand): Analysed orders.
        forecast (dict): Forecast.

    Returns:
        dict: Recommendations for each SKU.
    """
    recommend = SkuMachine()
    states = SKUStates(analysed_orders=analysed_orders, forecast=forecast)
    recommend.add_state("start", states.initialise_machine)
    recommend.add_state("excess_rank", states.excess_rank)
    recommend.add_state("shortage_rank", states.shortage_rank)
    recommend.add_state("inventory_turns", states.inventory_turns)
    recommend.add_state("classification", states.classification)
    recommend.add_state("traffic_light", states.traffic_light)
    recommend.add_state("forecast", states.forecast)
    recommend.add_state("recommendation", recommend, end_state=1)
    recommend.set_start("start")
    for sku in analysed_orders:
        recommend.run(sku.sku_id)
    return deserialise_config(ABS_FILE_PATH['RECOMMENDATION_PICKLE'])
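# A usage sketch, mirroring the __main__ block further down: it assumes
# ABS_FILE_PATH['COMPLETE_CSV_SM'] points at sample demand data and
# ABS_FILE_PATH['FORECAST_PICKLE'] at a matching serialised forecast.
#
#     analysed_orders = model_inventory.analyse(file_path=ABS_FILE_PATH['COMPLETE_CSV_SM'],
#                                               z_value=Decimal(1.28),
#                                               reorder_cost=Decimal(5000),
#                                               file_type='csv',
#                                               length=12)
#     forecast = deserialise_config(ABS_FILE_PATH['FORECAST_PICKLE'])
#     for recommendation in run_sku_recommendation(analysed_orders=analysed_orders,
#                                                  forecast=forecast).values():
#         print(recommendation)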
def run_profile_recommendation(analysed_orders: UncertainDemand, forecast: dict) -> dict:
    """ Runs the profile recommendation state machine and generates recommendations for the
    entire inventory profile.

    Args:
        analysed_orders (UncertainDemand): Analysed orders as UncertainDemand object.
        forecast (dict): Forecast results for the same data set.

    Returns:
        dict: Recommendations for the entire inventory profile.
    """
    recommend = ProfileMachine()
    states = ProfileStates(analysed_orders=analysed_orders, forecast=forecast)
    recommend.add_state("start", states.initialise_machine)
    recommend.add_state("revenue", states.revenue)
    recommend.add_state("excess", states.excess)
    recommend.add_state("shortage", states.shortage)
    recommend.add_state("classification", states.classification)
    recommend.add_state("inventory", states.inventory_turns)
    recommend.add_state("recommendation", recommend, end_state=1)
    recommend.set_start("start")
    recommend.run()
    # The profile recommendation has to be serialised separately from the SKU recommendation.
    return deserialise_config(ABS_FILE_PATH['PROFILE_PICKLE'])
def run(self):
    config = deserialise_config(ABS_FILE_PATH_APPLICATION_CONFIG)
    config['port'] = self.port
    config['host'] = self.host
    serialise_config(config, ABS_FILE_PATH_APPLICATION_CONFIG)
    # port is stored as a string (default '5000'), so cast before handing it to Flask.
    app.run(host=self.host, port=int(self.port))
def setUp(self):
    self._states = (
        'EXCESS_RANK',
        'SHORTAGE_RANK',
        'TRAFFIC_LIGHT',
        'CLASSIFICATION',
        'FORECAST',
        'RECOMMENDATION',
        'INVENTORY_TURNS',
        'START'
    )
    self.orders_analysis = model_inventory.analyse(file_path=ABS_FILE_PATH['COMPLETE_CSV_SM'],
                                                   z_value=Decimal(1.28),
                                                   reorder_cost=Decimal(5000),
                                                   file_type='csv',
                                                   length=12,
                                                   currency='USD')
    self.forecast = deserialise_config(ABS_FILE_PATH['FORECAST_PICKLE'])
    self.recommend = SkuMachine()
    self.states = SKUStates(analysed_orders=self.orders_analysis, forecast=self.forecast)
    self.recommend.add_state("start", self.states.initialise_machine)
    self.recommend.add_state("excess_rank", self.states.excess_rank)
    self.recommend.add_state("shortage_rank", self.states.shortage_rank)
    self.recommend.add_state("inventory_turns", self.states.inventory_turns)
    self.recommend.add_state("classification", self.states.classification)
    self.recommend.add_state("traffic_light", self.states.traffic_light)
    self.recommend.add_state("forecast", self.states.forecast)
    self.recommend.add_state("recommendation", self.recommend, end_state=1)
    self.recommend.set_start("start")
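# A hedged sketch of a test that drives the machine built in setUp end to end,
# mirroring run_sku_recommendation elsewhere in the package; the test name and the
# non-empty assertion are assumptions, not an existing test.
def test_machine_produces_recommendations(self):
    for sku in self.orders_analysis:
        self.recommend.run(sku.sku_id)
    recommendations = deserialise_config(ABS_FILE_PATH['RECOMMENDATION_PICKLE'])
    self.assertTrue(len(recommendations) > 0)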
def simulation(runs: int = None):
    try:
        database_path = ''
        file_name = ''
        sim_results = []
        sim_summary_results = []
        inventory = []
        if runs is not None:
            config = deserialise_config(ABS_FILE_PATH_APPLICATION_CONFIG)
            database_path = config['database_path']
            file_name = config['file']
            file_path = database_path.replace(' ', '') + '/' + file_name.replace(' ', '')
            analysed_orders = analyse(file_path=str(file_path),
                                      z_value=Decimal(1.28),
                                      reorder_cost=Decimal(5000),
                                      file_type=FileFormats.csv.name,
                                      length=12,
                                      currency='USD')
            # Run the simulation, populate the database and then retrieve the most
            # current values for the simulation page.
            try:
                with ThreadPoolExecutor(max_workers=1) as executor:
                    sim = executor.submit(simulate.run_monte_carlo,
                                          orders_analysis=analysed_orders,
                                          runs=runs,
                                          period_length=12)
                    sim_results = sim.result()
                    sim_window = executor.submit(simulate.summarize_window,
                                                 simulation_frame=sim_results,
                                                 period_length=12)
                    sim_window_result = sim_window.result()
                    sim_summary = executor.submit(simulate.summarise_frame, sim_window_result)
                    sim_summary_results = sim_summary.result()
                    store_simulation(sim_summary_results)
                    inventory = db.session.query(MasterSkuList).all()
            except OSError as e:
                print(e)
        elif runs is None and request.method == 'GET':
            try:
                sim_summary_results = select_last_simulation()
                inventory = db.session.query(MasterSkuList).all()
            except AttributeError:
                pass
        return flask.render_template('simulation.html',
                                     db=database_path,
                                     file_name=file_name,
                                     sim=sim_summary_results,
                                     runs=sim_results,
                                     inventory=inventory)
    except OSError:
        abort(404)
class ProdConfig(Config):
    PWD = os.path.abspath(os.curdir)
    DEBUG = False
    conf = deserialise_config(ABS_FILE_PATH_APPLICATION_CONFIG)
    db_path = conf.get('database_path', None)
    # Both branches guard against a missing database_path, so a None value is never
    # formatted into the URI.
    if db_path is not None and os.name in ['posix', 'mac']:
        SQLALCHEMY_DATABASE_URI = 'sqlite:///{}/reporting.db'.format(db_path)
    elif db_path is not None and os.name == 'nt':
        SQLALCHEMY_DATABASE_URI = 'sqlite:///{}\\reporting.db'.format(db_path)
    SECRET_KEY = '|~G\xde\xa7\x9b\x1aKaZ-\xabk8\x0b\x12\xee)\xe0\xe0\x8b\x0c\xd9\x1d'
    SESSION_PROTECTION = 'strong'
    SQLALCHEMY_ECHO = False
    SQLALCHEMY_TRACK_MODIFICATIONS = True
def check_for_db():
    """ Checks the _archive directory for the presence of a csv_management database.

    If a csv_management database does not exist, then a new instance should be created.

    Returns:
        bool: False indicates a database already exists and there is no need to create
              one; True indicates a database does not exist, so one should be created.
    """
    config = deserialise_config(ABS_FILE_PATH_CSV_MANAGEMENT_CONFIG)
    archived_files = [entry.name for entry in os.scandir(ABS_FILE_PATH_ARCHIVE)]
    return config.get('database_name', 'UNKNOWN') not in archived_files
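# A usage sketch drawn from the commented-out orchestration in the command-line entry
# point below; create_management_db is the helper those comments reference and is
# assumed to exist.
#
#     if check_for_db():
#         create_management_db()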
def copy_file():
    """ Copies the file used to load the analysis and stores it in the archive folder
    for posterity.
    """
    # Retrieve the file name and the path to the source data csv.
    config_file = deserialise_config(ABS_FILE_PATH_APPLICATION_CONFIG)
    file_name = config_file.get('file', 'UNKNOWN')
    file_path = config_file.get('database_path', 'UNKNOWN')
    if 'UNKNOWN' not in (file_path, file_name):
        try:
            # Copy the data source file to the archive, recording the archived path under
            # the same generated name used for the copy (a single uuid, so the config
            # entry matches the file on disk).
            archive_path = '{}/{}-{}.csv'.format(ABS_FILE_PATH_ARCHIVE, 'data_file', uuid.uuid1())
            copyfile('{}{}'.format(file_path, file_name), archive_path)
            config_file['file_path'] = archive_path
            serialise_config(config_file, ABS_FILE_PATH_APPLICATION_CONFIG)
        except FileNotFoundError:
            print('The source file is not in the same location as the analysis database. '
                  'Please place the file in the same location and restart the process.')
def setUp(self):
    self.STATES = ('EXCESS_RANK',
                   'SHORTAGE_RANK',
                   'TRAFFIC_LIGHT',
                   'CLASSIFICATION',
                   'FORECAST',
                   'RECOMMENDATION',
                   'INVENTORY_TURNS',
                   'START')
    self.orders_analysis = model_inventory.analyse(file_path=ABS_FILE_PATH['COMPLETE_CSV_SM'],
                                                   z_value=Decimal(1.28),
                                                   reorder_cost=Decimal(5000),
                                                   file_type="csv",
                                                   length=12)
    self.forecast = deserialise_config(ABS_FILE_PATH['FORECAST_PICKLE'])
    self.recommend = SkuMachine()
    self.states = SKUStates(analysed_orders=self.orders_analysis, forecast=self.forecast)
    self.recommend.add_state("start", self.states.initialise_machine)
    self.recommend.add_state("excess_rank", self.states.excess_rank)
    self.recommend.add_state("shortage_rank", self.states.shortage_rank)
    self.recommend.add_state("inventory_turns", self.states.inventory_turns)
    self.recommend.add_state("classification", self.states.classification)
    self.recommend.add_state("traffic_light", self.states.traffic_light)
    self.recommend.add_state("forecast", self.states.forecast)
    self.recommend.add_state("recommendation", self.recommend, end_state=1)
    self.recommend.set_start("start")
def load(file_path: str, location: str = None):
    if location is not None and os.name in ['posix', 'mac']:
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{}/reporting.db'.format(location)
    elif location is not None and os.name == 'nt':
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{}\\reporting.db'.format(location)
    log.log(logging.DEBUG, 'Loading data analysis for reporting suite... \n')
    db.create_all()
    log.log(logging.DEBUG, 'loading currency symbols...\n')
    print('loading currency symbols...', end="")
    fx = currency_codes()
    for key, value in fx.items():
        codes = Currency()
        codes.country = value[0]
        codes.symbol = value[1]
        codes.currency_code = key
        db.session.add(codes)
    db.session.commit()
    print('[COMPLETED]\n')
    config = deserialise_config(ABS_FILE_PATH_APPLICATION_CONFIG)
    currency = config.get('currency')
    log.log(logging.DEBUG, 'Analysing file: {}...\n'.format(file_path))
    print('Analysing file: {}...'.format(file_path), end="")
    # remove assumption file type is csv
    orders_analysis = model_inventory.analyse(file_path=file_path,
                                              z_value=Decimal(1.28),
                                              reorder_cost=Decimal(5000),
                                              file_type="csv",
                                              length=12,
                                              currency=currency)
    ia = [analysis.orders_summary() for analysis in
          model_inventory.analyse(file_path=file_path, z_value=Decimal(1.28),
                                  reorder_cost=Decimal(5000), file_type="csv",
                                  length=12, currency=currency)]
    date_now = datetime.datetime.now()
    analysis_summary = Inventory(processed_orders=orders_analysis)
    print('[COMPLETED]\n')
    log.log(logging.DEBUG, 'Calculating Forecasts...\n')
    print('Calculating Forecasts...', end="")
    simple_forecast = {analysis.sku_id: analysis.simple_exponential_smoothing_forecast
                       for analysis in model_inventory.analyse(file_path=file_path, z_value=Decimal(1.28),
                                                               reorder_cost=Decimal(5000), file_type="csv",
                                                               length=12, currency=currency)}
    holts_forecast = {analysis.sku_id: analysis.holts_trend_corrected_forecast
                      for analysis in model_inventory.analyse(file_path=file_path, z_value=Decimal(1.28),
                                                              reorder_cost=Decimal(5000), file_type="csv",
                                                              length=12, currency=currency)}
    transact = TransactionLog()
    transact.date = date_now
    db.session.add(transact)
    db.session.commit()
    transaction_sub = db.session.query(db.func.max(TransactionLog.date))
    transaction_id = db.session.query(TransactionLog).filter(TransactionLog.date == transaction_sub).first()
    # loads inventory profile recommendations
    load_profile_recommendations(analysed_order=orders_analysis,
                                 forecast=holts_forecast,
                                 transaction_log_id=transaction_id)
    # d = _Orchestrate()
    # d.update_database(int(transaction_id.id))
    forecast_types = ('ses', 'htces')
    for f_type in forecast_types:
        forecast_type = ForecastType()
        forecast_type.type = f_type
        db.session.add(forecast_type)
    db.session.commit()
    ses_id = db.session.query(ForecastType.id).filter(ForecastType.type == forecast_types[0]).first()
    htces_id = db.session.query(ForecastType.id).filter(ForecastType.type == forecast_types[1]).first()
    print('[COMPLETED]\n')
    log.log(logging.DEBUG, 'loading database ...\n')
    print('loading database ...', end="")
    for item in ia:
        skus_description = [summarised for summarised in analysis_summary.describe_sku(item['sku'])]
        denom = db.session.query(Currency.id).filter(Currency.currency_code == item['currency']).first()
        master_sku = MasterSkuList()
        master_sku.sku_id = item['sku']
        db.session.add(master_sku)
        i_up = InventoryAnalysis()
        mk = db.session.query(MasterSkuList.id).filter(MasterSkuList.sku_id == item['sku']).first()
        i_up.sku_id = mk.id
        tuple_orders = item['orders']
        i_up.abc_xyz_classification = item['ABC_XYZ_Classification']
        i_up.standard_deviation = item['standard_deviation']
        i_up.backlog = item['backlog']
        i_up.safety_stock = item['safety_stock']
        i_up.reorder_level = item['reorder_level']
        i_up.economic_order_quantity = item['economic_order_quantity']
        i_up.demand_variability = item['demand_variability']
        i_up.average_orders = round(float(item['average_orders']))
        i_up.shortages = item['shortages']
        i_up.excess_stock = item['excess_stock']
        i_up.reorder_quantity = item['reorder_quantity']
        i_up.economic_order_variable_cost = item['economic_order_variable_cost']
        i_up.unit_cost = item['unit_cost']
        i_up.revenue = item['revenue']
        i_up.date = date_now
        i_up.safety_stock_rank = skus_description[0]['safety_stock_rank']
        i_up.shortage_rank = skus_description[0]['shortage_rank']
        i_up.excess_cost = skus_description[0]['excess_cost']
        i_up.percentage_contribution_revenue = skus_description[0]['percentage_contribution_revenue']
        i_up.excess_rank = skus_description[0]['excess_rank']
        i_up.retail_price = skus_description[0]['retail_price']
        i_up.gross_profit_margin = skus_description[0]['gross_profit_margin']
        i_up.min_order = skus_description[0]['min_order']
        i_up.safety_stock_cost = skus_description[0]['safety_stock_cost']
        i_up.revenue_rank = skus_description[0]['revenue_rank']
        i_up.markup_percentage = skus_description[0]['markup_percentage']
        i_up.max_order = skus_description[0]['max_order']
        i_up.shortage_cost = skus_description[0]['shortage_cost']
        i_up.quantity_on_hand = item['quantity_on_hand']
        i_up.currency_id = denom.id
        i_up.traffic_light = skus_description[0]['inventory_traffic_light']
        i_up.inventory_turns = skus_description[0]['inventory_turns']
        i_up.transaction_log_id = transaction_id.id
        db.session.add(i_up)
        inva = db.session.query(InventoryAnalysis.id).filter(InventoryAnalysis.sku_id == mk.id).first()
        for i, t in enumerate(tuple_orders['demand'], 1):
            orders_data = Orders()
            orders_data.order_quantity = t
            orders_data.rank = i
            orders_data.analysis_id = inva.id
            db.session.add(orders_data)
        # need to select sku id
        for i, forecasted_demand in enumerate(simple_forecast, 1):
            if forecasted_demand == item['sku']:
                forecast_stats = ForecastStatistics()
                forecast_stats.analysis_id = inva.id
                forecast_stats.mape = simple_forecast.get(forecasted_demand)['mape']
                forecast_stats.forecast_type_id = ses_id.id
                forecast_stats.slope = simple_forecast.get(forecasted_demand)['statistics']['slope']
                forecast_stats.p_value = simple_forecast.get(forecasted_demand)['statistics']['pvalue']
                forecast_stats.test_statistic = simple_forecast.get(forecasted_demand)['statistics']['test_statistic']
                forecast_stats.slope_standard_error = simple_forecast.get(forecasted_demand)['statistics']['slope_standard_error']
                forecast_stats.intercept = simple_forecast.get(forecasted_demand)['statistics']['intercept']
                forecast_stats.standard_residuals = simple_forecast.get(forecasted_demand)['statistics']['std_residuals']
                forecast_stats.trending = simple_forecast.get(forecasted_demand)['statistics']['trend']
                forecast_stats.optimal_alpha = simple_forecast.get(forecasted_demand)['optimal_alpha']
                forecast_stats.optimal_gamma = 0
                db.session.add(forecast_stats)
                for p in range(0, len(simple_forecast.get(forecasted_demand)['forecast'])):
                    forecast_data = Forecast()
                    forecast_data.forecast_quantity = simple_forecast.get(forecasted_demand)['forecast'][p]
                    forecast_data.analysis_id = inva.id
                    forecast_data.forecast_type_id = ses_id.id
                    forecast_data.period = p + 1
                    forecast_data.create_date = date_now
                    db.session.add(forecast_data)
                for q, sesf in enumerate(simple_forecast.get(forecasted_demand)['forecast_breakdown']):
                    forecast_breakdown = ForecastBreakdown()
                    forecast_breakdown.analysis_id = inva.id
                    forecast_breakdown.forecast_type_id = ses_id.id
                    forecast_breakdown.trend = 0
                    forecast_breakdown.period = sesf['t']
                    forecast_breakdown.level_estimates = sesf['level_estimates']
                    forecast_breakdown.one_step_forecast = sesf['one_step_forecast']
                    forecast_breakdown.forecast_error = sesf['forecast_error']
                    forecast_breakdown.squared_error = sesf['squared_error']
                    forecast_breakdown.regression = simple_forecast.get(forecasted_demand)['regression'][q]
                    db.session.add(forecast_breakdown)
                break
        for i, holts_forecast_demand in enumerate(holts_forecast, 1):
            if holts_forecast_demand == item['sku']:
                forecast_stats = ForecastStatistics()
                forecast_stats.analysis_id = inva.id
                forecast_stats.mape = holts_forecast.get(holts_forecast_demand)['mape']
                forecast_stats.forecast_type_id = htces_id.id
                forecast_stats.slope = holts_forecast.get(holts_forecast_demand)['statistics']['slope']
                forecast_stats.p_value = holts_forecast.get(holts_forecast_demand)['statistics']['pvalue']
                forecast_stats.test_statistic = holts_forecast.get(holts_forecast_demand)['statistics']['test_statistic']
                forecast_stats.slope_standard_error = holts_forecast.get(holts_forecast_demand)['statistics']['slope_standard_error']
                forecast_stats.intercept = holts_forecast.get(holts_forecast_demand)['statistics']['intercept']
                forecast_stats.standard_residuals = holts_forecast.get(holts_forecast_demand)['statistics']['std_residuals']
                forecast_stats.trending = holts_forecast.get(holts_forecast_demand)['statistics']['trend']
                forecast_stats.optimal_alpha = holts_forecast.get(holts_forecast_demand)['optimal_alpha']
                forecast_stats.optimal_gamma = holts_forecast.get(holts_forecast_demand)['optimal_gamma']
                db.session.add(forecast_stats)
                for p in range(0, len(holts_forecast.get(holts_forecast_demand)['forecast'])):
                    forecast_data = Forecast()
                    forecast_data.forecast_quantity = holts_forecast.get(holts_forecast_demand)['forecast'][p]
                    forecast_data.analysis_id = inva.id
                    forecast_data.forecast_type_id = htces_id.id
                    forecast_data.period = p + 1
                    forecast_data.create_date = date_now
                    db.session.add(forecast_data)
                for i, htcesf in enumerate(holts_forecast.get(holts_forecast_demand)['forecast_breakdown']):
                    forecast_breakdown = ForecastBreakdown()
                    forecast_breakdown.analysis_id = inva.id
                    forecast_breakdown.forecast_type_id = htces_id.id
                    forecast_breakdown.trend = htcesf['trend']
                    forecast_breakdown.period = htcesf['t']
                    forecast_breakdown.level_estimates = htcesf['level_estimates']
                    forecast_breakdown.one_step_forecast = htcesf['one_step_forecast']
                    forecast_breakdown.forecast_error = htcesf['forecast_error']
                    forecast_breakdown.squared_error = htcesf['squared_error']
                    forecast_breakdown.regression = holts_forecast.get(holts_forecast_demand)['regression'][i]
                    db.session.add(forecast_breakdown)
                break
    db.session.commit()
    print('[COMPLETED]\n')
    loading = 'Loading recommendations into database... '
    print(loading, end="")
    load_recommendations(summary=ia, forecast=holts_forecast, analysed_order=orders_analysis)
    print('[COMPLETED]\n')
    log.log(logging.DEBUG, "Analysis ...\n")
    print("Analysis ... [COMPLETED]")
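# The version of load above re-parses the same CSV once for the analysis, once for the
# summary and once per forecast dictionary. A hedged sketch of collapsing those calls
# into a single analysis pass (the refactored load below takes this approach for the
# summary):
#
#     orders_analysis = model_inventory.analyse(file_path=file_path, z_value=Decimal(1.28),
#                                               reorder_cost=Decimal(5000), file_type="csv",
#                                               length=12, currency=currency)
#     ia = [analysis.orders_summary() for analysis in orders_analysis]
#     simple_forecast = {a.sku_id: a.simple_exponential_smoothing_forecast for a in orders_analysis}
#     holts_forecast = {a.sku_id: a.holts_trend_corrected_forecast for a in orders_analysis}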
recommend.add_state("start", states.initialise_machine) recommend.add_state("revenue", states.revenue) recommend.add_state("excess", states.excess) recommend.add_state("shortage", states.shortage) recommend.add_state("classification", states.classification) recommend.add_state("inventory", states.inventory_turns) recommend.add_state("recommendation", recommend, end_state=1) recommend.set_start("start") recommend.run() # have to serialise the profile recommendation seperately from the sku recommendation return deserialise_config(ABS_FILE_PATH['PROFILE_PICKLE']) if __name__ == '__main__': orders_analysis = model_inventory.analyse(file_path=ABS_FILE_PATH['COMPLETE_CSV_SM'], z_value=Decimal(1.28), reorder_cost=Decimal(5000), file_type="csv", length=12) # d = ProfileGenerator(analysed_orders=orders_analysis) # d.Top_Concerns() # resp = {} #for i in run_sku_recommendation(analysed_orders=orders_analysis, forecast=deserialise_config(FORECAST_PICKLE)).values(): # print(i) # d = ProfileStates(analysed_orders=orders_analysis, forecast=deserialise_config(FORECAST_PICKLE)) # d.revenue() for i in run_profile_recommendation(analysed_orders=orders_analysis, forecast=deserialise_config(FORECAST_PICKLE)).values(): print(i)
recommend.add_state("excess", states.excess) recommend.add_state("shortage", states.shortage) recommend.add_state("classification", states.classification) recommend.add_state("inventory", states.inventory_turns) recommend.add_state("recommendation", recommend, end_state=1) recommend.set_start("start") recommend.run() # have to serialise the profile recommendation seperately from the sku recommendation return deserialise_config(ABS_FILE_PATH['PROFILE_PICKLE']) if __name__ == '__main__': orders_analysis = model_inventory.analyse( file_path=ABS_FILE_PATH['COMPLETE_CSV_SM'], z_value=Decimal(1.28), reorder_cost=Decimal(5000), file_type="csv", length=12) # d = ProfileGenerator(analysed_orders=orders_analysis) # d.Top_Concerns() # resp = {} #for i in run_sku_recommendation(analysed_orders=orders_analysis, forecast=deserialise_config(FORECAST_PICKLE)).values(): # print(i) # d = ProfileStates(analysed_orders=orders_analysis, forecast=deserialise_config(FORECAST_PICKLE)) # d.revenue() for i in run_profile_recommendation( analysed_orders=orders_analysis, forecast=deserialise_config(FORECAST_PICKLE)).values(): print(i)
def main():
    parser = argparse.ArgumentParser(description='Supplychainpy command-line interface.')
    parser.add_argument(dest='filenames', metavar='filename', nargs='?')
    parser.add_argument('-l', '--launch', dest='launch', action='store_true',
                        help='Launches the supplychainpy reporting gui for setting the port '
                             'and launching the default browser.')
    parser.add_argument('-lx', '--launch-console', dest='launch_console', action='store_true',
                        help='Launches supplychainpy reporting in the default browser, without '
                             'the gui interface. Uses the default port (5000) unless another '
                             'port is specified.')
    parser.add_argument('-a', '--analyse', dest='analyse_file', action='store_true',
                        help='Processes the file supplied as the first argument and performs '
                             'the analysis.')
    parser.add_argument('-o', dest='outfile', action='store', help='output file')
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', help='verbose mode')
    parser.add_argument('-db', dest='database', action='store', help='database engine uri e.g. ')
    parser.add_argument('-cur', dest='currency', action='store',
                        help='Sets the currency for the analysis. The currency should match the '
                             'currency of the raw data. IMPORTANT: Currency conversion does not '
                             'occur by setting this flag. The default currency is US Dollars (USD).',
                        default='USD')
    parser.add_argument('--host', dest='host', action='store',
                        help='Sets the host for the server (default: 127.0.0.1).',
                        default='127.0.0.1')
    parser.add_argument('--debug', dest='debug', action='store_true',
                        help='Runs in debug mode (default: debug.INFO)')
    parser.add_argument('-loc', dest='location', action='store', help='database path e.g. ')
    parser.add_argument('-u', dest='user', action='store', help='user name ')
    parser.add_argument('-p', '--port', dest='port', action='store',
                        help='port to use for the local server e.g. 8080 (default: 5000)',
                        default='5000')
    parser.add_argument('-c', dest='chat', action='store_true', help='chat to dash from the command line')
    args = parser.parse_args()

    if args.verbose:
        print('filenames = {}'.format(args.filenames))
        print('launch reports = {}'.format(args.launch))
        print('process file = {}'.format(args.analyse_file))
        print('database location = {}'.format(args.location))

    if args.debug:
        logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

    # analyse_file and launch_console are store_true flags, so they are False (never
    # None) when absent; the branches below test them directly instead of with
    # `is None`, which could never match.
    if args.launch and not args.analyse_file and args.filenames is not None and args.location:  # -l -loc
        currency = args.currency if args.currency is not None else 'USD'
        app_settings = deserialise_config(ABS_FILE_PATH_APPLICATION_CONFIG)
        app_settings['database_path'] = args.location
        app_settings['file'] = args.filenames
        app_settings['currency'] = currency
        serialise_config(app_settings, ABS_FILE_PATH_APPLICATION_CONFIG)
        # d = _Orchestrate()
        # d.copy_file()
        # db_present = d.check_for_db()
        # if db_present:
        #     create_management_db()
        launch_load_report(args.filenames, args.location)
    elif args.launch and args.analyse_file and args.filenames is not None and args.location is not None:  # -a -loc -l
        currency = args.currency if args.currency is not None else 'USD'
        app_settings = deserialise_config(ABS_FILE_PATH_APPLICATION_CONFIG)
        app_settings['database_path'] = args.location
        app_settings['file'] = args.filenames
        app_settings['currency'] = currency
        serialise_config(app_settings, ABS_FILE_PATH_APPLICATION_CONFIG)
        launch_load_report(args.filenames, args.location)
    elif args.launch and args.location is not None and args.host:  # -l -loc --host
        app_settings = deserialise_config(ABS_FILE_PATH_APPLICATION_CONFIG)
        app_settings['database_path'] = args.location
        app_settings['port'] = args.port
        serialise_config(app_settings, ABS_FILE_PATH_APPLICATION_CONFIG)
        launch_report(location=args.location, host=args.host, port=args.port)
    elif args.launch and args.analyse_file and args.filenames and args.location:  # -a -l -loc
        currency = args.currency if args.currency is not None else 'USD'
        app_settings = deserialise_config(ABS_FILE_PATH_APPLICATION_CONFIG)
        app_settings['database_path'] = args.location
        app_settings['file'] = args.filenames
        app_settings['currency'] = currency
        serialise_config(app_settings, ABS_FILE_PATH_APPLICATION_CONFIG)
        launch_load_report(args.filenames, args.location)
        launch_report(location=args.location, host=args.host, port=args.port)
    elif args.analyse_file and args.location is not None and args.filenames is not None and not args.launch_console:  # -a
        load_db(file=args.filenames, location=args.location)
    elif args.analyse_file and args.location and args.filenames and args.launch_console and args.port:  # -a -loc -lx -p
        try:
            app_settings = {
                'database_path': args.location,
                'file': args.filenames,
                'currency': args.currency
            }
            serialise_config(app_settings, ABS_FILE_PATH_APPLICATION_CONFIG)
            load_db(file=args.filenames, location=args.location)
            launch_report_server(location=args.location, port=args.port, host=args.host)
        except OSError as e:
            print(e)
    elif args.location and args.launch_console and args.port and args.host:  # -loc -lx -p --host
        try:
            app_settings = {
                'database_path': args.location,
                'host': args.host,
                'currency': args.currency
            }
            serialise_config(app_settings, ABS_FILE_PATH_APPLICATION_CONFIG)
            launch_report_server(location=args.location, port=args.port, host=args.host)
        except OSError as e:
            print(e)
    elif args.analyse_file and args.location is not None and args.filenames is not None and not args.launch_console and args.host:  # -a -loc --host
        app_settings = {
            'database_path': args.location,
            'host': args.host,
            'currency': args.currency
        }
        serialise_config(app_settings, ABS_FILE_PATH_APPLICATION_CONFIG)
        launch_report_server(location=args.location, port=args.port, host=args.host)
    elif args.chat:
        msg = input("Enter message for Dash: ")
        while msg != 'q':
            dash = ChatBot()
            response = dash.chat_machine(message=msg)
            for i in response:
                print('Dash> ', i)
            msg = input("> ")
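# Example invocations (paths are placeholders; assumes the console entry point
# installed by the package is named supplychainpy):
#
#     supplychainpy data.csv -a -l -loc /home/user/reports             # analyse, then launch the gui
#     supplychainpy data.csv -a -lx -loc /home/user/reports -p 8080    # analyse, serve in the browser on port 8080
#     supplychainpy -c                                                 # chat with Dash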
def load(file_path: str, location: str = None):
    """ Loads analysis and forecast into the local database for the reporting suite.

    Args:
        file_path (str): File path to the source file containing the data for analysis.
        location (str): Location of the database to populate.
    """
    try:
        app = create_app()
        db.init_app(app)
        if location is not None and os.name in ['posix', 'mac']:
            app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{}/reporting.db'.format(location)
        elif location is not None and os.name == 'nt':
            app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{}\\reporting.db'.format(location)
        log.log(logging.DEBUG, 'Loading data analysis for reporting suite... \n')
        with app.app_context():
            db.create_all()
            log.log(logging.DEBUG, 'loading currency symbols...\n')
            print('loading currency symbols...', end="")
            fx = currency_codes()
            load_currency(fx, db)
            print('[COMPLETED]\n')
            config = deserialise_config(ABS_FILE_PATH_APPLICATION_CONFIG)
            currency = config.get('currency')
            log.log(logging.DEBUG, 'Analysing file: {}...\n'.format(file_path))
            print('Analysing file: {}...'.format(file_path), end="")
            orders_analysis = model_inventory.analyse(file_path=file_path,
                                                      z_value=Decimal(1.28),
                                                      reorder_cost=Decimal(5000),
                                                      file_type="csv",
                                                      length=12,
                                                      currency=currency)
            ia = [analysis.orders_summary() for analysis in orders_analysis]
            date_now = datetime.datetime.now()
            analysis_summary = Inventory(processed_orders=orders_analysis)
            print('[COMPLETED]\n')
            log.log(logging.DEBUG, 'Calculating Forecasts...\n')
            print('Calculating Forecasts...', end="")
            # Leave one core free for the main process; guard against a single-core
            # host, where cpu_count() - 1 would be zero.
            cores = max(1, multiprocessing.cpu_count() - 1)
            simple_forecast = {}
            with multiprocessing.Pool(processes=cores) as pool:
                simple_forecast_gen = ({analysis.sku_id: pool.apply_async(_analysis_forecast_simple,
                                                                          args=(analysis,))}
                                       for analysis in orders_analysis)
                for gen in simple_forecast_gen:
                    simple_forecast.update(gen)
                simple_forecast = {key: value.get() for key, value in simple_forecast.items()}
                holts_forecast_gen = {analysis.sku_id: pool.apply_async(_analysis_forecast_holt,
                                                                        args=(analysis,))
                                      for analysis in orders_analysis}
                holts_forecast = {key: holts_forecast_gen[key].get() for key in holts_forecast_gen}
            # with ProcessPoolExecutor(max_workers=cores) as executor:
            #     simple_forecast_futures = {analysis.sku_id: executor.submit(_analysis_forecast_simple, analysis)
            #                                for analysis in orders_analysis}
            #     simple_forecast_gen = {future: concurrent.futures.as_completed(simple_forecast_futures[future])
            #                            for future in simple_forecast_futures}
            #     simple_forecast = {value: simple_forecast_futures[value].result() for value in simple_forecast_gen}
            #     holts_forecast_futures = {analysis.sku_id: executor.submit(_analysis_forecast_holt, analysis)
            #                               for analysis in orders_analysis}
            #     holts_forecast_gen = {future: concurrent.futures.as_completed(holts_forecast_futures[future])
            #                           for future in holts_forecast_futures}
            #     holts_forecast = {value: holts_forecast_futures[value].result() for value in holts_forecast_gen}
            #     executor.shutdown(wait=False)
            transact = TransactionLog()
            transact.date = date_now
            db.session.add(transact)
            db.session.commit()
            transaction_sub = db.session.query(db.func.max(TransactionLog.date))
            transaction_id = db.session.query(TransactionLog).filter(
                TransactionLog.date == transaction_sub).first()
            load_profile_recommendations(analysed_order=orders_analysis,
                                         forecast=holts_forecast,
                                         transaction_log_id=transaction_id)
            # d = _Orchestrate()
            # d.update_database(int(transaction_id.id))
            forecast_types = ('ses', 'htces')
            for f_type in forecast_types:
                forecast_type = ForecastType()
                forecast_type.type = f_type
                db.session.add(forecast_type)
            db.session.commit()
            ses_id = db.session.query(ForecastType.id).filter(ForecastType.type == forecast_types[0]).first()
            htces_id = db.session.query(ForecastType.id).filter(ForecastType.type == forecast_types[1]).first()
            print('[COMPLETED]\n')
            log.log(logging.DEBUG, 'loading database ...\n')
            print('loading database ...', end="")
            for item in ia:
                skus_description = [summarised for summarised in analysis_summary.describe_sku(item['sku'])]
                denom = db.session.query(Currency.id).filter(Currency.currency_code == item['currency']).first()
                master_sku = MasterSkuList()
                master_sku.sku_id = item['sku']
                db.session.add(master_sku)
                i_up = InventoryAnalysis()
                mk = db.session.query(MasterSkuList.id).filter(MasterSkuList.sku_id == item['sku']).first()
                i_up.sku_id = mk.id
                tuple_orders = item['orders']
                i_up.abc_xyz_classification = item['ABC_XYZ_Classification']
                i_up.standard_deviation = item['standard_deviation']
                i_up.backlog = item['backlog']
                i_up.safety_stock = item['safety_stock']
                i_up.reorder_level = item['reorder_level']
                i_up.economic_order_quantity = item['economic_order_quantity']
                i_up.demand_variability = item['demand_variability']
                i_up.average_orders = round(float(item['average_orders']))
                i_up.shortages = item['shortages']
                i_up.excess_stock = item['excess_stock']
                i_up.reorder_quantity = item['reorder_quantity']
                i_up.economic_order_variable_cost = item['economic_order_variable_cost']
                i_up.unit_cost = item['unit_cost']
                i_up.revenue = item['revenue']
                i_up.date = date_now
                i_up.safety_stock_rank = skus_description[0]['safety_stock_rank']
                i_up.shortage_rank = skus_description[0]['shortage_rank']
                i_up.excess_cost = skus_description[0]['excess_cost']
                i_up.percentage_contribution_revenue = skus_description[0]['percentage_contribution_revenue']
                i_up.excess_rank = skus_description[0]['excess_rank']
                i_up.retail_price = skus_description[0]['retail_price']
                i_up.gross_profit_margin = skus_description[0]['gross_profit_margin']
                i_up.min_order = skus_description[0]['min_order']
                i_up.safety_stock_cost = skus_description[0]['safety_stock_cost']
                i_up.revenue_rank = skus_description[0]['revenue_rank']
                i_up.markup_percentage = skus_description[0]['markup_percentage']
                i_up.max_order = skus_description[0]['max_order']
                i_up.shortage_cost = skus_description[0]['shortage_cost']
                i_up.quantity_on_hand = item['quantity_on_hand']
                i_up.currency_id = denom.id
                i_up.traffic_light = skus_description[0]['inventory_traffic_light']
                i_up.inventory_turns = skus_description[0]['inventory_turns']
                i_up.transaction_log_id = transaction_id.id
                db.session.add(i_up)
                inva = db.session.query(InventoryAnalysis.id).filter(
                    InventoryAnalysis.sku_id == mk.id).first()
                for i, t in enumerate(tuple_orders['demand'], 1):
                    orders_data = Orders()
                    orders_data.order_quantity = t
                    orders_data.rank = i
                    orders_data.analysis_id = inva.id
                    db.session.add(orders_data)
                # need to select sku id
                for i, forecasted_demand in enumerate(simple_forecast, 1):
                    if forecasted_demand == item['sku']:
                        forecast_stats = ForecastStatistics()
                        forecast_stats.analysis_id = inva.id
                        forecast_stats.mape = simple_forecast.get(forecasted_demand)['mape']
                        forecast_stats.forecast_type_id = ses_id.id
                        forecast_stats.slope = simple_forecast.get(forecasted_demand)['statistics']['slope']
                        forecast_stats.p_value = simple_forecast.get(forecasted_demand)['statistics']['pvalue']
                        forecast_stats.test_statistic = simple_forecast.get(forecasted_demand)['statistics']['test_statistic']
                        forecast_stats.slope_standard_error = simple_forecast.get(forecasted_demand)['statistics']['slope_standard_error']
                        forecast_stats.intercept = simple_forecast.get(forecasted_demand)['statistics']['intercept']
                        forecast_stats.standard_residuals = simple_forecast.get(forecasted_demand)['statistics']['std_residuals']
                        forecast_stats.trending = simple_forecast.get(forecasted_demand)['statistics']['trend']
                        forecast_stats.optimal_alpha = simple_forecast.get(forecasted_demand)['optimal_alpha']
                        forecast_stats.optimal_gamma = 0
                        db.session.add(forecast_stats)
                        for p in range(0, len(simple_forecast.get(forecasted_demand)['forecast'])):
                            forecast_data = Forecast()
                            forecast_data.forecast_quantity = simple_forecast.get(forecasted_demand)['forecast'][p]
                            forecast_data.analysis_id = inva.id
                            forecast_data.forecast_type_id = ses_id.id
                            forecast_data.period = p + 1
                            forecast_data.create_date = date_now
                            db.session.add(forecast_data)
                        for q, sesf in enumerate(simple_forecast.get(forecasted_demand)['forecast_breakdown']):
                            forecast_breakdown = ForecastBreakdown()
                            forecast_breakdown.analysis_id = inva.id
                            forecast_breakdown.forecast_type_id = ses_id.id
                            forecast_breakdown.trend = 0
                            forecast_breakdown.period = sesf['t']
                            forecast_breakdown.level_estimates = sesf['level_estimates']
                            forecast_breakdown.one_step_forecast = sesf['one_step_forecast']
                            forecast_breakdown.forecast_error = sesf['forecast_error']
                            forecast_breakdown.squared_error = sesf['squared_error']
                            forecast_breakdown.regression = simple_forecast.get(forecasted_demand)['regression'][q]
                            db.session.add(forecast_breakdown)
                        break
                for i, holts_forecast_demand in enumerate(holts_forecast, 1):
                    if holts_forecast_demand == item['sku']:
                        forecast_stats = ForecastStatistics()
                        forecast_stats.analysis_id = inva.id
                        forecast_stats.mape = holts_forecast.get(holts_forecast_demand)['mape']
                        forecast_stats.forecast_type_id = htces_id.id
                        forecast_stats.slope = holts_forecast.get(holts_forecast_demand)['statistics']['slope']
                        forecast_stats.p_value = holts_forecast.get(holts_forecast_demand)['statistics']['pvalue']
                        forecast_stats.test_statistic = holts_forecast.get(holts_forecast_demand)['statistics']['test_statistic']
                        forecast_stats.slope_standard_error = holts_forecast.get(holts_forecast_demand)['statistics']['slope_standard_error']
                        forecast_stats.intercept = holts_forecast.get(holts_forecast_demand)['statistics']['intercept']
                        forecast_stats.standard_residuals = holts_forecast.get(holts_forecast_demand)['statistics']['std_residuals']
                        forecast_stats.trending = holts_forecast.get(holts_forecast_demand)['statistics']['trend']
                        forecast_stats.optimal_alpha = holts_forecast.get(holts_forecast_demand)['optimal_alpha']
                        forecast_stats.optimal_gamma = holts_forecast.get(holts_forecast_demand)['optimal_gamma']
                        db.session.add(forecast_stats)
                        for p in range(0, len(holts_forecast.get(holts_forecast_demand)['forecast'])):
                            forecast_data = Forecast()
                            forecast_data.forecast_quantity = holts_forecast.get(holts_forecast_demand)['forecast'][p]
                            forecast_data.analysis_id = inva.id
                            forecast_data.forecast_type_id = htces_id.id
                            forecast_data.period = p + 1
                            forecast_data.create_date = date_now
                            db.session.add(forecast_data)
                        for i, htcesf in enumerate(holts_forecast.get(holts_forecast_demand)['forecast_breakdown']):
                            forecast_breakdown = ForecastBreakdown()
                            forecast_breakdown.analysis_id = inva.id
                            forecast_breakdown.forecast_type_id = htces_id.id
                            forecast_breakdown.trend = htcesf['trend']
                            forecast_breakdown.period = htcesf['t']
                            forecast_breakdown.level_estimates = htcesf['level_estimates']
                            forecast_breakdown.one_step_forecast = htcesf['one_step_forecast']
                            forecast_breakdown.forecast_error = htcesf['forecast_error']
                            forecast_breakdown.squared_error = htcesf['squared_error']
                            forecast_breakdown.regression = holts_forecast.get(holts_forecast_demand)['regression'][i]
                            db.session.add(forecast_breakdown)
                        break
            db.session.commit()
            print('[COMPLETED]\n')
            loading = 'Loading recommendations into database... '
            print(loading, end="")
            load_recommendations(summary=ia, forecast=holts_forecast, analysed_order=orders_analysis)
            print('[COMPLETED]\n')
            log.log(logging.DEBUG, "Analysis ...\n")
            print("Analysis ... [COMPLETED]")
    except OSError as e:
        print(e)
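# The forecast fan-out above, reduced to its essentials as a hedged sketch: one worker
# process per spare core, one pending result per SKU. The helper name _fan_out is
# hypothetical; worker stands in for _analysis_forecast_simple or _analysis_forecast_holt.
def _fan_out(analyses, worker):
    cores = max(1, multiprocessing.cpu_count() - 1)
    with multiprocessing.Pool(processes=cores) as pool:
        # Submit every analysis first, then block on the results, so the pool stays
        # busy instead of serialising one .get() per submit.
        pending = {analysis.sku_id: pool.apply_async(worker, args=(analysis,))
                   for analysis in analyses}
        return {sku_id: result.get() for sku_id, result in pending.items()}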
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import os

from supplychainpy._helpers._config_file_paths import ABS_FILE_PATH_APPLICATION_CONFIG
from supplychainpy._helpers._pickle_config import deserialise_config
from supplychainpy.reporting.app import create_app
from supplychainpy.reporting.extensions import db

config = deserialise_config(ABS_FILE_PATH_APPLICATION_CONFIG)
app = create_app()

if os.name in ['posix', 'mac']:
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{}/reporting.db'.format(config.get('database_path'))
elif os.name == 'nt':
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{}\\reporting.db'.format(config.get('database_path'))

with app.app_context():
    db.create_all()

if __name__ == "__main__":
    # Target this file when running the application behind an HTTP server such as
    # Nginx with Gunicorn.
    app.run()
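# Example server invocation (the module path is an assumption based on the imports
# above; adjust it to wherever this file lives in the package):
#
#     gunicorn supplychainpy.reporting.wsgi:app --bind 127.0.0.1:5000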