def __init__(self):
    """Initialise the report-server thread with its default bind settings.

    The Flask application is created eagerly so the thread's ``run`` hook
    only has to serve it.
    """
    threading.Thread.__init__(self)
    # Thread identity / progress message.
    self.name = "reports"
    self.message = "launching reports"
    # Default bind address; callers may overwrite before starting the thread.
    self.host = '127.0.0.1'
    self.port = 5000
    # The WSGI application served by this thread.
    self.app = create_app()
def launch_report_server(location: str = None, port: int = 5000, host: str = '127.0.0.1'):
    """Configure the reporting database and start the report server thread.

    Args:
        location (str): Directory containing (or to contain) ``reporting.db``.
            When ``None`` the application's default database URI is kept.
        port (int): Port the report server binds to.
        host (str): Host/interface the report server binds to.
    """
    app = create_app()
    if location is not None and os.name in ['posix', 'mac']:
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{}/reporting.db'.format(location)
    elif location is not None and os.name == 'nt':
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{}\\reporting.db'.format(location)
    # BUG FIX: db.create_all() requires an active application context
    # (the module-level bootstrap in this package already does it this way).
    with app.app_context():
        db.create_all()
    app_launch = ReportsLauncher()
    app_launch.port = port
    app_launch.host = host
    # BUG FIX: the launcher was previously configured but never started, so
    # this function returned without ever serving reports.
    app_launch.start()
def load_db(file: str, location: str = None):
    """Create the reporting schema (if needed) and load *file* into it.

    Args:
        file (str): Path to the source data file to analyse and load.
        location (str): Directory containing (or to contain) ``reporting.db``.
            When ``None`` the default database URI and location are used.
    """
    from supplychainpy.reporting import load
    app = create_app()
    if location is not None and os.name in ['posix', 'mac']:
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{}/reporting.db'.format(location)
    elif location is not None and os.name == 'nt':
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{}\\reporting.db'.format(location)
    # BUG FIX: db.create_all() must run inside an application context
    # (matching the module-level bootstrap and load.load in this package).
    with app.app_context():
        db.create_all()
    if location is not None:
        load.load(file, location)
    else:
        load.load(file)
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, # INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import os from supplychainpy._helpers._config_file_paths import ABS_FILE_PATH_APPLICATION_CONFIG from supplychainpy._helpers._pickle_config import deserialise_config from supplychainpy.reporting.app import create_app from supplychainpy.reporting.extensions import db config = deserialise_config(ABS_FILE_PATH_APPLICATION_CONFIG) app = create_app() if os.name in ['posix', 'mac']: app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{}/reporting.db'.format( config.get('database_path')) elif os.name == 'nt': app.config[ 'SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{}\\reporting.db'.format( config.get('database_path')) with app.app_context(): db.create_all() if __name__ == "__main__": """Target this file for running application on a server using http server like Nginx and Gunicorn. """ app.run()
def load(file_path: str, location: str = None):
    """ Loads analysis and forecast into local database for reporting suite.

    Args:
        file_path (str): File path to source file containing data for analysis.
        location (str): Location of database to populate.

    """
    try:
        # Build the Flask app and, when a location is given, point SQLAlchemy
        # at the sqlite reporting database in that directory.
        app = create_app()
        db.init_app(app)
        if location is not None and os.name in ['posix', 'mac']:
            app.config[
                'SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{}/reporting.db'.format(
                    location)
        elif location is not None and os.name == 'nt':
            app.config[
                'SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{}\\reporting.db'.format(
                    location)
        log.log(logging.DEBUG, 'Loading data analysis for reporting suite... \n')
        with app.app_context():
            db.create_all()
            # Seed the currency reference table.
            log.log(logging.DEBUG, 'loading currency symbols...\n')
            print('loading currency symbols...', end="")
            fx = currency_codes()
            load_currency(fx, db)
            print('[COMPLETED]\n')
            config = deserialise_config(ABS_FILE_PATH_APPLICATION_CONFIG)
            currency = config.get('currency')
            # Run the inventory analysis over the source csv.
            log.log(logging.DEBUG, 'Analysing file: {}...\n'.format(file_path))
            print('Analysing file: {}...'.format(file_path), end="")
            orders_analysis = model_inventory.analyse(
                file_path=file_path,
                z_value=Decimal(1.28),
                reorder_cost=Decimal(5000),
                file_type="csv",
                length=12,
                currency=currency)
            ia = [analysis.orders_summary() for analysis in orders_analysis]
            date_now = datetime.datetime.now()
            analysis_summary = Inventory(processed_orders=orders_analysis)
            print('[COMPLETED]\n')
            # Fan per-SKU forecasts out over a process pool, leaving one core
            # free for the rest of the system.
            # NOTE(review): on a single-core host this leaves cores == 0 and
            # mp.Pool(processes=0) raises ValueError — confirm and guard.
            log.log(logging.DEBUG, 'Calculating Forecasts...\n')
            print('Calculating Forecasts...', end="")
            cores = int(multiprocessing.cpu_count())
            cores -= 1
            import multiprocessing as mp
            simple_forecast_gen = {}
            simple_forecast = {}
            with mp.Pool(processes=cores) as pool:
                # Generator of single-entry {sku_id: AsyncResult} dicts,
                # merged into simple_forecast below.
                simple_forecast_gen = ({
                    analysis.sku_id: pool.apply_async(_analysis_forecast_simple,
                                                      args=(analysis, ))
                } for analysis in orders_analysis)
                for gen in simple_forecast_gen:
                    simple_forecast.update(gen)
                # .get() must run while the pool is still alive.
                simple_forecast = {
                    key: value.get()
                    for key, value in simple_forecast.items()
                }
                holts_forecast_gen = {
                    analysis.sku_id: pool.apply_async(_analysis_forecast_holt,
                                                      args=(analysis, ))
                    for analysis in orders_analysis
                }
                holts_forecast = {
                    key: holts_forecast_gen[key].get()
                    for key in holts_forecast_gen
                }
            # with ProcessPoolExecutor(max_workers=cores) as executor:
            #    simple_forecast_futures = { analysis.sku_id: executor.submit(_analysis_forecast_simple, analysis) for analysis in orders_analysis}
            #    simple_forecast_gen = {future: concurrent.futures.as_completed(simple_forecast_futures[future]) for future in simple_forecast_futures}
            #    simple_forecast = {value: simple_forecast_futures[value].result() for value in simple_forecast_gen}
            #    holts_forecast_futures = { analysis.sku_id: executor.submit(_analysis_forecast_holt, analysis) for analysis in orders_analysis}
            #    holts_forecast_gen = { future: concurrent.futures.as_completed(holts_forecast_futures[future]) for future in holts_forecast_futures}
            #    holts_forecast = {value: holts_forecast_futures[value].result() for value in holts_forecast_gen}
            #    executor.shutdown(wait=False)
            # Record this load as a transaction and read its row back via the
            # max-date subquery (presumably unique per load — verify).
            transact = TransactionLog()
            transact.date = date_now
            db.session.add(transact)
            db.session.commit()
            transaction_sub = db.session.query(db.func.max(
                TransactionLog.date))
            transaction_id = db.session.query(TransactionLog).filter(
                TransactionLog.date == transaction_sub).first()
            load_profile_recommendations(analysed_order=orders_analysis,
                                         forecast=holts_forecast,
                                         transaction_log_id=transaction_id)
            # d = _Orchestrate()
            # d.update_database(int(transaction_id.id))
            # Register the two forecast types and keep their ids for FK use.
            forecast_types = ('ses', 'htces')
            for f_type in forecast_types:
                forecast_type = ForecastType()
                forecast_type.type = f_type
                db.session.add(forecast_type)
                db.session.commit()
            ses_id = db.session.query(ForecastType.id).filter(
                ForecastType.type == forecast_types[0]).first()
            htces_id = db.session.query(ForecastType.id).filter(
                ForecastType.type == forecast_types[1]).first()
            print('[COMPLETED]\n')
            log.log(logging.DEBUG, 'loading database ...\n')
            print('loading database ...', end="")
            # Persist each SKU summary plus its orders, forecasts and
            # forecast breakdowns.
            for item in ia:
                re = 0  # NOTE(review): unused, and shadows any `re` module import — candidate for removal.
                skus_description = [
                    summarised for summarised in analysis_summary.describe_sku(
                        item['sku'])
                ]
                denom = db.session.query(Currency.id).filter(
                    Currency.currency_code == item['currency']).first()
                master_sku = MasterSkuList()
                master_sku.sku_id = item['sku']
                db.session.add(master_sku)
                i_up = InventoryAnalysis()
                mk = db.session.query(MasterSkuList.id).filter(
                    MasterSkuList.sku_id == item['sku']).first()
                i_up.sku_id = mk.id
                tuple_orders = item['orders']
                # print(tuple_orders)
                # Copy the analysis summary fields onto the row.
                i_up.abc_xyz_classification = item['ABC_XYZ_Classification']
                i_up.standard_deviation = item['standard_deviation']
                i_up.backlog = item['backlog']
                i_up.safety_stock = item['safety_stock']
                i_up.reorder_level = item['reorder_level']
                i_up.economic_order_quantity = item['economic_order_quantity']
                i_up.demand_variability = item['demand_variability']
                i_up.average_orders = round(float(item['average_orders']))
                i_up.shortages = item['shortages']
                i_up.excess_stock = item['excess_stock']
                i_up.reorder_quantity = item['reorder_quantity']
                i_up.economic_order_variable_cost = item[
                    'economic_order_variable_cost']
                i_up.unit_cost = item['unit_cost']
                i_up.revenue = item['revenue']
                i_up.date = date_now
                # Ranked/derived metrics come from the describe_sku output.
                i_up.safety_stock_rank = skus_description[0][
                    'safety_stock_rank']
                i_up.shortage_rank = skus_description[0]['shortage_rank']
                i_up.excess_cost = skus_description[0]['excess_cost']
                i_up.percentage_contribution_revenue = skus_description[0][
                    'percentage_contribution_revenue']
                i_up.excess_rank = skus_description[0]['excess_rank']
                i_up.retail_price = skus_description[0]['retail_price']
                i_up.gross_profit_margin = skus_description[0][
                    'gross_profit_margin']
                i_up.min_order = skus_description[0]['min_order']
                i_up.safety_stock_cost = skus_description[0][
                    'safety_stock_cost']
                i_up.revenue_rank = skus_description[0]['revenue_rank']
                i_up.markup_percentage = skus_description[0][
                    'markup_percentage']
                i_up.max_order = skus_description[0]['max_order']
                i_up.shortage_cost = skus_description[0]['shortage_cost']
                i_up.quantity_on_hand = item['quantity_on_hand']
                i_up.currency_id = denom.id
                i_up.traffic_light = skus_description[0][
                    'inventory_traffic_light']
                i_up.inventory_turns = skus_description[0]['inventory_turns']
                i_up.transaction_log_id = transaction_id.id
                db.session.add(i_up)
                inva = db.session.query(InventoryAnalysis.id).filter(
                    InventoryAnalysis.sku_id == mk.id).first()
                # Raw demand observations, ranked by period.
                for i, t in enumerate(tuple_orders['demand'], 1):
                    orders_data = Orders()
                    # print(r)
                    orders_data.order_quantity = t
                    orders_data.rank = i
                    orders_data.analysis_id = inva.id
                    db.session.add(orders_data)
                # need to select sku id
                # Persist the simple exponential smoothing (ses) forecast for
                # this SKU; break after the matching SKU is found.
                # NOTE(review): `i` is unused here and reused by loops below.
                for i, forecasted_demand in enumerate(simple_forecast, 1):
                    if forecasted_demand == item['sku']:
                        forecast_stats = ForecastStatistics()
                        forecast_stats.analysis_id = inva.id
                        forecast_stats.mape = simple_forecast.get(
                            forecasted_demand)['mape']
                        forecast_stats.forecast_type_id = ses_id.id
                        forecast_stats.slope = simple_forecast.get(
                            forecasted_demand)['statistics']['slope']
                        forecast_stats.p_value = simple_forecast.get(
                            forecasted_demand)['statistics']['pvalue']
                        forecast_stats.test_statistic = simple_forecast.get(
                            forecasted_demand)['statistics']['test_statistic']
                        forecast_stats.slope_standard_error = simple_forecast.get(
                            forecasted_demand
                        )['statistics']['slope_standard_error']
                        forecast_stats.intercept = simple_forecast.get(
                            forecasted_demand)['statistics']['intercept']
                        forecast_stats.standard_residuals = simple_forecast.get(
                            forecasted_demand)['statistics']['std_residuals']
                        forecast_stats.trending = simple_forecast.get(
                            forecasted_demand)['statistics']['trend']
                        forecast_stats.optimal_alpha = simple_forecast.get(
                            forecasted_demand)['optimal_alpha']
                        # ses has no trend/seasonality component, hence gamma 0.
                        forecast_stats.optimal_gamma = 0
                        db.session.add(forecast_stats)
                        # One Forecast row per forecast period.
                        for p in range(
                                0,
                                len(
                                    simple_forecast.get(forecasted_demand)
                                    ['forecast'])):
                            forecast_data = Forecast()
                            forecast_data.forecast_quantity = simple_forecast.get(
                                forecasted_demand)['forecast'][p]
                            forecast_data.analysis_id = inva.id
                            forecast_data.forecast_type_id = ses_id.id
                            forecast_data.period = p + 1
                            forecast_data.create_date = date_now
                            db.session.add(forecast_data)
                        # One ForecastBreakdown row per smoothing step.
                        for q, sesf in enumerate(
                                simple_forecast.get(forecasted_demand)
                                ['forecast_breakdown']):
                            forecast_breakdown = ForecastBreakdown()
                            forecast_breakdown.analysis_id = inva.id
                            forecast_breakdown.forecast_type_id = ses_id.id
                            forecast_breakdown.trend = 0
                            forecast_breakdown.period = sesf['t']
                            forecast_breakdown.level_estimates = \
                                sesf['level_estimates']
                            forecast_breakdown.one_step_forecast = \
                                sesf['one_step_forecast']
                            forecast_breakdown.forecast_error = \
                                sesf['forecast_error']
                            forecast_breakdown.squared_error = sesf[
                                'squared_error']
                            forecast_breakdown.regression = simple_forecast.get(
                                forecasted_demand)['regression'][q]
                            db.session.add(forecast_breakdown)
                        break
                # Persist the Holt's trend-corrected (htces) forecast for this
                # SKU; same shape as the ses block above.
                for i, holts_forecast_demand in enumerate(holts_forecast, 1):
                    if holts_forecast_demand == item['sku']:
                        forecast_stats = ForecastStatistics()
                        forecast_stats.analysis_id = inva.id
                        forecast_stats.mape = holts_forecast.get(
                            holts_forecast_demand)['mape']
                        forecast_stats.forecast_type_id = htces_id.id
                        forecast_stats.slope = holts_forecast.get(
                            holts_forecast_demand)['statistics']['slope']
                        forecast_stats.p_value = holts_forecast.get(
                            holts_forecast_demand)['statistics']['pvalue']
                        forecast_stats.test_statistic = holts_forecast.get(
                            holts_forecast_demand
                        )['statistics']['test_statistic']
                        forecast_stats.slope_standard_error = holts_forecast.get(
                            holts_forecast_demand
                        )['statistics']['slope_standard_error']
                        forecast_stats.intercept = holts_forecast.get(
                            holts_forecast_demand)['statistics']['intercept']
                        forecast_stats.standard_residuals = holts_forecast.get(
                            holts_forecast_demand
                        )['statistics']['std_residuals']
                        forecast_stats.trending = holts_forecast.get(
                            holts_forecast_demand)['statistics']['trend']
                        forecast_stats.optimal_alpha = holts_forecast.get(
                            holts_forecast_demand)['optimal_alpha']
                        forecast_stats.optimal_gamma = holts_forecast.get(
                            holts_forecast_demand)['optimal_gamma']
                        db.session.add(forecast_stats)
                        for p in range(
                                0,
                                len(
                                    holts_forecast.get(holts_forecast_demand)
                                    ['forecast'])):
                            forecast_data = Forecast()
                            forecast_data.forecast_quantity = holts_forecast.get(
                                holts_forecast_demand)['forecast'][p]
                            forecast_data.analysis_id = inva.id
                            forecast_data.forecast_type_id = htces_id.id
                            forecast_data.period = p + 1
                            forecast_data.create_date = date_now
                            db.session.add(forecast_data)
                        # NOTE(review): the outer loop index `i` is shadowed
                        # here — confirm intended.
                        for i, htcesf in enumerate(
                                holts_forecast.get(holts_forecast_demand)
                                ['forecast_breakdown']):
                            forecast_breakdown = ForecastBreakdown()
                            forecast_breakdown.analysis_id = inva.id
                            forecast_breakdown.forecast_type_id = htces_id.id
                            forecast_breakdown.trend = htcesf['trend']
                            forecast_breakdown.period = htcesf['t']
                            forecast_breakdown.level_estimates = \
                                htcesf['level_estimates']
                            forecast_breakdown.one_step_forecast = \
                                htcesf['one_step_forecast']
                            forecast_breakdown.forecast_error = \
                                htcesf['forecast_error']
                            forecast_breakdown.squared_error = htcesf[
                                'squared_error']
                            forecast_breakdown.regression = holts_forecast.get(
                                holts_forecast_demand)['regression'][i]
                            db.session.add(forecast_breakdown)
                        break
                # Commit per SKU.
                db.session.commit()
            print('[COMPLETED]\n')
            loading = 'Loading recommendations into database... '
            print(loading, end="")
            load_recommendations(summary=ia,
                                 forecast=holts_forecast,
                                 analysed_order=orders_analysis)
            print('[COMPLETED]\n')
            log.log(logging.DEBUG, "Analysis ...\n")
            print("Analysis ... [COMPLETED]")
    except OSError as e:
        # Best-effort: report filesystem/database-path problems and return.
        print(e)