accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32)) # initializing all variables init = tf.global_variables_initializer() # launch the graph saver = tf.train.Saver(tf.global_variables(), max_to_keep=1) tf.add_to_collection('accuracy', accuracy) tf.add_to_collection('x1', x1) tf.add_to_collection('x2', x2) tf.add_to_collection('y', y) with tf.Session() as sess: sess.run(init) step = 1 model = data_helpers.load_model( './Data/GoogleNews-vectors-negative300.bin') if test == 'PDTB': sentences1, sentences2, labels = data_helpers.load_labels_and_data_PDTB( model, './Data/PDTB_implicit/train.txt') elif test == 'SICK': sentences1, sentences2, labels = data_helpers.load_data_SICK( model, './Data/SICK/train.txt') total = 0 while total < training_iters: start = total % len(sentences1) end = (total + batch_size) % len(sentences1) if end <= start: end = len(sentences1) batch_x1 = sentences1[start:end] batch_x2 = sentences2[start:end]
def forecast_oot(req_dict):
    """Run the out-of-time forecast pipeline for a request.

    Produces four result sets: SKU-level forecasts (weekly/monthly),
    retailer+SKU forecasts (weekly/monthly), SKU forecast-vs-actual
    analysis (table, graph series, totals), and retailer+SKU analysis.

    Parameters
    ----------
    req_dict : request object with attributes futurePeriod,
        historicalPeriod, forecastStart, data, market, category, sku.
        # NOTE(review): attribute access implies an object, not a dict,
        # despite the name — confirm against callers.

    Returns
    -------
    dict : nested JSON-serializable structure under 'forecastResults'.
    """
    # Get all future forecast, future forecast analysis, retailer forecast,
    # retailer forecast analysis.
    future_steps = int(req_dict.futurePeriod)
    past_steps = int(req_dict.historicalPeriod)
    # `start` is compared with int(...) below, so it is presumably a week
    # identifier in string form — TODO confirm the forecastWeek encoding.
    start = req_dict.forecastStart
    analysis_windows = get_window_periods(start,
                                          future_steps=future_steps,
                                          past_steps=past_steps)
    # Load data & model
    filename = req_dict.data
    country = req_dict.market
    category = req_dict.category
    skus = req_dict.sku
    df_skus, df_retailers = load_data(filename=filename,
                                      skus=skus,
                                      country=country,
                                      category=category)
    models = load_model()

    # --- SKU-level forecast (weekly + monthly) ---
    forecast_sku = _forecast_sku(df=df_skus,
                                 models=models,
                                 analysis_windows=analysis_windows,
                                 category=category,
                                 country=country)
    disp_sku_for_cols = [
        'sku', 'market', 'category', 'forecastWeek', 'forecastVolume'
    ]
    sku_forecast_week = forecast_sku['weeklyForecast']
    # NOTE(review): sku_list is computed but only referenced in commented-out
    # result keys below — dead unless those keys are restored.
    sku_list = (sku_forecast_week['sku'].unique()).tolist()
    sku_forecast_week = sku_forecast_week[disp_sku_for_cols].to_dict('records')
    # Reused name: monthly display columns replace the weekly ones.
    disp_sku_for_cols = [
        'sku', 'market', 'category', 'month', 'forecastVolume'
    ]
    sku_forecast_month = forecast_sku['monthlyForecast']
    sku_forecast_month = sku_forecast_month[disp_sku_for_cols].to_dict(
        'records')

    # --- Retailer+SKU forecast (weekly + monthly) ---
    forecast_retailer_sku = _forecast_retailer_sku(
        dfs=forecast_sku['weeklyForecast'],
        dfr=df_retailers,
        analysis_windows=analysis_windows,
        category=category,
        country=country)
    disp_ret_sku_for_cols = [
        'sku', 'forecastWeek', 'category', 'market',
        'accountPlanningGroupCode', 'retForecastVolume'
    ]
    ret_sku_forecast_week = forecast_retailer_sku['weeklyForecast']
    # NOTE(review): ret_list, like sku_list, feeds only commented-out keys.
    ret_list = (
        ret_sku_forecast_week['accountPlanningGroupCode'].unique()).tolist()
    # Copy taken now, BEFORE any column selection/renames, so the retailer
    # analysis call later receives the untouched weekly retailer forecast.
    ret_sku_analysis = forecast_retailer_sku['weeklyForecast'].copy()
    ret_sku_forecast_week = ret_sku_forecast_week[disp_ret_sku_for_cols]
    # NOTE(review): inplace rename on a column-sliced frame can raise
    # SettingWithCopyWarning; behavior kept as-is.
    ret_sku_forecast_week.rename(
        columns={'retForecastVolume': 'forecastVolume'}, inplace=True)
    ret_sku_forecast_week = ret_sku_forecast_week.to_dict('records')
    ret_sku_forecast_month = forecast_retailer_sku['monthlyForecast']
    disp_ret_sku_for_cols = [
        'sku', 'month', 'category', 'market', 'accountPlanningGroupCode',
        'retForecastVolume'
    ]
    ret_sku_forecast_month = ret_sku_forecast_month[disp_ret_sku_for_cols]
    ret_sku_forecast_month.rename(
        columns={'retForecastVolume': 'forecastVolume'}, inplace=True)
    ret_sku_forecast_month = ret_sku_forecast_month.to_dict('records')

    # --- SKU forecast-vs-actual analysis over the historical window ---
    forecast_sku_analysis = _forecast_sku_analysis(
        df=df_skus,
        models=models,
        analysis_windows=analysis_windows,
        category=category,
        country=country)
    _ft_sku_act = forecast_sku_analysis['weeklyForecast'].copy()
    # Round volumes to whole units before computing error metrics, so the
    # displayed metrics match the displayed volumes.
    _ft_sku_act['actualVolume'] = round(_ft_sku_act['actualVolume'], 0)
    _ft_sku_act['forecastVolume'] = round(_ft_sku_act['forecastVolume'], 0)
    _ft_sku_act['forecastError'] = _ft_sku_act.apply(lambda x: calc_error(
        actual=x['actualVolume'], forecast=x['forecastVolume']), axis=1)
    _ft_sku_act['ape'] = _ft_sku_act.apply(lambda x: calc_ape(
        actual=x['actualVolume'], forecast=x['forecastVolume']), axis=1)
    # Accuracy is defined as the complement of absolute percentage error.
    _ft_sku_act['accuracy'] = round((100 - _ft_sku_act['ape']), 0)
    _ft_sku_act['bias'] = _ft_sku_act.apply(lambda x: calc_bias(
        actual=x['actualVolume'], forecast=x['forecastVolume']), axis=1)
    _ft_sku_act['extremeBias'] = _ft_sku_act.apply(
        lambda x: calc_extreme_bias(x['actualVolume'], x['forecastVolume']),
        axis=1)
    overall_accuracy = round(_ft_sku_act['accuracy'].mean(), 0)
    # Total bias is computed by the helper (volume-weighted, presumably)
    # rather than the simple row mean kept below for reference.
    # overall_bias = round(_ft_sku_act['bias'].mean(), 0)
    overall_bias = calc_total_bias(_ft_sku_act)
    # NOTE(review): __sku_forecast is an alias (no copy) of
    # forecast_sku['weeklyForecast']; setting actualVolume=NaN here mutates
    # that shared frame. Safe only because all readers of it ran above.
    __sku_forecast = forecast_sku['weeklyForecast']
    __sku_forecast['actualVolume'] = np.nan
    # sku_analysis_df = pd.concat([_ft_sku_act, __sku_forecast])
    # Append % to ape, bias, accuracy (display copy only; _ft_sku_act keeps
    # numeric columns for the graph aggregation below).
    _ft_sku_act_disp = _ft_sku_act.copy()
    _ft_sku_act_disp['accuracy'] = _ft_sku_act_disp.apply(
        lambda x: (str(int(x['accuracy'])) + '%'), axis=1)
    _ft_sku_act_disp['bias'] = _ft_sku_act_disp.apply(
        lambda x: (str(int(x['bias'])) + '%'), axis=1)
    _ft_sku_act_disp['ape'] = _ft_sku_act_disp.apply(
        lambda x: (str(int(x['ape'])) + '%'), axis=1)
    # sku_analysis_df = pd.concat([__sku_forecast.sort_values(by='forecastWeek', ascending=False), _ft_sku_act])
    # Table = future forecast rows (newest week first) + historical rows with
    # the %-formatted metrics.
    sku_analysis_df = pd.concat([
        __sku_forecast.sort_values(by='forecastWeek', ascending=False),
        _ft_sku_act_disp
    ])
    sku_analysis_df.fillna('N/A', inplace=True)
    sku_analysis_df = sku_analysis_df.drop_duplicates()
    sku_analysis_df_dict = sku_analysis_df.to_dict('records')
    # # Total Graph
    # Weekly totals of actuals across all SKUs for the actuals series.
    act_graph_sales = _ft_sku_act.groupby(
        'forecastWeek', as_index=False)['actualVolume'].sum()
    act_graph_week = act_graph_sales['forecastWeek'].apply(
        lambda x: format_graph_week(x))
    act_graph_sales = act_graph_sales['actualVolume']
    # Split forecast in actual-forecast & future-forecast
    # (weeks strictly before `start` are historical; `start` onward is future)
    df_act_fr = sku_analysis_df[
        sku_analysis_df['forecastWeek'] < int(start)].copy()
    df_act_fr = df_act_fr.groupby('forecastWeek',
                                  as_index=False)['forecastVolume'].sum()
    fct_graph_week = df_act_fr['forecastWeek'].apply(
        lambda x: format_graph_week(x))
    fct_graph_sales = df_act_fr['forecastVolume']
    df_fut_fr = sku_analysis_df[
        sku_analysis_df['forecastWeek'] >= int(start)].copy()
    df_fut_fr = df_fut_fr.groupby('forecastWeek',
                                  as_index=False)['forecastVolume'].sum()
    fut_fr_graph_week = df_fut_fr['forecastWeek'].apply(
        lambda x: format_graph_week(x))
    fut_fr_graph_sales = df_fut_fr['forecastVolume']
    # fct_graph_sales = sku_analysis_df.groupby('forecastWeek', as_index=False)['forecastVolume'].sum()
    # fct_graph_week = fct_graph_sales['forecastWeek']
    # fct_graph_sales = fct_graph_sales['forecastVolume']
    # Graph payload: three {x: weeks, y: volumes} series as plain lists.
    dict_graph = {}
    dict_graph['actualVolume'] = {
        'x': np.array(act_graph_week).tolist(),
        'y': np.array(act_graph_sales).tolist()
    }
    dict_graph['forecastVolume'] = {
        'x': np.array(fct_graph_week).tolist(),
        'y': np.array(fct_graph_sales).tolist()
    }
    dict_graph['futureForecastVolume'] = {
        'x': np.array(fut_fr_graph_week).tolist(),
        'y': np.array(fut_fr_graph_sales).tolist()
    }
    ## REGION: Forecast Retailer SKU Analysis
    res_ret_sku_analysis = _forecast_retailer_sku_analysis(
        dfs=forecast_sku_analysis['weeklyForecast'],
        dfr=df_retailers,
        analysis_windows=analysis_windows,
        category=category,
        country=country,
        future=ret_sku_analysis)
    # res_ret_sku_analysis_dict = res_ret_sku_analysis['table'].to_dict('records')
    res_ret_sku_analysis_dict = res_ret_sku_analysis['table']
    # res_ret_sku_analysis_dict.rename(columns={'retForecastVolume': 'forecastVolume'}, inplace=True)
    res_ret_sku_analysis_dict = res_ret_sku_analysis_dict.to_dict('records')
    # Assemble the JSON-friendly response envelope.
    result = {
        'forecastResults': {
            'forecastSku': {
                'weeklyForecast': sku_forecast_week,
                'monthlyForecast': sku_forecast_month
                # 'skuList': sku_list
            },
            'forecastRetailerSku': {
                'weeklyForecast': ret_sku_forecast_week,
                'monthlyForecast': ret_sku_forecast_month
                # 'skuList': sku_list,
                # 'retailerList': ret_list
            },
            'forecastSkuAnalysis': {
                'table': sku_analysis_df_dict,
                'graph': dict_graph,
                'totalAccuracy': overall_accuracy,
                'totalBias': overall_bias
                # 'skuList': sku_list
            },
            'forecastRetailerSkuAnalysis': {
                'table': res_ret_sku_analysis_dict,
                'graph': {
                    'forecastAccuracy': res_ret_sku_analysis['accuracy_graph'],
                    'forecastBias': res_ret_sku_analysis['bias_graph']
                }
                # 'skuList': sku_list,
                # 'retailerList': ret_list
            }
        }
    }
    return result