def holtwinters_cv(data, tests, test_size, results, seasonal_periods):
    """Rolling-origin cross-validation with a Holt-Winters model.

    NOTE(review): relies on a module-level ``target_variable`` name and on
    ``ExponentialSmoothing`` being imported at file level — confirm both exist
    in the surrounding module.

    Args:
        data: DataFrame containing the target column.
        tests: number of CV folds (rolling origins).
        test_size: forecast horizon (rows) per fold.
        results: dict collecting predictions keyed by model name.
        seasonal_periods: season length for ExponentialSmoothing.

    Returns:
        ``results`` with a "holtwinters" entry: a flat list of non-negative
        predictions accumulated across all folds.
    """
    for test_number in range(1, tests + 1):
        # Training window grows by test_size rows on each successive fold.
        test_split = len(data) - test_size * (tests - test_number + 1)
        df_train = data[:test_split][[target_variable]]
        res = ExponentialSmoothing(
            df_train,
            seasonal_periods=seasonal_periods,
            trend="add",
            seasonal="add",
            damped=True,
        ).fit(use_boxcox=True)
        # for the first test take also the insample predictions
        if test_number == 1:
            # Negative forecasts are clamped to zero — target is presumably
            # non-negative (e.g. counts/demand); TODO confirm.
            predictions = [
                value if value > 0 else 0
                for value in res.fittedvalues[-test_size:]
            ] + [value if value > 0 else 0 for value in res.forecast(test_size)]
        else:
            # Later folds only append their out-of-sample forecasts.
            predictions = predictions + [
                value if value > 0 else 0 for value in res.forecast(test_size)
            ]
    results["holtwinters"] = predictions
    return results
def predict_air(city, date):
    """Forecast 7 days of AQI and PM2.5 for *city*.

    Args:
        city: city name passed to ``conn_mysql``.
        date: (start, end) pair used as the daily forecast index.

    Returns:
        ``[date strings, aqi forecasts, pm2.5 forecasts]`` with values
        rounded to two decimals.
    """
    raw = conn_mysql(city=city)
    frame = pd.DataFrame(raw, columns=["city", "date", "aqi", "pm2_5"])
    frame.index = pd.to_datetime(frame["date"].values, format="%Y-%m-%d")
    for col in ("aqi", "pm2_5"):
        frame[col] = frame[col].astype("float").round(decimals=2)
    frame = frame.sort_index()

    # Fit one Holt-Winters model per pollutant and forecast a week ahead.
    forecasts = {}
    for col in ("aqi", "pm2_5"):
        model = ExponentialSmoothing(
            np.asarray(frame[col]),
            seasonal_periods=12,
            trend="add",
            seasonal="add",
        ).fit()
        series = pd.Series(model.forecast(7))
        series.index = pd.date_range(date[0], date[1], freq="D")
        forecasts[col] = series

    return [
        forecasts["aqi"].index.strftime('%Y-%m-%d').tolist(),
        forecasts["aqi"].values.round(decimals=2).tolist(),
        forecasts["pm2_5"].values.round(decimals=2).tolist(),
    ]
def TrainSimple(train_data, real_data, prediction_days):
    """Fit three simple exponential smoothing models (alpha=0.1, optimized
    alpha, alpha=0.9), forecast ``prediction_days`` steps each, and plot the
    forecasts against the real data via ``plot_time_series``."""
    configs = [
        dict(smoothing_level=0.1, optimized=True),
        dict(optimized=True),
        dict(smoothing_level=0.9, optimized=True),
    ]
    fits = [ExponentialSmoothing(train_data).fit(**kw) for kw in configs]
    casts = [f.forecast(prediction_days) for f in fits]

    # Build the label -> [series, fit] mapping the plot helper expects.
    series_map = {r'Real Data': [real_data, None]}
    for idx, (f, fc) in enumerate(zip(fits, casts), start=1):
        label = r'%d.Simple Exponential, $\alpha=%0.2f$' % (
            idx, f.params['smoothing_level'])
        series_map[label] = [fc, f]

    plot_time_series(
        series_map,
        'Prediction of COVID-19 Cases in Kurdistan-Region,Iraq\n Using Simple Exponential Smoothing on Daily Cases Data of March-May 2020\n',
        'simple_models_default')
    # return [fit1, fit2, fit3]
def holtWinters_DES_forecast(self, series, forecast_range, model_type):
    """Double exponential smoothing (trend-only Holt-Winters) forecast.

    Fixes vs. original: ``elif model_type = 'mul'`` was a SyntaxError
    (assignment instead of comparison), and ``.forecast()`` was called on the
    ``fittedvalues`` Series (which has no such method) instead of on the
    fitted model results.

    Args:
        series: the time series to fit.
        forecast_range: number of steps to forecast.
        model_type: 'add' or 'mul' trend; any other value returns None.

    Returns:
        The forecast for ``forecast_range`` steps, or None for an
        unrecognised ``model_type`` (preserves original fall-through).
    """
    if model_type == 'add':
        fit = ExponentialSmoothing(series, trend='add').fit()
    elif model_type == 'mul':
        fit = ExponentialSmoothing(series, trend='mul').fit()
    else:
        return None
    return fit.forecast(forecast_range)
def holtWinters_TES_forecast(self, series, forecast_range: int, model_type):
    """Triple exponential smoothing (trend + seasonality) forecast.

    Fix vs. original: ``.forecast()`` was called on the ``fittedvalues``
    Series (no such method — AttributeError) instead of on the fitted model
    results object.

    Args:
        series: the time series to fit.
        forecast_range: number of steps to forecast.
        model_type: 'add' or 'mul'; any other value returns None.

    Returns:
        The forecast for ``forecast_range`` steps, or None for an
        unrecognised ``model_type`` (preserves original fall-through).
    """
    if model_type == 'add':
        fit = ExponentialSmoothing(series, trend='add', seasonal='add',
                                   seasonal_periods=12).fit()
    elif model_type == 'mul':
        fit = ExponentialSmoothing(series, trend='mul', seasonal='mul',
                                   seasonal_periods=12).fit()
    else:
        return None
    return fit.forecast(forecast_range)
def predict_pm2_5(): data = conn_mysql(city="南昌") # print(data) df = pd.DataFrame(data, columns=["city", "date", "aqi", "pm2_5"]) df.index = pd.to_datetime(df["date"].values, format="%Y-%m-%d") df["pm2_5"] = df["pm2_5"].astype("float").round(decimals=2) df = df.sort_index() # print(df["pm2_5"]) fit1 = ExponentialSmoothing(np.asarray(df["pm2_5"]), seasonal_periods=12, trend="add", seasonal="add").fit() res = fit1.forecast(7) # print(res) predict_data = pd.Series(res) predict_data.index = pd.date_range("20200101", "20200107", freq="D") print(predict_data) concat_data = pd.concat([df["pm2_5"], predict_data.round(decimals=2)]) rms = math.sqrt(mean_squared_error(df.pm2_5[2215:], predict_data.values)) print(rms) # # 绘图 plt.figure(figsize=(16, 8)) plt.plot(concat_data, label="Concat_Data", color="blue") # 拼接数据 plt.plot(df["pm2_5"], label="Original", color="red") # 原数据 plt.plot(predict_data, label="Holt_Winter", color="green") # 预测数据 plt.legend(loc="best") plt.title("南昌" + "市2014-2019年pm2_5折线图及对2020年的预测->RMSE:%.2f" % rms) plt.show()
def exponential_smoothing(data: pd.DataFrame, n_months: int):
    """Forecast prices with an additive-trend exponential smoothing model.

    (Docstring translated from Russian.) Trains on a window equal to two
    forecast periods (``n_months * 30 * 2`` days) and forecasts
    ``n_months * 30`` days ahead. The returned frame contains the in-sample
    fitted values followed by the future dates with forecast prices.

    Fix vs. original: ``DataFrame.append()`` was removed in pandas 2.0 —
    replaced with ``pd.concat``.

    Args:
        data: frame with 'date' and 'price' columns.
        n_months: forecast horizon in months (30-day months assumed).

    Returns:
        DataFrame with 'date' and 'price' columns (fitted + forecast).
    """
    # Training window: two forecast periods of history.
    prev_days = n_months * 30 * 2
    model = ExponentialSmoothing(data['price'].tail(prev_days), trend='add').fit()
    forecast = model.forecast(n_months * 30)
    # In-sample fitted values with their original dates.
    forecast_df = pd.DataFrame({
        'date': data['date'].tail(prev_days),
        'price': model.fittedvalues
    })
    next_day = forecast_df['date'].max() + pd.to_timedelta('1 days')
    periods = n_months * 30
    future_dates = pd.date_range(start=next_day, periods=periods, freq='D')
    forecast_df = pd.concat([
        forecast_df,
        pd.DataFrame({'date': future_dates, 'price': forecast}),
    ])
    return forecast_df
def holt_winter_method(s, pre_len, season_period, trend='add', seasonal='add'):
    """Fit a Holt-Winters model on series *s* and return a ``pre_len``-step
    forecast.

    Args:
        s: input series (coerced to a numpy array).
        pre_len: forecast horizon.
        season_period: seasonal period length.
        trend, seasonal: component types ('add'/'mul').
    """
    model = ExponentialSmoothing(
        np.asarray(s),
        seasonal_periods=season_period,
        trend=trend,
        seasonal=seasonal,
    )
    return model.fit().forecast(pre_len)
def holts_winter(input_df, kunag, matnr, n):
    """Rolling one-step-ahead Holt-Winters forecast over the last *n* points.

    Fixes vs. original: the body referenced a module-level ``df`` instead of
    the ``input_df`` parameter; removed an unused ``i = 0`` initialiser, an
    unused ``dd`` array and a discarded ``pd.DataFrame(lst)`` expression.

    Args:
        input_df: source frame for ``train_test_split``.
        kunag, matnr: customer / material keys for the split.
        n: number of rolling evaluation points.

    Returns:
        (y_hat_avg, rms, mae): frame with a 'pred_column' of predictions,
        the RMSE and the MAE against the held-out quantities.
    """
    lst = []
    test1 = train_test_split(input_df, kunag, matnr, n)[1]
    y_hat_avg = test1.copy()
    for i in range(n, 0, -1):
        train, test = train_test_split(input_df, kunag, matnr, i)
        fit1 = ExponentialSmoothing(
            np.asarray(train['quantity']),
            seasonal_periods=4,
            trend='add',
            seasonal='add',
        ).fit()
        y_hat_avg['Holt_Winter'] = fit1.forecast(len(test1))
        pred = y_hat_avg['Holt_Winter']
        # Keep only the final forecasted point of each rolling fit.
        lst.append(pred.iloc[-1])
    y_hat_avg['pred_column'] = lst
    plt.figure(figsize=(12, 8))
    plt.plot(train.set_index("date")['quantity'], label='Train', marker='.')
    plt.plot(test1.set_index("date")['quantity'], label='Test', marker='.')
    plt.plot(y_hat_avg.set_index("date")['pred_column'], label='Holts Winter', marker='.')
    plt.legend(loc='best')
    plt.title("Holts Winter")
    plt.show()
    rms = sqrt(mean_squared_error(test1.quantity, y_hat_avg.pred_column))
    mae = mean_absolute_error(test1.quantity, y_hat_avg.pred_column)
    del y_hat_avg['Holt_Winter']
    return y_hat_avg, rms, mae
class Holtwinter(Modelling):
    """Holt-Winters (ExponentialSmoothing) wrapper on top of ``Modelling``.

    NOTE(review): ``grid()`` ignores its ``trend`` argument and hard-codes the
    search dictionary; 'damped' values and 'seasonal_periods' are strings —
    presumably ``expand_grid`` or the caller converts them; confirm.
    """

    def __init__(self, data, forecastMessure, seasonal_periods, WStRMSEOpt,
                 WMAPEOpt, product):
        # Delegate shared setup to the base class, then keep local copies of
        # every configuration value used by fit()/forecast().
        Modelling.__init__(self, data, forecastMessure)
        self.data = data
        self.forecastMessure = forecastMessure
        self.seasonal_periods = seasonal_periods
        self.WStRMSEOpt = WStRMSEOpt
        self.WMAPEOpt = WMAPEOpt
        self.product = product

    def grid(self, trend):
        """Build the hyper-parameter combination grid via ``expand_grid``."""
        self.dictionary = {
            'trend': ['add'],
            'seasonal': ['add', 'mul', 'additive', 'multiplicative'],
            'damped': ['True', 'False'],
            'seasonal_periods': '12'
        }
        self.data_treat_comb = expand_grid(self.dictionary)
        return self.data_treat_comb

    def fit(self, train, trend, seasonal):
        """Fit Holt-Winters on the target column of *train* (Box-Cox on)."""
        self.fit1 = ExponentialSmoothing(
            np.asarray(train[[self.forecastMessure]]),
            seasonal_periods=self.seasonal_periods,
            trend=trend,
            seasonal=seasonal).fit(use_boxcox=True)
        return self.fit1

    def forecast(self, forecastdays=10):
        """Forecast ``forecastdays`` steps from the last fitted model."""
        pred = self.fit1.forecast(forecastdays)
        return pred
def holt_winter(data, col, train, test, sp, t, s, frequency):
    """Holt-Winters forecast over the test horizon; prints RMSE and saves a
    plot to ``<frequency>holtswinter.png``.

    Args:
        data: entire data frame (unused here, kept for interface parity).
        col: target column name.
        train: training frame.
        test: test frame.
        sp: seasonality period.
        t: trend type ('add'/'mul').
        s: seasonal type ('add'/'mul').
        frequency: filename prefix for the saved figure.
    """
    predictions = test.copy()
    model = ExponentialSmoothing(
        np.asarray(train[col]),
        seasonal_periods=sp,
        trend=t,
        seasonal=s,
    ).fit()
    predictions['Holt_Winter'] = model.forecast(len(test))

    # Report error against the held-out data.
    rms = rmse(test[col], predictions.Holt_Winter)
    print('RMSE', rms)

    # Plot train/test/forecast and persist the figure.
    plt.figure(figsize=(16, 8))
    plt.plot(train[col], label='Train')
    plt.plot(test[col], label='Test')
    plt.plot(predictions['Holt_Winter'], label='Holt_Winter')
    plt.legend(loc='best')
    plt.savefig(frequency + 'holtswinter.png')
def HLM_winter_model(train, test):
    """Fit a multiplicative Holt-Winters model (period 365) on *train*,
    forecast ``len(test)`` steps, plot the result, print the parameter table
    and run ``modelverification``. Returns the fitted results object."""
    # alpha = smoothing_level and beta = smoothing slope
    fitted = ExponentialSmoothing(train, seasonal_periods=365, trend='mul',
                                  seasonal='mul', damped=False).fit(
                                      optimized=True, use_boxcox=False,
                                      remove_bias=True)
    fcast = fitted.forecast(len(test))

    plt.figure(figsize=(18, 8))
    for series, label, colour in ((train, 'train data', 'black'),
                                  (test, 'test data', 'green'),
                                  (fcast, 'forecast', 'red')):
        plt.plot(series, label=label, color=colour)
    plt.legend(loc='best')
    plt.title('Load Forecast using HLM winter Method', fontsize=15)
    plt.xlabel('day----->')
    plt.ylabel('Consumption in Mwh')
    plt.show()

    # Parameter table: six smoothing parameters plus the SSE.
    labels = [r"$\alpha$", r"$\beta$", r"$\phi$", r"$\gamma$", r"$l_0$",
              "$b_0$", "SSE"]
    results = pd.DataFrame(index=labels)
    params = ['smoothing_level', 'smoothing_slope', 'damping_slope',
              'smoothing_seasonal', 'initial_level', 'initial_slope']
    results["Additive"] = [fitted.params[p] for p in params] + [fitted.sse]
    print(results)
    print("Verification of HLM winter Forecasting Model")
    modelverification(fitted, fcast, test)
    return (fitted)
def pca_ets(data, data_test, dept, seasonal='add'):
    """Yield per-store ETS forecasts of the PCA-reconstructed series for *dept*.

    Fixes vs. original: the bare ``except:`` (which also swallows
    KeyboardInterrupt/SystemExit) is narrowed to ``except Exception``; an
    unused ``idx = pca_data.columns`` local was dropped.

    Args:
        data: training data for ``pca_decomposition``.
        data_test: test frame with 'Dept' and 'Store' columns.
        dept: department id to forecast.
        seasonal: seasonal component type passed to ExponentialSmoothing.

    Yields:
        DataFrame of forecasts (reset index) with 'Store'/'Dept' columns.
    """
    name = 'pca_ets'
    pca_data = pca_decomposition(data, dept)
    condition = data_test['Dept'] == dept
    for store in data_test[condition]['Store'].unique():
        try:
            print('predict store:', store)
            fcst_len = get_fcst_len(store, dept, data, data_test)
            ts = pca_data.loc[:, store]
            fit = ExponentialSmoothing(ts, seasonal=seasonal,
                                       seasonal_periods=52,
                                       trend=None).fit(optimized=True,
                                                       remove_bias=True)
            fcst = fit.forecast(fcst_len)
            fcst_df = pd.DataFrame(fcst)
            fcst_df['Store'] = store
            fcst_df['Dept'] = dept
            send_message(store, name, store=True, fail=False)
            yield fcst_df.reset_index()
        except Exception:
            # Best-effort per store: report the failure (slack) and continue.
            print(' fail store {} '.format(store))
            send_message(store, name, store=True, fail=True)
def forecast_hwes(og_df):
    """Forecast daily counts with Holt-Winters (weekly seasonality) and
    return ``[projected peak date, projected peak quantity]`` as strings.
    Returns ``[0, 0]`` when there is at most one input row."""
    if len(og_df) <= 1:
        return [0, 0]

    daily = aggregate_by_day(og_df.copy())
    # Extend the index over the forecast horizon defined by create_split.
    horizon = daily.copy().reindex(create_split(daily))
    y_hat_avg = horizon.copy()

    fitted = ExponentialSmoothing(
        np.asarray(daily['Count']),
        seasonal_periods=7,
        trend='add',
        seasonal='add',
    ).fit()
    y_hat_avg['Holt_Winter'] = fitted.forecast(len(horizon))

    plt.figure(figsize=(16, 8))
    plt.plot(daily['Count'], label='Train')
    plt.plot(y_hat_avg['Holt_Winter'], label='Holt Winter')
    plt.legend(loc='best')
    # plt.show()

    # Peak of the forecast: its date (index) and quantity (value).
    peak_idx = y_hat_avg['Holt_Winter'].idxmax()
    return [str(peak_idx), str(y_hat_avg.loc[peak_idx, 'Holt_Winter'])]
def WMES(time_serie, order, n_point):
    """Holt-Winters forecast (additive trend, multiplicative seasonality,
    Box-Cox transform) of ``n_point`` steps for ``time_serie`` with seasonal
    period ``order``."""
    model = ExponentialSmoothing(
        time_serie,
        seasonal_periods=order,
        trend='add',
        seasonal='mul',
    )
    fitted = model.fit(use_boxcox=True)
    return fitted.forecast(n_point)
def HOLTS_WINTER():
    """Fit four Holt-Winters variants (period 4, additive trend) on the
    module-level ``df.sales_result`` and plot the undamped additive- and
    multiplicative-seasonality fits plus their 12-step forecasts."""
    def fit_variant(seasonal, damped=False):
        # All variants share the series, period and additive trend.
        return ExponentialSmoothing(df.sales_result, seasonal_periods=4,
                                    trend='add', seasonal=seasonal,
                                    damped=damped).fit(use_boxcox=True)

    fit1 = fit_variant('add')
    fit2 = fit_variant('mul')
    fit3 = fit_variant('add', damped=True)   # fitted as in original; not plotted
    fit4 = fit_variant('mul', damped=True)   # fitted as in original; not plotted

    fit1.fittedvalues.plot(style='--', color='red')
    fit2.fittedvalues.plot(style='--', color='green')
    fit1.forecast(12).plot(style='--', marker='o', color='red', legend=True)
    fit2.forecast(12).plot(style='--', marker='o', color='green', legend=True)
    plt.show()
    print(
        "Forecasting sales of properties using Holt-Winters method with both additive and multiplicative seasonality.")
def holt_winters(train, test, value, seasons):
    """Return the RMSE of a Holt-Winters forecast of column *value* over the
    test set, with seasonal period *seasons*."""
    scored = test.copy()
    model = ExponentialSmoothing(
        np.asarray(train[value]),
        seasonal_periods=seasons,
        trend='add',
        seasonal='add',
    ).fit()
    scored['Holt_Winter'] = model.forecast(len(test))
    return sqrt(mean_squared_error(test[value], scored.Holt_Winter))
def HoltWinters(ts, hor, sp, trd, sea):
    """Forecast ``hor`` steps of ``ts`` with Holt-Winters.

    Args:
        ts: input time series.
        hor: forecast horizon.
        sp: seasonal period length.
        trd: trend type ('add'/'mul'/None).
        sea: seasonal type ('add'/'mul'/None).
    """
    model = ExponentialSmoothing(ts, seasonal_periods=sp, trend=trd, seasonal=sea)
    return model.fit().forecast(hor)
def Holt_Winters(paramsList=['pollution.csv', '0.93','pm', 'humidity', 'date'], specialParams=['7']):
    """Read a CSV, fit Holt-Winters per value column, plot, and write
    train+forecast rows to 'result.csv'.

    NOTE(review): ``paramsList`` is a mutable default argument (shared across
    calls) — callers should pass their own list. The writes into ``train``/
    ``test`` slices of ``df`` are chained assignments (SettingWithCopy risk).
    The predict() start/end dates "2014/7/3".."2014/9/21" are hard-coded to
    one dataset — confirm before reuse.

    Args:
        paramsList: [csv path, train fraction as string, value columns...,
                     date column name].
        specialParams: [seasonal period as string].
    """
    path = paramsList[0]
    trainRows = float(paramsList[1])
    saveto = 'result.csv'
    # Only the value columns plus the date column are loaded.
    df = pd.read_csv(path, usecols=paramsList[2:])
    allRows = df.shape[0]
    season = specialParams[0]
    train = df[0:int(allRows*trainRows)]
    test = df[int(allRows*trainRows)+1:]
    df['Timestamp'] = pd.to_datetime(df[paramsList[-1]], format='%Y/%m/%d %H:%M')
    df.index = df['Timestamp']
    df = df.resample('D').mean()
    train['Timestamp'] = pd.to_datetime(train[paramsList[-1]], format='%Y/%m/%d %H:%M')
    train.index = train['Timestamp']
    train = train.resample('D').mean()
    test['Timestamp'] = pd.to_datetime(test[paramsList[-1]], format='%Y/%m/%d %H:%M')
    test.index = test['Timestamp']
    test = test.resample('D').mean()
    y_hat = test.copy()
    nullArray = train.copy()
    nullArray['time'] = train.index
    # Everything above is generic / reusable (translated from: 以上可通用)
    for i in range(2,len(paramsList)-1):
        print("进入循环")
        fit1 = ExponentialSmoothing(np.asarray(train[paramsList[i]]), seasonal_periods=int(season), trend='add', seasonal='add').fit()
        y_hat[paramsList[i]] = fit1.predict(start="2014/7/3", end="2014/9/21")
        y_hat[paramsList[i]] = round(y_hat[paramsList[i]],2)
        print("结束fit1")
        rms = sqrt(mean_squared_error(test[paramsList[i]], y_hat[paramsList[i]]))
        print(rms)
        y_hat['Holt_Winter'] = fit1.forecast(len(test))
        plt.figure(figsize=(16, 8))
        plt.plot(train[paramsList[i]], label='Train')
        plt.plot(test[paramsList[i]], label='Test')
        plt.plot(y_hat[paramsList[i]], label='Holt_Winter')
        plt.legend(loc='best')
        plt.show()
    y_hat['time'] = test.index
    # Stack the untouched training rows on top of the predictions, then
    # blank out the training-range values before writing the CSV.
    yhat_naive = np.array(y_hat)
    nArray = np.array(nullArray)
    newArray = np.concatenate((nArray,yhat_naive),axis=0)
    s = pd.DataFrame(newArray, columns=paramsList[2:])
    for i in range(2,len(paramsList)-1):
        s[paramsList[i]][0:int(len(s)*trainRows)] = ""
    s.to_csv(saveto,index=False,header=True,float_format='%.2f')
    # NOTE(review): the triple-quote below opens a string that is not closed
    # within this view — presumably terminated later in the original file;
    # as seen here it would be a SyntaxError. Verify against the full file.
    '''
def predUsingRollingHorizon(realSeriesVal, frequency=12, startPos=60):
    """One-step-ahead rolling-horizon Holt-Winters predictions.

    For each position ``pos`` in ``[startPos, len(series)]`` a model is fitted
    on the first ``pos`` points and its one-step forecast recorded.

    Args:
        realSeriesVal: full observed series.
        frequency: seasonal period length.
        startPos: first training-window length.

    Returns:
        numpy array of one-step forecasts, one per rolling position.
    """
    upper = len(realSeriesVal) + 1
    preds = np.zeros(upper - startPos)
    for pos in range(startPos, upper):
        model = ExponentialSmoothing(
            realSeriesVal[0:pos],
            seasonal_periods=frequency,
            trend='add',
            seasonal='add',
        ).fit(use_boxcox=True)
        preds[pos - startPos] = model.forecast(1)[0]
    return preds
def fit_model(self, n_predict):
    """Fit Holt-Winters (additive trend + seasonality, period 4, Box-Cox)
    on ``self.train``; store and return an ``n_predict``-step forecast frame
    with 'ds' (from ``self.ds_test``) and 'yhat' columns."""
    smoother = ExponentialSmoothing(
        self.train,
        seasonal_periods=4,
        trend='add',
        seasonal='add',
    ).fit(use_boxcox=True)
    predicted = smoother.forecast(n_predict)
    self.forecast = pd.DataFrame({"ds": self.ds_test, "yhat": predicted})
    return self.forecast
def exponentialSmoothing(x, y, save_fn):
    """Produce a one-step Holt-Winters forecast for every series in *x* and
    persist the predictions (with labels *y*) via *save_fn* to
    'rate_exponentialSmoothing.txt'."""
    pred = [
        ExponentialSmoothing(
            series,
            seasonal_periods=3,
            trend='add',
            seasonal='add',
        ).fit().forecast(1)
        for series in x
    ]
    save_fn('rate_exponentialSmoothing.txt', np.array(pred), y)
def compute_predictions(self):
    """Forecast ``self.waiting_period`` points of the 'Rates' column into
    ``self.predicted_data``; a no-op when ``self.data`` is missing/empty."""
    if self.data is None or self.data.empty:
        return
    rates = np.asarray(self.data['Rates'])
    fit = ExponentialSmoothing(
        rates,
        seasonal_periods=5,
        trend='add',
        seasonal='add',
    ).fit()
    self.predicted_data = list(fit.forecast(self.waiting_period))
def winter_first():
    """Render Holt forecasts for three CSV datasets on the index template.

    Refactor vs. original: the identical load/fit/forecast/plot block was
    repeated three times (count_people, money, passagers); it is extracted
    into a local helper. Files read, models fitted, ``setGraf*`` calls and
    the returned template are unchanged.
    """
    import matplotlib.pyplot as plt
    import statsmodels.api as sm
    from statsmodels.tsa.api import ExponentialSmoothing, SimpleExpSmoothing, Holt

    def forecast_series(csv_name, set_graf):
        # Load the series, append a 5-step Holt (additive trend) forecast to
        # the historical values, and hand the combined list to the plot setter.
        dataset = pd.read_csv(csv_name)
        train = dataset
        data = [int(v) for v in dataset['col']]
        test = dataset[-5:]
        y_hat_avg = test.copy()
        fit1 = ExponentialSmoothing(np.asarray(train['col']), trend='add').fit()
        y_hat_avg['Holt_linear'] = fit1.forecast(len(test))
        data.extend(int(v) for v in y_hat_avg['Holt_linear'])
        set_graf(data)

    forecast_series('count_people.csv', setGraf12)
    forecast_series('money.csv', setGraf13)
    forecast_series('passagers.csv', setGraf14)

    return render_template('index.html',
                           first_graf_link="/Winter_first",
                           second_graf_link="/Winter_second",
                           title="Holt-Winter")
def holt_winters():
    """Demo: fit Holt-Winters to a sampled seasonal random walk, print the
    estimated parameters, and plot forecasts and model components.

    Fix vs. original: ``pd.Series.append()`` was removed in pandas 2.0 —
    replaced with ``pd.concat``.
    """
    # N points total, fit on the first t+1, season length m.
    N, t, m = 100, 80, 4
    realisations = pd.Series(list(sample_seasonal_random_walk(N, m)), range(N))
    mod = ExponentialSmoothing(realisations[:t+1], seasonal_periods=4,
                               trend='add', seasonal='add').fit(optimized=True)
    params = ['smoothing_level', 'smoothing_slope', 'smoothing_seasonal',
              'initial_level', 'initial_slope']
    results = pd.DataFrame(index=["alpha", "beta", "gamma", "l_0", "b_0", "SSE"],
                           columns=["Holt-Winters'"])
    results["Holt-Winters'"] = [mod.params[p] for p in params] + [mod.sse]
    print(results)
    forecasts = mod.forecast(N - (t + 1)).rename(r'$\alpha=0.5$ and $\beta=0.5$')
    # NaN padding aligns the forecast with the tail of the realisations.
    plot(realisations, pd.concat([pd.Series(np.nan, range(t + 1)), forecasts]))
    plot_components(mod)
    py.show()
def holtWinters_TES(self, train, test, trend, seasonal):
    """Compare additive vs. multiplicative triple exponential smoothing on
    the test set and record the better model when it beats ``self.rmse``.

    Fixes vs. original: ``if:`` was a SyntaxError (dangling colon);
    ``.forecast()`` was called on the ``fittedvalues`` Series instead of on
    the fit results; ``seasonal == True`` simplified to truthiness.
    """
    if trend == 'present' and seasonal:
        fit_add = ExponentialSmoothing(train, trend='add', seasonal='add',
                                       seasonal_periods=12).fit()
        tes_add_pred = fit_add.forecast(len(test))
        rmse_tes_add = rootMeanSquaredError(test, tes_add_pred)

        fit_mul = ExponentialSmoothing(train, trend='mul', seasonal='mul',
                                       seasonal_periods=12).fit()
        tes_mul_pred = fit_mul.forecast(len(test))
        rmse_tes_mul = rootMeanSquaredError(test, tes_mul_pred)

        # Keep whichever variant wins, but only if it improves on the
        # best RMSE recorded so far on self.
        if rmse_tes_add < rmse_tes_mul:
            if rmse_tes_add < self.rmse:
                self.rmse = rmse_tes_add
                self.__model__ = 'holtWinters_TES_add'
                self.__model_type__ = 'add'
        else:
            if rmse_tes_mul < self.rmse:
                self.rmse = rmse_tes_mul
                self.__model__ = 'holtWinters_TES_mul'
                self.__model_type__ = 'mul'
def holt_winters_method(train, test, value, seasons):
    """Return the MAPE of a Holt-Winters forecast of column *value* over the
    test set, using seasonal period *seasons*."""
    scored = test.copy()
    model = ExponentialSmoothing(
        np.asarray(train[value]),
        seasonal_periods=seasons,
        trend='add',
        seasonal='add',
    ).fit()
    scored['Holt_Winter'] = model.forecast(len(test))
    return mean_abs_percentage_error(test[value], scored.Holt_Winter)
def holt_winters():
    """Fit four Holt-Winters variants (period 365, additive trend) on the
    module-level ``origin_series`` and plot the additive-seasonality fits
    (plain and damped) together with their 1000-step forecasts."""
    forecast_steps = 1000

    def build(seasonal, damped=False, **fit_kwargs):
        # Shared series, period and additive trend across all variants.
        model = ExponentialSmoothing(origin_series, seasonal_periods=365,
                                     trend='add', seasonal=seasonal,
                                     damped=damped)
        return model.fit(use_boxcox=True, **fit_kwargs)

    fit1 = build('add')
    fit2 = build('mul')                                   # fitted as in original; not plotted
    fit3 = build('add', damped=True, damping_slope=0.8)
    fit4 = build('mul', damped=True, damping_slope=0.8)   # fitted as in original; not plotted

    fit1.fittedvalues.plot(style='--', color='red', figsize=figsize)
    fit3.fittedvalues.plot(style='--', color='green', figsize=figsize)
    fit1.forecast(forecast_steps).plot(style='--', marker=None, color='red',
                                       figsize=figsize, legend=True, linewidth=1)
    fit3.forecast(forecast_steps).plot(style='--', marker=None, color='green',
                                       legend=True, linewidth=1, figsize=figsize)
    plt.show()
def make_forecast(incidents: List[Incident]):
    """Makes an incident forecast.

    Groups incidents by month, fits a 12-period Holt-Winters model on the
    monthly counts, and returns 12 months of predicted counts.

    Args:
        incidents: incidents to aggregate; ``month_grouper`` presumably maps
            each to its month-end date — confirm against its definition.

    Returns:
        (categories, predicted_counts): month labels ('YYYY-MM') and
        non-negative integer forecasts; two empty lists when there is not
        enough data (<= 3 monthly rows) or fitting fails.
    """
    incidents_sorted = sorted(incidents, key=month_grouper)
    dataframe_dict = {"ds": [], "y": []}
    # One row per month: the month key and the number of incidents in it.
    for (last_day, items) in groupby(incidents_sorted, month_grouper):
        dataframe_dict["ds"].append(str(last_day))
        dataframe_dict["y"].append(len(list(items)))

    dataframe = pd.DataFrame.from_dict(dataframe_dict)

    if dataframe.empty:
        return [], []

    # reset index to by month and drop month column
    dataframe.index = dataframe.ds
    dataframe.index.freq = "M"
    dataframe.drop("ds", inplace=True, axis=1)

    # fill periods without incidents with 0
    idx = pd.date_range(dataframe.index[0], dataframe.index[-1], freq="M")
    dataframe.index = pd.DatetimeIndex(dataframe.index)
    dataframe = dataframe.reindex(idx, fill_value=0)

    row_count, _ = dataframe.shape
    if row_count > 3:
        try:
            forecaster = ExponentialSmoothing(dataframe,
                                              seasonal_periods=12,
                                              trend="add",
                                              seasonal="add").fit()
        except Exception as e:
            # Fitting can fail for short/degenerate series; treat as no forecast.
            log.warning(f"Issue forecasting incidents: {e}")
            return [], []
        forecast = forecaster.forecast(12)
        forecast_df = pd.DataFrame({
            "ds": forecast.index.astype("str"),
            "yhat": forecast.values
        })
        forecast_data = forecast_df.to_dict("series")
        # drop day data: 'YYYY-MM-DD' -> 'YYYY-MM'
        categories = [d[:-3] for d in forecast_data["ds"]]
        # Counts are rounded up and floored at zero.
        predicted_counts = [
            max(math.ceil(x), 0) for x in list(forecast_data["yhat"])
        ]
        return categories, predicted_counts
    else:
        return [], []
def predict(self):
    """Forecast daily inventory with Holt-Winters and write plot + CSV report.

    NOTE(review): input/output paths are absolute and machine-specific;
    the writes into the ``train``/``test`` row slices are chained assignments
    (SettingWithCopy risk) — behavior kept as-is, flagged for follow-up.
    """
    print("Prediction started")
    df = pd.read_csv(
        'C:/Users/gopasali/PycharmProjects/PyExcersice/prediction/inventory/daily_inventory.csv'
    )
    # 90/10 chronological split.
    x = int((len(df) / 100) * 90)  # 4717
    train = df[0:x]
    test = df[x:]
    # Aggregating the dataset at daily level
    test['report_date'] = pd.to_datetime(test.report_date,
                                         format='%Y-%m-%d %H:%M:%S')
    test.index = test.report_date
    test = test.resample('D').mean()
    train['report_date'] = pd.to_datetime(train.report_date,
                                          format='%Y-%m-%d %H:%M:%S')
    train.index = train.report_date
    train = train.resample('D').mean()
    y_hat_avg = test.copy()
    # 2,6
    # with pd.option_context('display.max_rows', None, 'display.max_columns',
    #                        None):  # more options can be specified also
    #     print(train)
    # Weekly seasonality (period 7) with additive trend/seasonal components.
    fit1 = ExponentialSmoothing(np.asarray(
        train['physical_inventory_quantity_mt']),
                                seasonal_periods=7,
                                trend='add',
                                seasonal='add').fit()
    y_hat_avg['Prediction_Count'] = fit1.forecast(len(test))
    plt.figure(figsize=(12, 5))
    plt.plot(train['physical_inventory_quantity_mt'], label='History')
    plt.plot(test['physical_inventory_quantity_mt'], label='Actual')
    plt.plot(y_hat_avg['Prediction_Count'], label='Prediction_Count')
    plt.legend(loc='best')
    plt.savefig(
        "C:/Users/gopasali/PycharmProjects/PyExcersice/prediction/web/static/prediction_plot.jpeg"
    )
    # RMSE of the forecast against the held-out daily means.
    rms = math.sqrt(
        mean_squared_error(test.physical_inventory_quantity_mt,
                           y_hat_avg.Prediction_Count))
    print(rms)
    prediction_results = pd.concat([y_hat_avg])
    prediction_results.to_csv(
        "C:/Users/gopasali/PycharmProjects/PyExcersice/prediction/prediction_output/prediction_report.csv"
    )
    print("Prediction ended")