def test_exponentialSmoothing_15(self):
    """PMML export of a multiplicative-trend, non-seasonal model must validate."""
    ts_data = self.statsmodels_data_helper.getData2()
    f_name = 'exponential_smoothing15.pmml'
    model_obj = ExponentialSmoothing(
        ts_data,
        trend='mul',
        damped=False,
        seasonal=None,
        seasonal_periods=2,
    )
    results_obj = model_obj.fit(optimized=True)
    # Export and check the produced file against the PMML schema.
    ExponentialSmoothingToPMML(ts_data, model_obj, results_obj, f_name)
    self.assertEqual(True, self.schema.is_valid(f_name))
def test_damping_slope_zero():
    """A vanishingly small damping slope must behave like an exactly-zero one."""
    endog = np.arange(10)
    mod = ExponentialSmoothing(endog, trend='add', damped=True)
    expected = np.r_[0., np.arange(9)]

    res1 = mod.fit(smoothing_level=1, smoothing_slope=0.0, damping_slope=1e-20)
    pred1 = res1.predict(start=0)
    assert_allclose(pred1, expected, atol=1e-10)

    res2 = mod.fit(smoothing_level=1, smoothing_slope=0.0, damping_slope=0)
    pred2 = res2.predict(start=0)
    assert_allclose(pred2, expected, atol=1e-10)

    # Both parameterizations must agree with each other as well.
    assert_allclose(pred1, pred2, atol=1e-10)
def ETSregression(history, config):
    """Fit an exponential-smoothing model on *history*.

    config is a 6-tuple: (trend, damped, seasonal, seasonal_periods,
    use_boxcox, remove_bias).  Returns the fitted results object.
    """
    trend, damped, seasonal, periods, boxcox, bias = config
    series = np.array(history)
    model = ExponentialSmoothing(series, trend=trend, damped=damped,
                                 seasonal=seasonal, seasonal_periods=periods)
    return model.fit(optimized=True, use_boxcox=boxcox, remove_bias=bias)
def difference_predict(input_array):
    """Return absolute differences between smoothed predictions and input.

    For each index i >= 1 the prediction at i is compared with the input at
    i - 1 (presumably because the in-sample predictions are shifted by one
    step — TODO confirm against the caller).

    Parameters
    ----------
    input_array : sequence of float

    Returns
    -------
    list of float of length len(predictions) - 1
    """
    es = ExponentialSmoothing(input_array)
    model = es.fit()
    predict_array = model.predict()
    # BUG FIX: the original used Python 2 ``print x`` statements, which are
    # a SyntaxError under Python 3; converted to the print() function.
    print(input_array)
    print(predict_array)
    difference_array = []
    for i in range(1, len(predict_array)):
        difference_array.append(abs(predict_array[i] - input_array[i - 1]))
    return difference_array
def generate_fit(self):
    """Fit four Holt-Winters variants on the training counts.

    Returns (additive, multiplicative, additive-damped, multiplicative-damped)
    fitted results, all with an additive trend.
    """
    series = np.asarray(self.train['Count'])
    periods = self.seasonal_periods
    variants = (('add', False), ('mul', False), ('add', True), ('mul', True))
    fits = tuple(
        ExponentialSmoothing(series, seasonal_periods=periods, trend='add',
                             seasonal=seasonal, damped=damped).fit()
        for seasonal, damped in variants
    )
    return fits
def get_smooth_predictions(self, candles, prediction_length):
    """Iteratively forecast ``prediction_length`` one-step-ahead values.

    Each prediction is appended to a working copy of the series and the
    model is refit before the next step.

    Parameters
    ----------
    candles : list of float
        Historical values; NOT mutated.
    prediction_length : int
        Number of future steps to produce.

    Returns
    -------
    list of float
    """
    results = []
    # BUG FIX: the original aliased ``data = candles`` and then appended
    # predictions, silently growing the caller's list.  Work on a copy.
    data = list(candles)
    for _ in range(prediction_length):
        model = ExponentialSmoothing(data, trend="add",
                                     initialization_method="estimated")
        model_fit = model.fit()
        predict = model_fit.predict()[0]
        results.append(predict)
        data.append(predict)
    return results
def trainHoltsWinterForecastingMethodModel():
    """Fit a fully multiplicative Holt-Winters model on the training
    passenger series and persist it."""
    X_train = readHoltsWinterForecastingMethodXTrain()
    # Monthly data -> 12-period seasonality.
    fitted = ExponentialSmoothing(
        X_train["Thousands of Passengers"],
        trend="mul",
        seasonal="mul",
        seasonal_periods=12,
    ).fit()
    saveHoltsWinterForecastingMethodModel(fitted)
def _fit_one_config(ts_array, trend, seasonal, seasonal_period, damped):
    """Fit one ETS configuration.

    Returns the fitted results object, or None when the combination is
    unsupported (NotImplementedError) or numerically unstable
    (FloatingPointError).
    """
    try:
        return ExponentialSmoothing(
            ts_array,
            trend=trend,
            damped=damped,
            seasonal=seasonal,
            seasonal_periods=seasonal_period,
        ).fit(optimized=True)
    except (NotImplementedError, FloatingPointError):
        return None
def test_hw_seasonal(self):
    """Holt-Winters forecasts for several trend/seasonal combinations must
    match the reference values (to 2 decimal places)."""
    def hw(**kwargs):
        # All fits share the same series and 4-period seasonality.
        return ExponentialSmoothing(self.aust, seasonal_periods=4, **kwargs)

    fit1 = hw(trend='additive', seasonal='additive').fit(use_boxcox=True)
    fit2 = hw(trend='add', seasonal='mul').fit(use_boxcox=True)
    fit3 = hw(seasonal='add').fit(use_boxcox=True)
    fit4 = hw(seasonal='mul').fit(use_boxcox=True)
    fit5 = hw(trend='mul', seasonal='add').fit(use_boxcox='log')
    fit6 = hw(trend='multiplicative', seasonal='multiplicative').fit(use_boxcox='log')

    assert_almost_equal(fit1.forecast(8),
                        [61.34, 37.24, 46.84, 51.01, 64.47, 39.78, 49.64, 53.90], 2)
    assert_almost_equal(fit2.forecast(8),
                        [60.97, 36.99, 46.71, 51.48, 64.46, 39.02, 49.29, 54.32], 2)
    assert_almost_equal(fit3.forecast(8),
                        [59.91, 35.71, 44.64, 47.62, 59.91, 35.71, 44.64, 47.62], 2)
    assert_almost_equal(fit4.forecast(8),
                        [60.71, 35.70, 44.63, 47.55, 60.71, 35.70, 44.63, 47.55], 2)
    assert_almost_equal(fit5.forecast(1), [78.53], 2)
    assert_almost_equal(fit6.forecast(1), [54.82], 2)
def ets(demand, validation_points, trend, seasonal):
    """Grid-search ETS trend/seasonal combinations by rolling-origin
    cross-validation, then refit the winner on the full series.

    Parameters
    ----------
    demand : sequence of float
        The demand history.
    validation_points : int
        Number of points reserved for validation splits.
    trend, seasonal : iterables
        Candidate component specifications to try.

    Returns
    -------
    [best_params, best_cv_error, best_fit] where best_params is
    [trend, seasonal].
    """
    model_count = 0
    for param_trend in trend:
        for param_seasonal in seasonal:
            ets_cv_error = []
            for split_count in range(1, validation_points - 1):
                demand_train, demand_valid = split_data(
                    demand, validation_points, split_count)
                # Stop once the validation window no longer has full length.
                if len(demand_valid) != params.validation_steps:
                    break
                try:
                    ets_fit = ExponentialSmoothing(
                        demand_train,
                        trend=param_trend,
                        seasonal=param_seasonal,
                        seasonal_periods=params.validation_steps).fit()
                    ets_fcast = ets_fit.forecast(steps=params.validation_steps)
                    error = mean_squared_error(demand_valid, ets_fcast)
                except Exception:
                    # BUG FIX: narrowed from a bare ``except`` so that
                    # KeyboardInterrupt/SystemExit are no longer swallowed;
                    # a failed fit still scores as infinitely bad.
                    traceback.print_exc()
                    error = float('Inf')
                ets_cv_error = np.append(ets_cv_error, error)
            ets_mean_error = np.nanmean(ets_cv_error)
            if math.isnan(ets_mean_error):
                ets_mean_error = float('Inf')
            # The first configuration seeds the incumbent best.
            if model_count == 0 or ets_mean_error < ets_best_error:
                ets_best_error = ets_mean_error
                ets_best_model = [param_trend, param_seasonal]
            model_count = model_count + 1
    # Refit the winning configuration on the complete series.
    ets_best_fit = ExponentialSmoothing(
        np.array(demand, dtype='double'),
        trend=ets_best_model[0],
        seasonal=ets_best_model[1],
        seasonal_periods=params.seasons,
        damped=True).fit()
    return [ets_best_model, ets_best_error, ets_best_fit]
def predict(num):
    """Return the *num* clients with the highest mean predicted monthly
    payments over the next 12 months.

    For each client a damped and an undamped additive Holt-Winters model are
    fitted and the one with the lower AIC is kept.

    Returns
    -------
    dict mapping client name -> mean of its 12-month forecast,
    for the top ``num`` clients.
    """
    df = pd.read_csv(r'G:/temp/processed_data.csv',
                     parse_dates=['Payment Date'], index_col='Payment Date')
    model_dict = {}
    for client in df['Client Name'].unique():
        paid = np.asarray(df[df['Client Name'] == client]['Paid Amount'])
        model1 = ExponentialSmoothing(paid, trend='add', seasonal='add',
                                      seasonal_periods=12, damped=True)
        hw_model1 = model1.fit()
        model2 = ExponentialSmoothing(paid, trend='add', seasonal='add',
                                      seasonal_periods=12)
        # BUG FIX: the original called ``model1.fit()`` here, so the
        # undamped model was never actually fitted and the AIC comparison
        # compared the damped model against itself.
        hw_model2 = model2.fit()
        model_dict[client] = hw_model1 if hw_model1.aic < hw_model2.aic else hw_model2
    predicted_amounts = {}
    for client, model in model_dict.items():
        pred = model.forecast(12)
        predicted_amounts[client] = pred.mean()
    values = list(predicted_amounts.values())
    values.sort()
    result = {}
    # Pick the ``num`` largest mean forecasts (values is sorted ascending).
    for i in range(1, num + 1):
        result[get_key(values[-1 * i], predicted_amounts)] = values[-1 * i]
    return result
def optimize_des(train, alphas, betas, step=48):
    """Grid-search double exponential smoothing parameters by MAE.

    NOTE(review): the MAE is computed against a module-level ``test``
    series, not a parameter — confirm that global exists at every call site.

    Parameters
    ----------
    train : series of observations to fit on.
    alphas, betas : iterables of candidate smoothing_level / smoothing_slope.
    step : int, forecast horizon used for scoring.

    Returns
    -------
    pandas.DataFrame with columns [alpha, beta, mae], sorted by mae.
    The original only printed this frame; returning it is backward
    compatible and lets callers pick the best pair programmatically.
    """
    print("Optimizing parameters...")
    results = []
    for alpha in alphas:
        for beta in betas:
            des_model = ExponentialSmoothing(train, trend="add").fit(
                smoothing_level=alpha, smoothing_slope=beta)
            y_pred = des_model.forecast(step)
            mae = mean_absolute_error(test, y_pred)
            results.append([round(alpha, 2), round(beta, 2), round(mae, 2)])
    results = pd.DataFrame(results, columns=["alpha", "beta", "mae"]).sort_values("mae")
    print(results)
    return results
def holt_winter_fcast(df):
    """Forecast 12 periods ahead for every column named in the module-level
    ``lst`` with multiplicative Holt-Winters, and append the forecast rows
    below the original frame."""
    forecasts = []
    for name in lst:
        fitted = ExponentialSmoothing(df[name], trend='mul', seasonal='mul',
                                      seasonal_periods=12).fit()
        forecasts.append(fitted.forecast(12).rename(name))
    # Side-by-side columns of forecasts, then stacked under the input frame.
    combined = pd.concat(forecasts, axis=1)
    return pd.concat([df, combined])
def get_preds(self, X):
    """Fit one ETS model per series in X and return, for each, the value
    forecast ``n_preds`` steps ahead (the last element of the forecast)."""
    last_step = self.n_preds - 1
    results = []
    for series in X:
        fitted = ExponentialSmoothing(
            series,
            trend=self.trend,
            seasonal=self.seasonal,
            seasonal_periods=self.seasonal_periods,
            damped=self.damped,
        ).fit(optimized=self.optimized, use_brute=self.use_brute)
        results.append(fitted.forecast(self.n_preds)[last_step])
    return results
def exp_smoothing_forecast(history, config):
    """One-step-ahead ETS forecast.

    config is (trend, damped, seasonal, seasonal_periods, use_boxcox,
    remove_bias).  Returns the single forecast value just past the end of
    *history*.
    """
    trend, damped, seasonal, periods, boxcox, bias = config
    fitted = ExponentialSmoothing(
        history, trend=trend, damped=damped,
        seasonal=seasonal, seasonal_periods=periods,
    ).fit(optimized=True, use_boxcox=boxcox, remove_bias=bias)
    # Predict exactly one step beyond the observed series.
    return fitted.predict(len(history), len(history))[0]
def home():
    """Render the analysis page for the logged-in user.

    Aggregates monthly expenses and revenues for the current year, forecasts
    next month's totals with simple exponential smoothing, and passes the
    chart data and forecast to the template.
    """
    chart_info_expenses = get_total_expenses_by_month(session.get('user_id'), datetime.datetime.now().year)
    chart_info_revenues = get_total_revenues_by_month(session.get('user_id'), datetime.datetime.now().year)
    df_expenses = pd.DataFrame(chart_info_expenses)
    df_revenues = pd.DataFrame(chart_info_revenues)
    # Outer join on month ('mes') so months present in only one frame survive.
    df_joined = pd.merge(df_expenses, df_revenues, how='outer', on=['mes', 'mes'], suffixes=['_expenses', '_revenues'])
    df_joined = df_joined.fillna(0.0)
    data_expenses = []
    # With a single observation, a zero is prepended so the smoother has two
    # points.  NOTE(review): this builds ``[0, ndarray]`` — a list containing
    # a whole array, not a flat two-element series; confirm this is intended.
    if len(df_joined['total_expenses'].values) == 1:
        data_expenses = [0, df_joined['total_expenses'].values]
    else:
        data_expenses = df_joined['total_expenses'].values
    model = ExponentialSmoothing(data_expenses)
    model_fit = model.fit()
    # One-step-ahead (next month) expense forecast.
    next_step_expenses = model_fit.predict(len(data_expenses), len(data_expenses))
    data_revenues = []
    # Same single-observation workaround as for expenses (same caveat).
    if len(df_joined['total_revenues'].values) == 1:
        data_revenues = [0, df_joined['total_revenues'].values]
    else:
        data_revenues = df_joined['total_revenues'].values
    model = ExponentialSmoothing(data_revenues)
    model_fit = model.fit()
    next_step_revenues = model_fit.predict(len(data_revenues), len(data_revenues))
    # Predicted balance (revenues minus expenses) for next month.
    next_step = next_step_revenues[0] - next_step_expenses[0]
    next_step = round(next_step, 2)
    df_joined['diff'] = df_joined['total_revenues'] - df_joined['total_expenses']
    # Transposed so each month becomes one dict entry for the template.
    diff_values = df_joined[['diff', 'mes']].T.to_dict()
    return render_template('restrict/analysis.html', chart_info_expenses=json.dumps(chart_info_expenses), chart_info_revenues=json.dumps(chart_info_revenues), current_year=datetime.datetime.now().year, diff_values=json.dumps(diff_values), next_step=next_step, info_diff=df_joined['diff'].sum())
def run_method():
    """Train a double-exponential-smoothing model on the demand series,
    forecast 28 days, plot train/test/predictions and report the RMSE."""
    # ---- plotting and pandas display configuration ----
    plt.style.use('bmh')
    sns.set_style("whitegrid")
    plt.rc('xtick', labelsize=15)
    plt.rc('ytick', labelsize=15)
    warnings.filterwarnings("ignore")
    pd.set_option('max_colwidth', 100)
    pd.set_option('display.max_rows', 500)
    pd.set_option('display.max_columns', 500)
    color_pal = plt.rcParams['axes.prop_cycle'].by_key()['color']
    color_cycle = cycle(plt.rcParams['axes.prop_cycle'].by_key()['color'])
    # Load the dataset.
    data = pd.read_csv(str(proj_root_dir / 'data/data_for_tsa.csv'))
    data['date'] = pd.to_datetime(data['date'])
    print(data.head())
    # Time-based split: everything up to 2016-03-27 trains, the following
    # four weeks are held out as the test set.
    train = data[data['date'] <= '2016-03-27']
    test = data[(data['date'] > '2016-03-27') & (data['date'] <= '2016-04-24')]
    # plot data
    fig, ax = plt.subplots(figsize=(25, 5))
    train.plot(x='date', y='demand', label='Train', ax=ax)
    test.plot(x='date', y='demand', label='Test', ax=ax);
    predictions = pd.DataFrame()
    predictions['date'] = test['date']
    stats = pd.DataFrame(columns=['Model Name', 'Execution Time', 'RMSE'])
    # Run the forecasting method and time the training step.
    t0 = time.time()
    model_name = 'Double Exponential Smoothing'
    # train
    # NOTE(review): no ``seasonal`` component is specified, so
    # ``seasonal_periods=7`` has no effect here — confirm intent.
    doubleExpSmooth_model = ExponentialSmoothing(train['demand'], trend='add', seasonal_periods=7).fit()
    t1 = time.time() - t0
    # predict the 28-day test horizon
    predictions[model_name] = doubleExpSmooth_model.forecast(28).values
    # visualize the last 4 training weeks, the test set and the forecast
    fig, ax = plt.subplots(figsize=(25, 4))
    train[-28:].plot(x='date', y='demand', label='Train', ax=ax)
    test.plot(x='date', y='demand', label='Test', ax=ax);
    predictions.plot(x='date', y=model_name, label=model_name, ax=ax);
    # evaluate
    score = np.sqrt(mean_squared_error(predictions[model_name].values, test['demand']))
    print('RMSE for {}: {:.4f}'.format(model_name, score))
    stats = stats.append({'Model Name': model_name, 'Execution Time': t1, 'RMSE': score}, ignore_index=True)
    print("stats: %s" % (stats,))
    plt.show()
def exp_smoothing_forecast(history, config):
    """Make a one-step forecast with exponential smoothing.

    config unpacks to (trend, damped, seasonal, seasonal_periods,
    use_boxcox, remove_bias).
    """
    t, d, s, p, b, r = config
    # Model is fit on an array copy of the incoming history.
    observations = array(history)
    model_fit = ExponentialSmoothing(
        observations,
        trend=t,
        damped=d,
        seasonal=s,
        seasonal_periods=p,
    ).fit(optimized=True, use_boxcox=b, remove_bias=r)
    # Single prediction at the index just past the last observation.
    y_hat = model_fit.predict(len(history), len(history))
    return y_hat[0]
def cost(parameters, *args):
    """Objective function for tuning Holt-Winters smoothing parameters.

    Parameters
    ----------
    parameters : (alpha, beta, gamma, periodo)
        Smoothing level, slope and seasonal coefficients plus the season
        length.
    *args
        The dataset (series of observations).

    Returns
    -------
    float : in-sample prediction error (via ``metrics``) scaled by 100.
    """
    # BUG FIX: the original unconditionally overwrote ``parameters`` with the
    # hard-coded tuple (0.1, 0.1, 0.0001, 30) — leftover debug code that made
    # every optimizer evaluation return the same value.  The caller's
    # parameters are now actually used.
    alpha, beta, gamma, periodo = parameters
    dataset = args
    dataset = np.reshape(np.array(dataset).astype(float), -1, 1)
    train = np.reshape(np.array(dataset).astype(float), -1, 1)
    ets_model = ExponentialSmoothing(train, trend='add', seasonal='add',
                                     seasonal_periods=int(periodo))
    ets_fit = ets_model.fit(smoothing_level=alpha, smoothing_slope=beta,
                            smoothing_seasonal=gamma)
    # In-sample predictions over the whole dataset.
    y_pred = ets_fit.predict(start=1, end=len(dataset))
    RMSE = metrics(np.array(dataset), y_pred)
    return RMSE * 100
def trainHoltsWinterForecastingMethodModelOnFullDataset():
    """Fit the multiplicative Holt-Winters model on the complete airline
    passengers dataset and persist the fitted model."""
    dataset = importHoltsWinterForecastingMethodDataset(
        "airline_passengers.csv")
    # Monthly series -> 12-period seasonality; fully multiplicative spec.
    fitted = ExponentialSmoothing(
        dataset["Thousands of Passengers"],
        trend="mul",
        seasonal="mul",
        seasonal_periods=12,
    ).fit()
    saveHoltsWinterForecastingMethodModelForFullDataset(fitted)
def evaluate(DATA, se, serie):
    """Refit stored Holt-Winters parameter sets for one series, plot each
    prediction against the held-out tail and collect error metrics.

    Parameters
    ----------
    DATA : 2-D array (observations x series)
    se : sequence of (alpha, beta, gamma, periodo) tuples, indexed in steps
        of 3 starting at ``serie``.
    serie : int — column index of the series to evaluate.

    Returns
    -------
    (MF, SMAPE, RMSE, y_max, y_min) — per-fit metric arrays plus the 97.5%
    confidence envelope of the predictions.  Observations from index 130 on
    are treated as the test tail (hard-coded — TODO confirm upstream).
    """
    sns.set(rc={'figure.figsize': (7, 5)})
    sns.set_style("whitegrid")
    RMSE = []
    MF = []
    SMAPE = []
    data = DATA[:, serie]
    colors = np.linspace(start=100, stop=255, num=90)
    y = []
    RESID = []
    for i in range(serie, 90, 3):
        alpha, beta, gamma, periodo = se[i]
        train = data
        test = data[130:]
        t = np.reshape(np.array(train).astype(float), -1, 1)
        ets_model = ExponentialSmoothing(t, trend='add', seasonal='add',
                                         seasonal_periods=int(periodo))
        ets_fit = ets_model.fit(smoothing_level=alpha, smoothing_slope=beta,
                                smoothing_seasonal=gamma)
        # BUG FIX: the original computed a prediction over
        # [len(train), len(train)+len(test)] and immediately overwrote it;
        # the dead call (and commented-out variants) have been removed.
        y_pred = ets_fit.predict(start=131, end=130 + len(test))
        plt.plot(y_pred, color=plt.cm.Reds(int(colors[i])), alpha=.9)
        mf, smape, m = metrics(test, y_pred)
        test = test.reshape(-1, 1)
        y_pred = y_pred.reshape(-1, 1)
        RESID.append(np.subtract(test, y_pred))
        RMSE.append(m)
        SMAPE.append(smape)
        MF.append(mf)
        y.append(y_pred)
    plt.grid(linestyle='dashed')
    plt.plot(data[130:], label="Original dataset", linewidth=3)
    y = np.array(y)
    y_max = []
    y_min = []
    # Per-timestep confidence interval of the mean across all fits.
    for i in range(y.shape[0]):
        Y_max, Y_min = confIntMean(np.array(y[:, i]), 0.975)
        y_max.append(Y_max)
        y_min.append(Y_min)
    return np.array(MF), np.array(SMAPE), np.array(RMSE), y_max, y_min
def train(self, **kwargs):
    """Fit a damped additive Holt-Winters model on ``self.train_df``.

    A Box-Cox transform is attempted first; when that fit fails (e.g. for
    non-positive data) a second attempt is made without the transform.
    The fitted results are stored on ``self.model``.
    """
    for use_boxcox in (True, False):
        try:
            self.model = ExponentialSmoothing(
                self.train_df,
                seasonal_periods=self.seasons,
                trend="add",
                seasonal="add",
                damped_trend=True,
            ).fit(use_boxcox=use_boxcox)
            break
        except Exception:
            # BUG FIX: narrowed from a bare ``except`` so KeyboardInterrupt
            # and SystemExit propagate; fit errors still fall through to the
            # non-Box-Cox attempt.
            continue
def ets_fit_model(train_data: list, config: tuple = ("add", True, "add", True, True)) -> list:
    """Fit an exponential-smoothing model on a training data set and return
    the fitted results object.

    Parameters
    ----------
    train_data : list
        Training observations.
    config : sequence
        (trend, damped_trend, seasonal, use_boxcox, remove_bias).
        BUG FIX: the default is now an immutable tuple — the original used a
        mutable list default (a classic Python pitfall); unpacking behavior
        is unchanged.
        NOTE(review): the final ``remove_bias`` element is unpacked but never
        used — confirm whether it should be forwarded to ``model.fit()``.
    """
    t, d, s, b, r = config
    if s is None:
        model = ExponentialSmoothing(
            train_data,
            trend=t,
            damped_trend=d,
            use_boxcox=b,
            initialization_method="estimated",
        )
    else:
        # Seasonal variant assumes daily data with a 366-period cycle.
        model = ExponentialSmoothing(
            train_data,
            trend=t,
            damped_trend=d,
            use_boxcox=b,
            initialization_method="estimated",
            seasonal=s,
            seasonal_periods=366,
        )
    return model.fit()
def calculate_time_serie(data, time_serie_type, trend_seasonal, period, forecast):
    """Fit the requested time-series model and return
    (in_sample_fitted_values, out_of_sample_forecast).

    Parameters
    ----------
    time_serie_type : 'simpsmoothing' | 'holt' | 'holt_winters' | 'arima'
    trend_seasonal : 'add' or 'mult' (holt_winters only)
    period : seasonal period (holt_winters only)
    forecast : number of steps to forecast (coerced to int)
    """
    if time_serie_type == 'simpsmoothing':
        data_simp_exp = SimpleExpSmoothing(data).fit()
        proyeccion = data_simp_exp.forecast(int(forecast))
        return data_simp_exp.fittedvalues, proyeccion
    elif time_serie_type == 'holt':
        data_holt = Holt(data).fit()
        proyeccion = data_holt.forecast(int(forecast))
        return data_holt.fittedvalues, proyeccion
    elif time_serie_type == 'holt_winters':
        print(trend_seasonal)
        if trend_seasonal == 'add':
            print('periodo', period)
            data_holtwinters = ExponentialSmoothing(
                data, trend='add', seasonal='add',
                seasonal_periods=period).fit(use_boxcox=True)
            print(data_holtwinters.fittedvalues)
        elif trend_seasonal == 'mult':
            data_holtwinters = ExponentialSmoothing(
                data, trend='mul', seasonal='mul',
                seasonal_periods=period).fit(use_boxcox=True)
        else:
            # BUG FIX: the original fell through here and raised a confusing
            # NameError on ``data_holtwinters``; fail fast instead.
            raise ValueError("unknown trend_seasonal: {!r}".format(trend_seasonal))
        proyeccion = data_holtwinters.forecast(int(forecast))
        return data_holtwinters.fittedvalues, proyeccion
    elif time_serie_type == 'arima':
        arima = pmdarima.auto_arima(data, seasonal=False,
                                    error_action='ignore',
                                    suppress_warnings=True)
        proyeccion, int_conf = arima.predict(n_periods=int(forecast),
                                             return_conf_int=True)
        prediccion = arima.predict_in_sample()
        print('pro', proyeccion)
        print('pre', prediccion)
        return prediccion, proyeccion
def train(self, train_set, val_set=None):
    """Fit one ETS model per training series, optionally report validation
    R^2, and persist the fitted model.

    NOTE(review): only the model fitted on the *last* training series is
    kept in ``self.model`` and saved — confirm this is intended.

    Parameters
    ----------
    train_set : object with .X (iterable of series) and .y (targets)
    val_set : optional, same shape as train_set
    """
    if self.skip_training:
        return
    train_preds = []
    for x in train_set.X:
        self.model = ExponentialSmoothing(
            x,
            trend=self.trend,
            seasonal=self.seasonal,
            seasonal_periods=self.seasonal_periods,
            damped=self.damped).fit(optimized=self.optimized,
                                    use_brute=self.use_brute)
        preds = self.model.forecast(self.n_preds)
        train_preds.append(preds[-1])
    # NOTE(review): train_R is computed but never reported — confirm intent.
    train_R = eva_R(np.array(train_preds), train_set.y)
    if val_set is not None:
        val_X = val_set.X
        val_y = val_set.y
        val_preds = []
        for x in val_X:
            model = ExponentialSmoothing(
                x,
                trend=self.trend,
                seasonal=self.seasonal,
                seasonal_periods=self.seasonal_periods,
                damped=self.damped).fit(optimized=self.optimized,
                                        use_brute=self.use_brute)
            preds = model.forecast(self.n_preds)
            val_preds.append(preds[self.n_preds - 1])
        # BUG FIX: the original referenced the undefined name ``val_ys``,
        # raising NameError whenever a validation set was supplied.
        val_R = eva_R(np.array(val_preds), val_y)
        print('Test R^2: {0}'.format(val_R))
    if not path.exists(self.ckpt_path):
        makedirs(self.ckpt_path)
    self.model.save(path.join(self.ckpt_path, self.name + '.pickle'))
    print('Save Model to ' + path.join(self.ckpt_path, self.name + '.pickle'))
def post(self, user_id):
    """Forecast three future points of a sample series using the method
    named in the ``method`` form field ('SARIMA' or 'HWES').

    Each predicted value is appended to the series before the next
    iteration (recursive one-step-ahead forecasting).

    Returns
    -------
    dict : {'prevision': [three predicted values]}
    """
    method = request.form['method']
    # donnees = request.form['donnees']
    # Sample series kept from the original for experimentation.
    data0 = [1, 2, 3, 4, 5, 1, 2, 3, 4, 5, 1, 2, 3, 4, 5, 1]  # sequential augmentation
    data1 = [1, 2, 3, 3, 4, 7, 15, 24, 43, 67, 94]  # grows slowly then fast
    data2 = [
        1, 12, 23, 43, 74, 89, 1, 20, 23, 43, 74, 89, 1, 12, 23, 43, 74, 89,
        1, 12, 23, 43, 74, 89, 1, 13
    ]  # grows fast
    data_season1 = [
        0, 0, 6, 14, 30, 41, 41, 54, 66, 71, 85, 89,
        0, 1, 4, 13, 25, 32, 44, 44, 62, 69, 81, 85
    ]
    data_season2 = [
        0, 0, 6, 14, 30, 41, 41, 54, 66, 71, 85, 89,
        0, 1, 4, 13, 25, 32, 44, 44, 62, 69, 81, 85, 0, 1
    ]
    data_season3 = [
        0, 0, 6, 14, 30, 41, 41, 54, 66, 71, 85, 89,
        0, 1, 4, 13, 25, 32, 44, 44, 62, 69, 81, 85,
        0, 1, 6, 13, 23, 30, 44
    ]
    data_season4 = [
        0, 0, 6, 14, 30, 41, 41, 54, 66, 71, 85, 89,
        0, 1, 4, 13, 25, 32, 44, 44, 62, 69, 81, 85,
        0, 1, 6, 13, 23, 30, 44, 44, 50, 67, 72, 89
    ]
    res = []
    data = data0
    for i in range(1, 4):
        if method == "SARIMA":  # seasonal ARIMA
            model = SARIMAX(data, order=(1, 1, 1),
                            seasonal_order=(1, 1, 1, 1))
            model_fit = model.fit(disp=False)
        elif method == "HWES":  # Holt-Winters exponential smoothing
            model = ExponentialSmoothing(data)
            model_fit = model.fit()
        else:
            # BUG FIX: an unknown method previously fell through to a
            # NameError on ``model_fit``; fail fast with a clear error.
            raise ValueError("unsupported method: {!r}".format(method))
        # One-step-ahead prediction, fed back into the series.
        yhat = model_fit.predict(len(data), len(data))
        data.append(yhat[0])
        res.append(yhat[0])
    obj = {'prevision': res}
    return obj
def predict_stock_quantity(data, product_id, shop_id, current_date, prediction_length=14):
    """
    Predicts stock quantity for some random next_days_num from the given
    current_date, given prev_days_num days.
    Emulates one cycle before a new supply.

    Parameters
    ----------
    data : pandas.DataFrame
        Sales rows with at least 'product_id', 'shop_id', 'chq_date' and
        'sales_count' columns.
    product_id, shop_id
        Filters selecting the single product/shop series.
    current_date
        The "today" from which the forecast starts.
    prediction_length : int
        Number of history days used AND number of days forecast.

    Returns
    -------
    (predictions, train_demand) — forecast sorted descending and the demand
    history used for fitting; on any error both are zero-filled arrays.
    """
    try:
        le = preprocessing.LabelEncoder()
        # Narrow to the one product/shop series and aggregate per day.
        product_df = data[data['product_id'] == product_id]
        product_df = product_df[product_df['shop_id'] == shop_id]
        product_df = product_df[['chq_date', 'sales_count']]
        product_df = product_df.sort_values(by=['chq_date'])
        product_df = product_df.groupby(['chq_date'], as_index=False).sum()
        # LabelEncoder on sorted dates yields consecutive day numbers.
        product_df['day_number'] = le.fit_transform(product_df['chq_date'])
        # np.random.seed(42)
        prev_days_num = prediction_length  #np.random.randint(low=5, high=14)
        next_days_num = prediction_length
        # Last known day number at or before current_date.
        current_day_id = product_df[
            product_df['chq_date'] <= current_date]['day_number'].values[-1]
        week_df = product_df[product_df['day_number'] <= current_day_id + next_days_num]
        # NOTE(review): this mask comes from ``product_df`` but is applied to
        # ``week_df`` (different index/length) — pandas may warn or misalign;
        # confirm it selects the intended rows.
        week_df = week_df[product_df['day_number'] > current_day_id - prev_days_num]
        # Remaining stock = total sales in the window minus cumulative sales.
        week_df['cum_sum'] = np.cumsum(week_df['sales_count'])
        week_df['stock_amount'] = week_df['cum_sum'].apply(
            lambda x: np.sum(week_df['sales_count']) - x)
        # Random offset simulates an unknown safety-stock level.
        week_df['stock_amount'] += np.random.randint(low=1, high=100, size=week_df.shape[0])
        train_demand = week_df[week_df['chq_date'] <= current_date][
            'stock_amount'].values[-prev_days_num:]
        model = ExponentialSmoothing(train_demand, trend='mul')
        fit = model.fit()
        predictions = fit.forecast(next_days_num)
        # Stock should be non-increasing over the cycle, hence descending sort.
        predictions = np.sort(predictions)[::-1]
    except Exception as e:
        # Best-effort: any failure yields a zero forecast instead of a crash.
        print(f"Error on calculation: {e}")
        print(traceback.format_exc())
        predictions = np.zeros(next_days_num)
        train_demand = predictions
    return predictions, train_demand
def test_simulate_boxcox(austourists):
    """ check if simulation results with boxcox fits are reasonable """
    fit = ExponentialSmoothing(
        austourists,
        seasonal_periods=4,
        trend="add",
        seasonal="mul",
        damped=False,
    ).fit(use_boxcox=True)
    expected = fit.forecast(4).values
    simulated = fit.simulate(4, repetitions=10).values
    # The mean over the 10 simulated paths must stay within 5 units of the
    # point forecast at every horizon.
    assert np.all(np.abs(np.mean(simulated, axis=1) - expected) < 5)
def test_hw_seasonal(self):
    """Check additive/multiplicative Holt-Winters forecasts against the
    reference values; the mul-trend variants are only fit (no assertions,
    matching the original)."""
    fit1 = ExponentialSmoothing(self.aust, seasonal_periods=4,
                                trend='additive',
                                seasonal='additive').fit(use_boxcox=True)
    fit2 = ExponentialSmoothing(self.aust, seasonal_periods=4,
                                trend='add',
                                seasonal='mul').fit(use_boxcox=True)
    expected1 = [61.34, 37.24, 46.84, 51.01, 64.47, 39.78, 49.64, 53.90]
    expected2 = [60.97, 36.99, 46.71, 51.48, 64.46, 39.02, 49.29, 54.32]
    assert_almost_equal(fit1.forecast(8), expected1, 2)
    assert_almost_equal(fit2.forecast(8), expected2, 2)
    fit5 = ExponentialSmoothing(self.aust, seasonal_periods=4,
                                trend='mul',
                                seasonal='add').fit(use_boxcox='log')
    fit6 = ExponentialSmoothing(self.aust, seasonal_periods=4,
                                trend='multiplicative',
                                seasonal='multiplicative').fit(use_boxcox='log')
def holt_winters_method(self):
    """Fit Holt-Winters models for each (period, trend, seasonal) setting,
    overlay the fitted values on the base data figure and save the chart."""
    configurations = [(10, 'add', 'add'), (6, 'add', 'add')]
    self.create_fig_with_data()
    for periods, trend_kind, seasonal_kind in configurations:
        fitted = ExponentialSmoothing(self.data_of_shares.Average,
                                      seasonal_periods=periods,
                                      trend=trend_kind,
                                      seasonal=seasonal_kind).fit()
        fitted_values = fitted.fittedvalues
        # Only the in-sample window covering the test span is drawn.
        plt.plot(
            fitted_values[0:self.amount_of_test_data],
            label=f'HoltWinters periods={periods}, trend={trend_kind} seasonal = {seasonal_kind}'
        )
    plt.legend(loc='best')
    plt.savefig("static/results/holt_winters_method")