def Ses(data):
    # Forecast the Volume series with simple exponential smoothing
    inter_df = data[['Volume']]
    # Everything dated up to 2018-12-31 is the training window, the rest is the test window
    size = np.sum(data['Date'] <= '12/31/2018')
    train, test = inter_df.iloc[:size, 0], inter_df.iloc[size:, 0]
    model = SimpleExpSmoothing(train).fit()
    pred = model.predict(start=test.index[0], end=test.index[-1])
    return pred
Example 2
def single_exponential_smoothing(alpha, y_test):
    # Accounts only for the level of the series

    # Simple Exponential Smoothing: the supplied alpha is used as the smoothing
    # level, while optimized=True lets statsmodels estimate the remaining free
    # parameters (the initial level)
    ses_model1 = SimpleExpSmoothing(y_test).fit(smoothing_level=alpha,
                                                optimized=True)
    # 31924 is a dataset-specific row index marking where the forecast starts
    y_pred_ses = ses_model1.predict(31924).rename(
        r'$\alpha=%s$' % ses_model1.model.params['smoothing_level'])

    fig = plt.figure(figsize=(60, 8))
    y_pred_ses[31925:].plot(color='grey', legend=True)
    ses_model1.fittedvalues.plot(color='grey')
    plt.title("Single Exponential Smoothing")
    plt.show()
    fig.savefig('results/SES/final_output.jpg', bbox_inches='tight')

    # print("Predicted values: ", y_pred, "\n")
    mse_ses = mean_squared_error(y_test[31923:-1], y_pred_ses)
    rmse_ses = mean_squared_error(y_pred_ses, y_test[31923:-1], squared=False)
    r2_ses = r2_score(y_test[31923:-1], y_pred_ses)

    # Storing the result in a file: 'load_forecasting_result.txt'
    predicted_test_result = y_pred_ses
    np.savetxt('results/SES/predicted_values.txt', predicted_test_result)
    actual_test_result = y_test
    np.savetxt('results/SES/test_values.txt', actual_test_result)

    return mse_ses, rmse_ses, r2_ses, y_pred_ses
Example 3
def ses(train, test, alpha=0.0):

    if alpha > 0.0:
        model = SimpleExpSmoothing(train).fit(smoothing_level=alpha,
                                              optimized=False)
        _alpha = '{0:2.1f}'.format(alpha)
    else:
        model = SimpleExpSmoothing(train).fit()
        _alpha = model.model.params['smoothing_level']

    pred = model.predict(start=test.index[0], end=test.index[-1])

    plt.plot(train.index, train, label='Train')
    plt.plot(test.index, test, label='Test')
    plt.plot(pred.index, pred, label=r'SES, $\alpha={0}$'.format(_alpha))
    plt.legend(loc='best')
plt.style.use('fivethirtyeight')

df = pd.read_excel("../00Daily/Egypt.xlsx", parse_dates=["Date"])  # parse Date so the date-based predict() below works
df = df[["Date", "LocalTransmission"]]
df.set_index("Date", inplace=True)
df.dropna(inplace=True)
##df['Date'] = pd.to_datetime(df['Date'])
LocalTransmission = df['LocalTransmission'].astype('int32')
#print (df.head())
print(df.index)

result = SimpleExpSmoothing(df).fit()
print(result.summary())
#print(result.params)
predictions = result.predict(start="2020-03-01", end="2020-05-01")
#accuracy = result.score()
print("Predictions: ", predictions, sep='\n')
##accuracy = result.score()
#print (accuracy)

#result.plot_predict(start="2020-03-01", end="2020-05-01")

plt.plot(predictions)
plt.show()

##def mean_forecast_error(y, yhat):
##    return y.sub(yhat).mean()


def mean_forecast_error(LocalTransmission, predictions):
    return LocalTransmission.sub(predictions).mean()
Example 5

# %%
carbonico.shape


# %%
carbonico_teste.shape


# %%
len(carbonico) - len(carbonico_teste)


# %%
modelo_previsto = modelo_ajustado.predict(start=len(carbonico) - len(carbonico_teste), end=len(carbonico) - 1)


# %%
plt.figure(figsize=(8, 4))
plt.plot(carbonico_treino)
plt.plot(carbonico_teste, 'g')
plt.plot(carbonico_teste.index, modelo_previsto, 'r.')


# %%
modelo_ajustado = ExponentialSmoothing(nasc_treino.values, trend='multiplicative', seasonal=None).fit()


# %%
modelo_previsto_suave = modelo_ajustado.predict(start=335, end=365)
Example 6
def forecast(data, train_index, forecast_length):
    # Fit SES on the series and return predictions from index 0 through
    # `forecast_length` steps beyond the training index
    fitted_model = SimpleExpSmoothing(data).fit()
    return fitted_model.predict(0, train_index + forecast_length)
Example 7
Train = cocacola.head(38)
Test = cocacola.tail(4)

# to change the index values of the test DataFrame (Test has 4 rows):
# Test.set_index(np.arange(1, 5), inplace=True)


# Creating a function to calculate the MAPE value for test data
def MAPE(pred, org):
    temp = np.abs((pred - org) / org) * 100
    return np.mean(temp)
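
# Quick sanity check of the MAPE helper above, using illustrative values
# (not taken from the data):
print(MAPE(np.array([110.0]), np.array([100.0])))  # ~10.0, i.e. a 10% mean absolute percentage error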


# Simple Exponential Method
ses_model = SimpleExpSmoothing(Train["Sales"]).fit()
pred_ses = ses_model.predict(start=Test.index[0], end=Test.index[-1])
MAPE(pred_ses, Test.Sales)

# Holt method
hw_model = Holt(Train["Sales"]).fit()
pred_hw = hw_model.predict(start=Test.index[0], end=Test.index[-1])
MAPE(pred_hw, Test.Sales)

# Holt-Winters exponential smoothing with additive seasonality and additive trend
hwe_model_add_add = ExponentialSmoothing(Train["Sales"],
                                         seasonal="add",
                                         trend="add",
                                         seasonal_periods=4).fit()
pred_hwe_add_add = hwe_model_add_add.predict(start=Test.index[0],
                                             end=Test.index[-1])
MAPE(pred_hwe_add_add, Test.Sales)
seasonal_ts_add = smf.tsa.seasonal_decompose(airlines["Passengers"], freq=10)
seasonal_ts_add.plot()
train = airlines.head(92)
test = airlines.tail(4)


#MAPE####
def MAPE(pred, org):
    temp = np.abs((pred - org)) * 100 / org
    return np.mean(temp)


####Simple Exp##########
Exp = SimpleExpSmoothing(train["Passengers"]).fit()
Exp_pred = Exp.predict(start=test.index[0], end=test.index[-1])
Exp_mape = MAPE(Exp_pred, test.Passengers)  ######32.05

###Holt#######
hw = Holt(train["Passengers"]).fit()
hw_pred = hw.predict(start=test.index[0], end=test.index[-1])
hw_mape = MAPE(hw_pred, test.Passengers)  #####34.75

# Holt-Winters exponential smoothing with additive seasonality and additive damped trend
Exp_add_add = ExponentialSmoothing(train["Passengers"],
                                   damped=True,
                                   seasonal="add",
                                   seasonal_periods=12,
                                   trend="add").fit()
Exp_add_add_pred = Exp_add_add.predict(start=test.index[0], end=test.index[-1])
Exp_add_add_Mape = MAPE(Exp_add_add_pred, test.Passengers)  #####4.23
seasonal_dec = sm.tsa.seasonal_decompose(plastic["Sales"], freq=3)
seasonal_dec.plot()

Train = plastic.head(48)
Test = plastic.tail(12)
Test = Test.set_index(np.arange(1, 13))


# MAPE
def MAPE(pred, org):
    temp = np.abs((pred - org)) * 100 / org
    return np.mean(temp)


### Simple Exponential Smoothing ###
ses = SimpleExpSmoothing(Train["Sales"]).fit()
ses_pred = ses.predict(start=Test.index[0], end=Test.index[-1])
ses_MAPE = MAPE(ses_pred, Test.Sales)  # 26.09

### Holt ###
HW = Holt(Train["Sales"]).fit()
HW_pred = HW.predict(start=Test.index[0], end=Test.index[-1])
HW_Mape = MAPE(HW_pred, Test.Sales)  # 26.60

### Holt-Winters, additive (damped) trend + additive seasonality ###
HW_exp_add = ExponentialSmoothing(Train["Sales"], trend="add", seasonal="add",
                                  seasonal_periods=12, damped=True).fit()
HW_exp_pred = HW_exp_add.predict(start=Test.index[0], end=Test.index[-1])
HW_exp_add_Mape = MAPE(HW_exp_pred, Test.Sales)  # 25.50

### Holt-Winters, multiplicative trend + additive seasonality ###
HW_exp_mul_add = ExponentialSmoothing(Train["Sales"], trend="mul", seasonal="add",
                                      seasonal_periods=12).fit()
HW_exp_mul_pred = HW_exp_mul_add.predict(start=Test.index[0], end=Test.index[-1])
HW_exp_mul_Mape = MAPE(HW_exp_mul_pred, Test.Sales)  # 25.73
Example 10
    def forecast_prediction(self, weather_filtered, weather_keys):
        '''
            # TODO: Adjust data-time to give valid look forward prediction
        '''
        scaled_data = self.scale_weather_data(weather_filtered)
        # Align every column on the shortest series so the DataFrame lengths match
        shortest_val = np.inf
        for i in scaled_data.items():
            shortest_val = min(i[1].shape[0], shortest_val)
        index = list(range(shortest_val))
        scaled_df = pd.DataFrame(
            {
                # last `shortest_val` scaled observations for each weather feature
                **{
                    key: np.transpose(scaled_data[key])[0][-shortest_val:]
                    for key in weather_keys[:9]
                },
                # following columns not being used
                'peas': [-1] * shortest_val,
                'citrus': [-1] * shortest_val,
                'potato': [-1] * shortest_val,
            },
            index=index)
        ##############################################################################################
        scaled_df['potato_pymc3'] = scaled_df.apply(
            lambda x: self.predict_crop_feasibility(x, pymc3_params)[0],
            axis=1)
        scaled_df['citrus_pymc3'] = scaled_df.apply(
            lambda x: self.predict_crop_feasibility(x, pymc3_params)[1],
            axis=1)
        scaled_df['peas_pymc3'] = scaled_df.apply(
            lambda x: self.predict_crop_feasibility(x, pymc3_params)[2],
            axis=1)
        scaled_df['crop_pymc3'] = scaled_df[[
            'potato_pymc3', 'citrus_pymc3', 'peas_pymc3'
        ]].idxmax(axis=1).str.replace('_pymc3', '')

        scaled_df['potato_lr'] = scaled_df.apply(
            lambda x: self.predict_crop_feasibility(x, lr_params)[0], axis=1)
        scaled_df['citrus_lr'] = scaled_df.apply(
            lambda x: self.predict_crop_feasibility(x, lr_params)[1], axis=1)
        scaled_df['peas_lr'] = scaled_df.apply(
            lambda x: self.predict_crop_feasibility(x, lr_params)[2], axis=1)
        scaled_df['crop_lr'] = scaled_df[[
            'potato_lr', 'citrus_lr', 'peas_lr'
        ]].idxmax(axis=1).str.replace('_lr', '')

        scaled_df['potato_ridge'] = scaled_df.apply(
            lambda x: self.predict_crop_feasibility(x, ridge_params)[0],
            axis=1)
        scaled_df['citrus_ridge'] = scaled_df.apply(
            lambda x: self.predict_crop_feasibility(x, ridge_params)[1],
            axis=1)
        scaled_df['peas_ridge'] = scaled_df.apply(
            lambda x: self.predict_crop_feasibility(x, ridge_params)[2],
            axis=1)
        scaled_df['crop_ridge'] = scaled_df[[
            'potato_ridge', 'citrus_ridge', 'peas_ridge'
        ]].idxmax(axis=1).str.replace('_ridge', '')

        ## Everything up to here looks fine
        exp_potato = SimpleExpSmoothing(np.asarray(
            scaled_df['potato_pymc3'])).fit(smoothing_level=0.1,
                                            optimized=False)
        exp_citrus = SimpleExpSmoothing(np.asarray(
            scaled_df['citrus_pymc3'])).fit(smoothing_level=0.1,
                                            optimized=False)
        exp_peas = SimpleExpSmoothing(np.asarray(scaled_df['peas_pymc3'])).fit(
            smoothing_level=0.1, optimized=False)

        month_current = int(date.today().strftime('%m'))

        potato_month3 = exp_potato.predict(month_current, month_current + 3)

        rms1 = sqrt(
            mean_squared_error(scaled_df['potato_pymc3'][:4], potato_month3))
        # print(rms1)
        citrus_month3 = exp_citrus.predict(month_current, month_current + 3)
        rms2 = sqrt(
            mean_squared_error(scaled_df['citrus_pymc3'][:4], citrus_month3))
        # print(rms2)
        peas_month3 = exp_peas.predict(month_current, month_current + 3)
        rms3 = sqrt(
            mean_squared_error(scaled_df['peas_pymc3'][:4], peas_month3))
        # print(rms3)

        potato_month6 = exp_potato.predict(month_current, month_current + 6)
        # rms4 = sqrt(mean_squared_error(scaled_df['potato_pymc3'], potato_month6))
        # print(rms4)
        citrus_month6 = exp_citrus.predict(month_current, month_current + 6)
        # rms5 = sqrt(mean_squared_error(scaled_df['citrus_pymc3'], citrus_month6))
        # print(rms5)
        peas_month6 = exp_peas.predict(month_current, month_current + 6)
        # rms6 = sqrt(mean_squared_error(scaled_df['peas_pymc3'], peas_month6))
        # print(rms6)

        scaled_df['3 months potato'] = potato_month3.mean()
        scaled_df['3 months citrus'] = citrus_month3.mean()
        scaled_df['3 months peas'] = peas_month3.mean()
        scaled_df['6 months potato'] = potato_month6.mean()
        scaled_df['6 months citrus'] = citrus_month6.mean()
        scaled_df['6 months peas'] = peas_month6.mean()

        # print(scaled_df[['3 months potato','3 months citrus', '3 months peas','6 months potato', '6 months citrus', '6 months peas'  ]])

        return scaled_df
Example 11
varmax_model = VARMAX(y, exog=exog_data, order=(1,1))
varmax_model = varmax_model.fit(disp=False)
exog2 = [[100]]
varmax_yhat = varmax_model.forecast(exog=exog2)
"""

#### Simple Exponential Smoothing (SES) ####
"""
SES models the next time step as an exponentially weighted linear function of prior observations.
SES is suitable for UNIVARIATE time series WITHOUT TREND AND SEASONAL COMPONENTS.
"""
from statsmodels.tsa.holtwinters import SimpleExpSmoothing

ses_model = SimpleExpSmoothing(y)
ses_model = ses_model.fit()
ses_yhat = ses_model.predict(pd.to_datetime('1998-01-01'), y.index[-1])
# plot results
ax = y['1990':].plot(label='observed', figsize=(15,12))
ses_yhat.plot(label='Simple Exponential Smoothing', ax=ax)
ax.fill_betweenx(ax.get_ylim(), pd.to_datetime('1998-01-01'), y.index[-1], alpha=0.1, zorder=-1)
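
# For reference, the "exponentially weighted linear function" described above can be
# written out by hand. This is an illustrative sketch only (not part of the library
# example); the update rule is y_hat[t+1] = alpha * y[t] + (1 - alpha) * y_hat[t],
# and the alpha default below is an assumed value chosen for demonstration.
def ses_by_hand(series, alpha=0.2):
    y_hat = series[0]  # seed the level with the first observation
    for y_t in series:
        y_hat = alpha * y_t + (1 - alpha) * y_hat
    return y_hat  # one-step-ahead forecast after the last observation

# e.g. ses_by_hand([10, 12, 11, 13]) returns the next-step SES forecast for the toy list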


#### Holt-Winters Exponential Smoothing (HWES) ####
"""
HWES (also known as Triple Exponential Smoothing) models the next step as an exponentially
weighted linear function of prior observations, taking trend and seasonality into account.
HWES is suitable for UNIVARIATE time series WITH TREND AND/OR SEASONAL COMPONENTS
"""
from statsmodels.tsa.holtwinters import ExponentialSmoothing

hwes_model = ExponentialSmoothing(y)
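
# The original snippet stops after constructing the model. A minimal sketch of the
# remaining fit/forecast steps, mirroring the SES block above; note that with no
# trend/seasonal arguments the model reduces to simple smoothing, and e.g.
# trend='add', seasonal='add', seasonal_periods=12 (assumed values, not from the
# original) would be needed for full Holt-Winters behaviour.
hwes_fit = hwes_model.fit()
hwes_yhat = hwes_fit.predict(pd.to_datetime('1998-01-01'), y.index[-1])
ax = y['1990':].plot(label='observed', figsize=(15, 12))
hwes_yhat.plot(label='Holt-Winters Exponential Smoothing', ax=ax)
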
def ses(data, forecast_length):
    # Fit SES and return the in-sample fit plus `forecast_length` forecast steps,
    # together with the fitted model parameters
    fitted_model = SimpleExpSmoothing(data).fit()
    prediction = fitted_model.predict(0, len(data) + forecast_length - 1)
    params = fitted_model.params
    # convert the numpy array to a plain list (e.g. for serialization)
    params['initial_seasons'] = params['initial_seasons'].tolist()
    return prediction, params