def predict(self, test: pd.DataFrame, train: pd.DataFrame) -> pd.DataFrame:
        """
        Deliver (back-transformed) out-of-sample predictions, one step ahead if specified
        :param test: test set
        :param train: train set
        :return: DataFrame with predictions
        """
        if (self.power_transformer is not None) or self.log:
            test = TrainHelper.get_transformed_set(
                dataset=test,
                target_column=self.target_column,
                power_transformer=self.power_transformer,
                log=self.log,
                only_transform=True)
            train = TrainHelper.get_transformed_set(
                dataset=train,
                target_column=self.target_column,
                power_transformer=self.power_transformer,
                log=self.log)
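        # one-step-ahead: forecast a single step, append the observed test value
        # to the training history and update the model before the next step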
        if self.one_step_ahead:
            train_manip = train.copy()[self.target_column]
            predict = []
            # deep copy model as predict function should not change class model
            model_results = copy.deepcopy(self.model_results)
            for ind in test.index:
                fc = model_results.forecast()
                predict.append(fc[ind])
                # append the observed test value so the next forecast is based on the extended history
                train_manip = train_manip.append(
                    pd.Series(data=test[self.target_column][ind], index=[ind]))
                model_results = self.update(train=pd.DataFrame(
                    data=train_manip, columns=[self.target_column]))
        else:
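            # multi-step: predict the whole test horizon at once with the fitted model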
            predict = self.model_results.predict(start=test.index[0],
                                                 end=test.index[-1])
        predictions = pd.DataFrame({'Prediction': predict}, index=test.index)

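        # map the predictions back to the original scale if a power transformation was applied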
        if self.power_transformer is not None:
            predictions = pd.DataFrame(
                {
                    'Prediction':
                    self.power_transformer.inverse_transform(
                        predictions['Prediction'].values.reshape(-1,
                                                                 1)).flatten()
                },
                index=predictions.index)
        if self.log:
            # invert the log transformation; the offset added for series containing
            # zeros (log(x + 1)) has to be subtracted again after exponentiating
            if self.contains_zeros:
                predictions = predictions.apply(np.exp) - 1
            else:
                predictions = predictions.apply(np.exp)

        return predictions
 def train(self, train: pd.DataFrame, cross_val_call: bool = False) -> dict:
     """
     Train (S)ARIMA(X) model
     :param train: train set
     :param cross_val_call: called to perform cross validation
      :return: dictionary with cross validated scores (if specified)
     """
     cross_val_score_dict = {}
     if cross_val_call:
         cross_val_score_dict, self.model = self.get_cross_val_score(
             train=train)
     train_exog = None
     if (self.power_transformer is not None) or self.log:
         train = TrainHelper.get_transformed_set(
             dataset=train,
             target_column=self.target_column,
             power_transformer=self.power_transformer,
             log=self.log)
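      # prepare exogenous regressors: drop columns containing missing values
      # and convert the remaining features to a float matrix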
     if self.use_exog:
         train_exog = train.drop(labels=[self.target_column], axis=1)
         self.exog_cols_dropped = train_exog.columns[
             train_exog.isna().any()].tolist()
         PreparationHelper.drop_columns(train_exog, self.exog_cols_dropped)
         train_exog = train_exog.to_numpy(dtype=float)
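      # fit the (S)ARIMA(X) model on the (possibly transformed) target series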
     self.model.fit(y=train[self.target_column],
                    exogenous=train_exog,
                    trend=self.trend)
     return cross_val_score_dict
 def train(self, train: pd.DataFrame, cross_val_call: bool = False) -> dict:
     """
     Train Exponential Smoothing model
     :param train: train set
     :param cross_val_call: called to perform cross validation
     """
     cross_val_score_dict = {}
     if cross_val_call:
         cross_val_score_dict, self.model = self.get_cross_val_score(
             train=train)
     if (self.power_transformer is not None) or self.log:
         train = TrainHelper.get_transformed_set(
             dataset=train,
             target_column=self.target_column,
             power_transformer=self.power_transformer,
             log=self.log)
     if (0 in train[self.target_column].values) and (
             self.trend == 'mul' or self.seasonal == 'mul'):
          # a multiplicative trend or seasonal component only works with strictly positive data
          # only done if no transform was performed, otherwise the values would need substantial correction
         train = train.copy()
         train[self.target_column] += 0.01
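      # build and fit the Exponential Smoothing model with the configured
      # trend, damping and seasonal components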
     self.model = statsmodels.tsa.api.ExponentialSmoothing(
         endog=train[self.target_column],
         trend=self.trend,
         damped=self.damped,
         seasonal=self.seasonal,
         seasonal_periods=self.seasonal_periods)
     self.model_results = self.model.fit(remove_bias=self.remove_bias,
                                         use_brute=self.use_brute)
     return cross_val_score_dict
    def predict(self, test: pd.DataFrame, train: pd.DataFrame) -> pd.DataFrame:
        """
        Deliver (back-transformed) out-of-sample predictions, one step ahead if specified
        :param test: test set
        :param train: train set
        :return: DataFrame with predictions as well as lower and upper confidence bounds
        """
        test_exog = None
        if (self.power_transformer is not None) or self.log:
            test = TrainHelper.get_transformed_set(
                dataset=test,
                target_column=self.target_column,
                power_transformer=self.power_transformer,
                log=self.log,
                only_transform=True)
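        # prepare the exogenous test features and drop the columns that were removed during training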
        if self.use_exog:
            test_exog = test.drop(labels=[self.target_column], axis=1)
            PreparationHelper.drop_columns(test_exog, self.exog_cols_dropped)
            test_exog = test_exog.to_numpy(dtype=float)
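        # one-step-ahead: forecast a single step with its confidence interval and
        # update the model with the observed test value before the next step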
        if self.one_step_ahead:
            predict = []
            conf_low = []
            conf_up = []
            # deep copy model as predict function should not change class model
            model = copy.deepcopy(self.model)
            for i in range(0, test.shape[0]):
                if self.use_exog:
                    fc, conf = model.predict(n_periods=1,
                                             exogenous=pd.DataFrame(
                                                 test_exog[i].reshape(1, -1)),
                                             return_conf_int=True,
                                             alpha=0.05)
                    model.update(test[self.target_column].iloc[i],
                                 exogenous=pd.DataFrame(test_exog[i].reshape(
                                     1, -1)))
                else:
                    fc, conf = model.predict(n_periods=1,
                                             return_conf_int=True,
                                             alpha=0.05)
                    model.update(test[self.target_column].iloc[i])
                predict.append(fc[0])
                conf_low.append(conf[0][0])
                conf_up.append(conf[0][1])
        else:
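            # multi-step: forecast the whole test horizon at once, including confidence intervals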
            predict, conf = self.model.predict(n_periods=test.shape[0],
                                               exogenous=test_exog,
                                               return_conf_int=True,
                                               alpha=0.05)
            conf_low = conf[:, 0]
            conf_up = conf[:, 1]
        predictions = pd.DataFrame(
            {
                'Prediction': predict,
                'LowerConf': conf_low,
                'UpperConf': conf_up
            },
            index=test.index)

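        # map predictions and confidence bounds back to the original scale
        # if a power transformation was applied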
        if self.power_transformer is not None:
            predictions = pd.DataFrame(
                {
                    'Prediction':
                    self.power_transformer.inverse_transform(
                        predictions['Prediction'].values.reshape(-1,
                                                                 1)).flatten(),
                    'LowerConf':
                    self.power_transformer.inverse_transform(
                        predictions['LowerConf'].values.reshape(-1,
                                                                1)).flatten(),
                    'UpperConf':
                    self.power_transformer.inverse_transform(
                        predictions['UpperConf'].values.reshape(-1,
                                                                1)).flatten()
                },
                index=predictions.index)
        if self.log:
            # invert the log transformation; the offset added for series containing
            # zeros (log(x + 1)) has to be subtracted again after exponentiating
            predict_backtr = np.exp(predictions['Prediction'])
            if self.contains_zeros:
                predict_backtr -= 1
            # preserve the relative width of the confidence interval when mapping
            # the bounds back to the original scale
            lower_dist = (
                (predictions['Prediction'] - predictions['LowerConf']) /
                predictions['Prediction']) * predict_backtr
            upper_dist = (
                (predictions['UpperConf'] - predictions['Prediction']) /
                predictions['Prediction']) * predict_backtr
            predictions = pd.DataFrame(
                {
                    'Prediction': predict_backtr,
                    'LowerConf': predict_backtr - lower_dist,
                    'UpperConf': predict_backtr + upper_dist
                },
                index=predictions.index)
        return predictions