Example #1
    def fit(self, **kwargs: xr.DataArray):
        """
        Fits the statsmodels module

        :param kwargs: A dict of input arrays
        :type kwargs: xr.DataArray
        """
        x, y = split_kwargs(kwargs)
        x = list(map(lambda _x: _x.values, x.values()))
        y = list(map(lambda _y: _y.values.reshape(-1), y.values()))

        # Check whether the statsmodels model accepts exogenous variables
        module_params = inspect.signature(self.module).parameters
        if (len(x) > 0 and "exog" in module_params) or (
                "kwargs" in module_params and self.use_exog):
            self.model = self.module(
                endog=np.stack(y, axis=-1),
                exog=np.concatenate(x, axis=-1),
                **self.module_kwargs).fit(**self.fit_kwargs)
        else:
            self.model = self.module(
                endog=np.stack(y, axis=-1),
                **self.module_kwargs).fit(**self.fit_kwargs)

        self.is_fitted = True
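Every wrapper in this listing first calls split_kwargs to separate input arrays from target arrays. The helper itself is not shown in these snippets; below is a minimal sketch of what it could look like, assuming targets are identified by a keyword name starting with "target" (that prefix convention is an assumption, not something the snippets confirm).

    from typing import Dict

    import xarray as xr


    def split_kwargs(kwargs: Dict[str, xr.DataArray]):
        """Split keyword arguments into input arrays and target arrays."""
        # Hypothetical convention: any argument whose name starts with "target"
        # is treated as a target, everything else as an input.
        x = {name: da for name, da in kwargs.items() if not name.startswith("target")}
        y = {name: da for name, da in kwargs.items() if name.startswith("target")}
        return x, y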
Example #2
    def fit(self, **kwargs: xr.DataArray):
        """
        Calls the compile and the fit method of the wrapped pytorch module.
        """
        x, y = split_kwargs(kwargs)

        # check if gpu is available
        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.model.to(device)

        batch_size = int(self.fit_kwargs["batch_size"])
        epochs = int(self.fit_kwargs["epochs"])

        x_np = xarray_to_numpy(x)
        y_np = xarray_to_numpy(y)

        dataset = TimeSeriesDataset(x_np, y_np)
        train_loader = DataLoader(dataset=dataset,
                                  batch_size=batch_size,
                                  shuffle=True)

        # Decay the optimizer's learning rate after every epoch (StepLR default gamma=0.1)
        scheduler = StepLR(self.optimizer, step_size=1)

        for epoch in range(1, epochs + 1):
            self.model.train()
            for batch_idx, (data, target) in enumerate(train_loader):
                # put data to computing device (gpu)
                data, target = data.to(device), target.to(device)

                # Before the backward pass, use the optimizer object to zero all of the
                # gradients for the variables it will update (which are the learnable
                # weights of the model). This is because, by default, gradients are
                # accumulated in buffers (i.e., not overwritten) whenever .backward()
                # is called. See the docs of torch.autograd.backward for more details.
                self.optimizer.zero_grad()

                # Forward pass: compute predicted y by passing x to the model.
                y_pred = self.model(data)

                # Compute loss
                loss = self.loss_fn(y_pred, target)

                # Backward pass: compute gradient of the loss with respect to model
                # parameters
                loss.backward()

                # Calling the step function on an Optimizer makes an update to its
                # parameters
                self.optimizer.step()

                # optionally log the training loss here

            # switch to evaluation mode; a validation routine could run here
            self.model.eval()

            scheduler.step()

        self.model.to("cpu")
        self.is_fitted = True
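This snippet relies on two helpers that are not shown: xarray_to_numpy and TimeSeriesDataset. Here is a minimal sketch of the dataset class, assuming the arrays returned by xarray_to_numpy are indexed by sample along the first axis (both the class body and that layout are assumptions).

    import numpy as np
    import torch
    from torch.utils.data import Dataset


    class TimeSeriesDataset(Dataset):
        """Serves (input, target) pairs backed by two numpy arrays."""

        def __init__(self, x: np.ndarray, y: np.ndarray):
            # Convert once to float tensors; the first axis is assumed to be the sample axis.
            self.x = torch.as_tensor(x, dtype=torch.float32)
            self.y = torch.as_tensor(y, dtype=torch.float32)

        def __len__(self):
            return len(self.x)

        def __getitem__(self, idx):
            return self.x[idx], self.y[idx]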
Example #3
    def fit(self, **kwargs):
        """
        Fits the wrapped sklearn module.

        :param kwargs: The input and target arrays, passed as keyword arguments
        """
        inputs, targets = split_kwargs(kwargs)
        x = self._dataset_to_sklearn_input(inputs)
        target = self._dataset_to_sklearn_input(targets)
        # Remember each target's name together with the number of columns it contributes
        self.targets = list(
            zip(targets.keys(),
                map(lambda t: t.shape[-1] if len(t.shape) > 1 else 1, targets.values())))
        self.module.fit(x, target)
        self.is_fitted = True
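The conversion helper _dataset_to_sklearn_input is not part of the snippet. A plausible sketch, assuming it flattens each xr.DataArray to two dimensions and concatenates them into the single (n_samples, n_features) matrix that sklearn estimators expect (this behaviour is an assumption):

    import numpy as np


    def _dataset_to_sklearn_input(data):
        # Hypothetical: reshape every DataArray to (n_samples, n_features) and
        # stack all of them along the feature axis.
        arrays = [da.values.reshape(da.shape[0], -1) for da in data.values()]
        return np.concatenate(arrays, axis=-1)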
Example #4
    def fit(self, **kwargs: xr.DataArray):
        """
        Calls the compile and the fit method of the wrapped keras module.
        :param x: The input data
        :param y: The target data
        """
        x, y = split_kwargs(kwargs)

        self.targets = list(y.keys())

        if not self.compiled:
            self.model.compile(**self.compile_kwargs)
            self.compiled = True
        self.model.fit(x=x, y=y, **self.fit_kwargs)
        self.is_fitted = True
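Note that this variant hands the xr.DataArray objects to model.fit unchanged, relying on Keras to coerce them to arrays, whereas the next example first converts them to plain numpy arrays via .values.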
Example #5
    def fit(self, **kwargs: xr.DataArray):
        """
        Calls the compile and the fit method of the wrapped keras module.
        :param x: The input data
        :param y: The target data
        """
        x, y = split_kwargs(kwargs)
        x = {name_x: value_x.values for name_x, value_x in x.items()}
        y = {name_y: value_y.values for name_y, value_y in y.items()}
        self.targets = list(y.keys())

        if not self.compiled:
            self.model.compile(**self.compile_kwargs)
            self.compiled = True
        self.model.fit(x=x, y=y, **self.fit_kwargs)
        self.is_fitted = True
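To make the .values conversion concrete, here is a self-contained toy run against a small Keras model; the layer names and shapes are made up for illustration, and the dict keys are assumed to match the model's input and output layer names.

    import numpy as np
    import xarray as xr
    from tensorflow import keras

    x = {"x_in": xr.DataArray(np.random.rand(64, 3), dims=("time", "feature"))}
    y = {"y_out": xr.DataArray(np.random.rand(64, 1), dims=("time", "horizon"))}

    inp = keras.Input(shape=(3,), name="x_in")
    out = keras.layers.Dense(1, name="y_out")(inp)
    model = keras.Model(inp, out)
    model.compile(optimizer="adam", loss="mse")

    # As in the wrapper above: strip the xarray metadata so Keras receives
    # plain numpy arrays keyed by layer name.
    model.fit(x={name: da.values for name, da in x.items()},
              y={name: da.values for name, da in y.items()},
              epochs=1, batch_size=16, verbose=0)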