Example #1
    def _minimise_loss(self, X, y, lipschitz=None):
        """Use the FISTA algorithm to solve the group lasso regularised loss.
        """
        if self.fit_intercept:
            # Add the intercept column before estimating the Lipschitz
            # constant so the step size accounts for it.
            X = _add_intercept_col(X)

        if lipschitz is None:
            lipschitz = self._compute_lipschitz(X, y)

        if not self.fit_intercept:
            # The intercept column is still needed to match the weight
            # layout, but it is excluded from the Lipschitz estimate and
            # its gradient is zeroed out below so it stays fixed at zero.
            X = _add_intercept_col(X)

        def grad(w):
            g = self._grad(X, y, w)
            if not self.fit_intercept:
                # Keep the unused intercept slot fixed at zero.
                g[0] = 0
            return g

        def prox(w):
            # Apply the group-wise L2 proximal map to the feature weights
            # only; the intercept is never penalised.
            b, w_ = _split_intercept(w)
            w_ = _group_l2_prox(w_, self.reg_vector, self.groups_)
            return _join_intercept(b, w_)

        def loss(w):
            X_, y_ = self.subsample(X, y)
            return self._loss(X_, y_, w)

        def callback(x, it_num, previous_x=None):
            X_, y_ = self.subsample(X, y)
            w = x
            previous_w = previous_x

            self.losses_.append(self._loss(X_, y_, w))

            if previous_w is None and _DEBUG:
                print(f"Starting FISTA: ")
                print(f"\tInitial loss: {self._loss(X_, y_, w)}")

            elif _DEBUG:
                print(f"Completed iteration {it_num}:")
                print(f"\tLoss: {self._loss(X_, y_, w)}")
                print(f"\tWeight difference: {la.norm(w-previous_w)}")
                print(f"\tWeight norm: {la.norm(w)}")
                print(f"\tGrad: {la.norm(grad(w))}")

        weights = np.concatenate([self.intercept_, self.coef_])
        weights = fista(
            weights,
            grad=grad,
            prox=prox,
            loss=loss,
            lipschitz=lipschitz,
            n_iter=self.n_iter,
            tol=self.tol,
            callback=callback,
        )
        self.intercept_, self.coef_ = _split_intercept(weights)
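
The prox closure above relies on three private helpers that are not shown
on this page. Below is a minimal sketch of what they plausibly look like,
assuming self.groups_ is a list of index arrays and self.reg_vector holds
one threshold per group; the library's actual signatures may differ.

    import numpy as np

    def _split_intercept(w):
        # The intercept is stored as the first entry of the weight vector.
        return w[:1], w[1:]

    def _join_intercept(b, w):
        # Re-attach the intercept in front of the feature weights.
        return np.concatenate([b, w])

    def _group_l2_prox(w, reg_vector, groups):
        # Block soft-thresholding: scale each group towards zero, and zero
        # out any group whose L2 norm falls below its threshold.
        w = w.copy()
        for group, reg in zip(groups, reg_vector):
            norm = np.linalg.norm(w[group])
            if norm > reg:
                w[group] *= 1 - reg / norm
            else:
                w[group] = 0
        return w
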
Example #2

    def _minimise_loss(self):
        """Use the FISTA algorithm to solve the group lasso regularised loss.
        """

        # Need transition period before the correct regulariser is used without warning
        def callback(x, it_num, previous_x=None):
            X_, y_ = self.subsample(self.X_, self.y_)
            w = x
            previous_w = previous_x

            if self.LOG_LOSSES:
                self.losses_.append(self._loss(X_, y_, w))

            if previous_w is None and _DEBUG:  # pragma: nocover
                print("Starting FISTA: ")
                print("\tInitial loss: {loss}".format(
                    loss=self._loss(X_, y_, w)))

            elif _DEBUG:  # pragma: nocover
                print("Completed iteration {it_num}:".format(it_num=it_num))
                print("\tLoss: {loss}".format(loss=self._loss(X_, y_, w)))
                print("\tWeight difference: {wdiff}".format(
                    wdiff=la.norm(w - previous_w)))
                print("\tWeight norm: {wnorm}".format(wnorm=la.norm(w)))
                print("\tGrad: {gnorm}".format(
                    gnorm=la.norm(self._unregularised_gradient(w))))

        # Unlike the closures in the first version, the gradient, prox and
        # loss callables are bound methods here, and the Lipschitz constant
        # is stored on the estimator rather than computed in this method.
        weights = np.concatenate([self.intercept_, self.coef_])
        weights = fista(
            weights,
            grad=self._unregularised_gradient,
            prox=self._scaled_prox,
            loss=self._subsampled_loss,
            lipschitz=self.lipschitz_,
            n_iter=self.n_iter,
            tol=self.tol,
            callback=callback,
        )
        self.intercept_, self.coef_ = _split_intercept(weights)
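
Both versions delegate the optimisation to a fista helper. A minimal sketch
of a FISTA loop matching the call signature used above could look like the
following; the library's real solver may add adaptive restarts or a
backtracking line search.

    import numpy as np

    def fista(x0, grad, prox, loss, lipschitz,
              n_iter=100, tol=1e-6, callback=None):
        # Minimal FISTA sketch: a gradient step on the smooth part of the
        # loss, a proximal step on the group lasso penalty, and Nesterov
        # momentum between iterates. ``loss`` is accepted to mirror the
        # call above; a fuller solver might use it for restarts or logging.
        x = momentum_x = x0
        t = 1.0
        if callback is not None:
            callback(x, 0, previous_x=None)
        for it_num in range(1, n_iter + 1):
            previous_x = x
            # Proximal gradient step taken from the momentum point, with
            # step size 1 / lipschitz.
            x = prox(momentum_x - grad(momentum_x) / lipschitz)
            # Nesterov momentum update.
            t_next = 0.5 * (1 + np.sqrt(1 + 4 * t ** 2))
            momentum_x = x + (t - 1) / t_next * (x - previous_x)
            t = t_next
            if callback is not None:
                callback(x, it_num, previous_x=previous_x)
            # Stop once the iterates stop moving relative to their size.
            if np.linalg.norm(x - previous_x) <= tol * max(np.linalg.norm(x), 1):
                break
        return x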