Code example #1
    def f(self, beta):
        """Function value.
        """
        if self.penalty_start > 0:
            beta_ = beta[self.penalty_start:, :]
        else:
            beta_ = beta

        return self.l * (maths.norm1(beta_) - self.c)
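
In these examples, maths refers to parsimony.utils.maths, and norm1 is assumed to compute the L1 norm, i.e. the sum of the absolute values of the entries; the beta[self.penalty_start:, :] slice simply excludes unpenalised leading variables such as an intercept. A minimal NumPy sketch of that assumption (illustrative, not the library's implementation):

import numpy as np

def norm1(x):
    # L1 norm: sum of the absolute values of all entries
    # (what parsimony.utils.maths.norm1 is assumed to compute here).
    return np.sum(np.abs(x))

beta = np.array([[1.0], [-2.0], [0.5]])
print(norm1(beta))  # 3.5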
Code example #2
File: l1.py  Project: sb123456789sb/pylearn-parsimony
    def feasible(self, beta):
        """Feasibility of the constraint.

        From the interface "Constraint".
        """
        if self.penalty_start > 0:
            beta_ = beta[self.penalty_start:, :]
        else:
            beta_ = beta

        return maths.norm1(beta_) <= self.c
Code example #3
File: l1.py  Project: neurospin/pylearn-parsimony
    def feasible(self, beta):
        """Feasibility of the constraint.

        From the interface "Constraint".
        """
        if self.penalty_start > 0:
            beta_ = beta[self.penalty_start:, :]
        else:
            beta_ = beta

        return maths.norm1(beta_) <= self.c
Code example #4
    def f(self, beta):
        """Function value.
        """
        if self.penalty_start > 0:
            beta_ = beta[self.penalty_start:, :]
        else:
            beta_ = beta

        if maths.norm(beta_) ** 2.0 > self.l2:
            return consts.FLOAT_INF

        return self.l1 * maths.norm1(beta_)
Code example #5
File: l1.py  Project: sb123456789sb/pylearn-parsimony
    def f(self, beta):
        """ Function value.
        """
        if self.l < consts.TOLERANCE:
            return 0.0

        if self.penalty_start > 0:
            beta_ = beta[self.penalty_start:, :]
        else:
            beta_ = beta

        return self.l * (maths.norm1(beta_) - self.c)
Code example #6
    def feasible(self, beta):
        """Feasibility of the constraint.

        From the interface "Constraint".

        Parameters
        ----------
        beta : Numpy array. The variable to check for feasibility.
        """
        if self.penalty_start > 0:
            beta_ = beta[self.penalty_start:, :]
        else:
            beta_ = beta

        return maths.norm1(beta_) <= self.c
Code example #7
    def f(self, beta):
        """ Function value.
        """
        if self.l < consts.TOLERANCE and self.g < consts.TOLERANCE:
            return 0.0

        if self.penalty_start > 0:
            beta_ = beta[self.penalty_start:, :]
        else:
            beta_ = beta

        A = self.A()
        return maths.norm1(A[0].dot(beta_)) + \
               np.sum(np.sqrt(A[1].dot(beta_) ** 2.0 +
                              A[2].dot(beta_) ** 2.0 +
                              A[3].dot(beta_) ** 2.0))
Code example #8
File: l1tv.py  Project: neurospin/pylearn-parsimony
    def f(self, beta):
        """ Function value.
        """
        if self.l < consts.TOLERANCE and self.g < consts.TOLERANCE:
            return 0.0

        if self.penalty_start > 0:
            beta_ = beta[self.penalty_start:, :]
        else:
            beta_ = beta

        # lambda and gamma are in A.
        A = self.A()
        abeta_tv = A[1].dot(beta_) ** 2
        for k in range(2, len(A)):
            abeta_tv += A[k].dot(beta_) ** 2

        return maths.norm1(A[0].dot(beta_)) + np.sum(np.sqrt(abeta_tv))
Code example #9
File: l1tv.py  Project: PeipeiRAN/pylearn-parsimony
    def f(self, beta):
        """ Function value.
        """
        if self.l < consts.TOLERANCE and self.g < consts.TOLERANCE:
            return 0.0

        if self.penalty_start > 0:
            beta_ = beta[self.penalty_start:, :]
        else:
            beta_ = beta

        # lambda and gamma are in A.
        A = self.A()
        abeta_tv = A[1].dot(beta_) ** 2.0
        for k in range(2, len(A)):
            abeta_tv += A[k].dot(beta_) ** 2

        return maths.norm1(A[0].dot(beta_)) + np.sum(np.sqrt(abeta_tv))
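
Code examples #7 to #9 evaluate a combined L1 plus isotropic total-variation penalty: A[0] carries the (scaled) L1 part and the remaining operators carry the spatial-gradient components of the TV part. A hedged NumPy sketch of the same quantity for a generic list of linear operators A (an illustration under that assumption, not the library's implementation; the helper name l1_plus_tv is hypothetical):

import numpy as np

def l1_plus_tv(A, beta):
    # L1 part: sum of absolute values of A[0].dot(beta).
    l1_part = np.sum(np.abs(A[0].dot(beta)))
    # Isotropic TV part: pointwise Euclidean norm over the remaining
    # gradient operators, summed over all points.
    squares = sum(A[k].dot(beta) ** 2 for k in range(1, len(A)))
    return l1_part + np.sum(np.sqrt(squares))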
Code example #10
    def _f(self, Xbeta_y, y, beta):

        n = y.shape[0]

        if self.mean:
            d = 2.0 * n
        else:
            d = 2.0

        f = (1.0 / d) * np.sum(Xbeta_y ** 2.0)

        if self.penalty_start > 0:
            beta_ = beta[self.penalty_start:, :]
        else:
            beta_ = beta

        f += self.l * maths.norm1(beta_)

        return f
Code example #11
    def _f(self, Xbeta_y, y, beta):

        n = y.shape[0]

        if self.mean:
            d = 2.0 * n
        else:
            d = 2.0

        f = (1.0 / d) * np.sum(Xbeta_y**2.0)

        if self.penalty_start > 0:
            beta_ = beta[self.penalty_start:, :]
        else:
            beta_ = beta

        f += self.l * maths.norm1(beta_)

        return f
Code example #12
            def f(self, l):
                beta = (abs_beta > l) \
                    * (self.beta - l * np.sign(self.beta - l))

                return maths.norm1(beta) - self.c
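
Code example #12 defines a scalar function of the threshold l whose root makes the soft-thresholded vector have L1 norm exactly c; driving it with a root finder yields the projection of beta onto the L1 ball of radius c. A minimal bisection sketch of that idea (an illustration using the standard soft-thresholding form, not the library's Bisection algorithm; the helper name l1_ball_threshold is hypothetical):

import numpy as np

def l1_ball_threshold(beta, c, tol=1e-8):
    # Find the soft-threshold level l such that the shrunken magnitudes
    # max(|beta| - l, 0) sum to c, using plain bisection.
    abs_beta = np.abs(beta)
    if np.sum(abs_beta) <= c:
        return 0.0  # beta is already inside the L1 ball
    lo, hi = 0.0, float(np.max(abs_beta))
    while hi - lo > tol:
        l = 0.5 * (lo + hi)
        if np.sum(np.maximum(abs_beta - l, 0.0)) > c:
            lo = l  # threshold too small: still outside the ball
        else:
            hi = l
    return 0.5 * (lo + hi)

beta = np.array([3.0, -1.0, 0.5])
l = l1_ball_threshold(beta, c=2.0)
projected = np.sign(beta) * np.maximum(np.abs(beta) - l, 0.0)  # L1 norm ~= 2.0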
Code example #13
    def run(self, function, w):

#        self.info.clear()

        if self.info_requested(Info.ok):
            self.info_set(Info.ok, False)
        if self.info_requested(Info.time):
            t = []
        if self.info_requested(Info.fvalue):
            f = []
        if self.info_requested(Info.converged):
            self.info_set(Info.converged, False)

        print "len(w):", len(w)
        print "max_iter:", self.max_iter

        num_iter = [0] * len(w)

        for it in range(1, self.outer_iter + 1):

            all_converged = True

            for i in range(len(w)):
                print("it: %d, i: %d" % (it, i))

                if function.has_nesterov_function(i):
                    print "Block %d has a Nesterov function!" % (i,)
                    func = mb_losses.MultiblockNesterovFunctionWrapper(
                                                               function, w, i)
                    algorithm = self.conesta
                else:
                    func = mb_losses.MultiblockFunctionWrapper(function, w, i)
                    algorithm = self.fista

#                self.alg_info.clear()
#                self.algorithm.set_params(max_iter=self.max_iter - num_iter[i])
#                w[i] = self.algorithm.run(func, w_old[i])
                if i == 1:
                    pass
                w[i] = algorithm.run(func, w[i])

                if algorithm.info_requested(Info.num_iter):
                    num_iter[i] += algorithm.info_get(Info.num_iter)
                if algorithm.info_requested(Info.time):
                    tval = algorithm.info_get(Info.time)
                if algorithm.info_requested(Info.fvalue):
                    fval = algorithm.info_get(Info.fvalue)

                if self.info_requested(Info.time):
                    t = t + tval
                if self.info_requested(Info.fvalue):
                    f = f + fval

                print "l0 :", maths.norm0(w[i]), \
                    ", l1 :", maths.norm1(w[i]), \
                    ", l2²:", maths.norm(w[i]) ** 2.0

            print "f:", fval[-1]

            for i in range(len(w)):

                # Take one ISTA step for use in the stopping criterion.
                step = function.step(w, i)
                w_tilde = function.prox(w[:i] +
                                        [w[i] - step * function.grad(w, i)] +
                                        w[i + 1:], i, step)

#                func = mb_losses.MultiblockFunctionWrapper(function, w, i)
#                step2 = func.step(w[i])
#                w_tilde2 = func.prox(w[i] - step2 * func.grad(w[i]), step2)
#
#                print "diff:", maths.norm(w_tilde - w_tilde2)

                print "err:", maths.norm(w[i] - w_tilde) * (1.0 / step)
                if (1.0 / step) * maths.norm(w[i] - w_tilde) > self.eps:
                    all_converged = False
                    break

            if all_converged:
                print "All converged!"

                if self.info_requested(Info.converged):
                    self.info_set(Info.converged, True)

                break

#            # If all blocks have used max_iter iterations, stop.
#            if np.all(np.asarray(num_iter) >= self.max_iter):
#                break

#            it += 1

        if self.info_requested(Info.num_iter):
            self.info_set(Info.num_iter, num_iter)
        if self.info_requested(Info.time):
            self.info_set(Info.time, t)
        if self.info_requested(Info.fvalue):
            self.info_set(Info.fvalue, f)
        if self.info_requested(Info.ok):
            self.info_set(Info.ok, True)

        return w
Code example #14
    def run(self, function, w):

        if self.info_requested(Info.ok):
            self.info_set(Info.ok, False)
        if self.info_requested(Info.time):
            t = [0.0]
        if self.info_requested(Info.fvalue):
            f = [function.f(w)]
        if self.info_requested(Info.converged):
            self.info_set(Info.converged, False)

        self.algorithm = FISTA(info=self.fista_info,
                               eps=self.eps,
                               max_iter=self.max_iter,
                               min_iter=self.min_iter)

        print "len(w):", len(w)
        print "max_iter:", self.algorithm.max_iter

        num_iter = [0] * len(w)
#        w_old = [0] * len(w)

#        it = 0
#        while True:
        for it in range(1, self.outer_iter + 1):

            all_converged = True

            for i in range(len(w)):
                print("it: %d, i: %d" % (it, i))

#                for j in xrange(len(w)):
#                    w_old[j] = w[j]

                if i == 0:
                    pass
                if i == 1:
                    pass
                if i == 2:
                    pass

                func = mb_losses.MultiblockFunctionWrapper(function, w, i)
#                self.fista_info.clear()
#                self.algorithm.set_params(max_iter=self.max_iter - num_iter[i])
#                w[i] = self.algorithm.run(func, w_old[i])
                w[i] = self.algorithm.run(func, w[i])

                num_iter[i] += self.algorithm.num_iter

                if self.algorithm.info_requested(Info.time):
                    tval = self.algorithm.info_get(Info.time)
                if self.algorithm.info_requested(Info.fvalue):
                    fval = self.algorithm.info_get(Info.fvalue)

#                if Info.converged in self.fista_info:
#                    if not self.fista_info[Info.converged] \
#                            or self.fista_info[Info.num_iter] > 1:
#                        all_converged = False

#                if maths.norm(w_old[i] - w[i]) < self.eps:
#                    converged = True
#                    break

                if self.info_requested(Info.time):
                    t = t + tval
                if self.info_requested(Info.fvalue):
                    f = f + fval

                print "l0 :", maths.norm0(w[i]), \
                    ", l1 :", maths.norm1(w[i]), \
                    ", l2²:", maths.norm(w[i]) ** 2.0

            if self.algorithm.info_requested(Info.fvalue):
                print "f:", fval[-1]

            for i in range(len(w)):

                # Take one ISTA step for use in the stopping criterion.
                step = function.step(w, i)
                w_tilde = function.prox(w[:i] +
                                        [w[i] - step * function.grad(w, i)] +
                                        w[i + 1:], i, step)

#                func = mb_losses.MultiblockFunctionWrapper(function, w, i)
#                step2 = func.step(w[i])
#                w_tilde2 = func.prox(w[i] - step2 * func.grad(w[i]), step2)
#
#                print "diff:", maths.norm(w_tilde - w_tilde2)

#                print "err:", maths.norm(w[i] - w_tilde) * (1.0 / step)
                if maths.norm(w[i] - w_tilde) > step * self.eps:
                    all_converged = False
                    break

            if all_converged:
                print "All converged!"

                if self.info_requested(Info.converged):
                    self.info_set(Info.converged, True)

                break

#            # If all blocks have used max_iter iterations, stop.
#            if np.all(np.asarray(num_iter) >= self.max_iter):
#                break

#            it += 1

        if self.info_requested(Info.num_iter):
            self.info_set(Info.num_iter, num_iter)
        if self.info_requested(Info.time):
            self.info_set(Info.time, t)
        if self.info_requested(Info.fvalue):
            self.info_set(Info.fvalue, f)
        if self.info_requested(Info.ok):
            self.info_set(Info.ok, True)

        return w