Code example #1
File: losses.py    Project: nguigs/pylearn-parsimony
    def step(self, w, index):
        """The step size to use in descent methods.

        From the interface "StepSize".

        Parameters
        ----------
        w : list of numpy arrays
            The point at which to determine the step size.

        index : int
            Non-negative integer. The index of the block (variable) that the
            step size is computed for.
        """
        all_lipschitz = True
        L = 0.0

        # Add Lipschitz constants from the loss functions.
        fi = self._f[index]
        for j in range(len(fi)):
            fij = fi[j]
            for k in range(len(fij)):
                fijk = fij[k]
                if isinstance(fijk, properties.Gradient):
                    if not isinstance(fijk,
                                      properties.LipschitzContinuousGradient):
                        all_lipschitz = False
                        break
                    else:
                        L += fijk.L(w[index])
                elif isinstance(fijk, mb_properties.MultiblockGradient):
                    if not isinstance(
                            fijk, mb_properties.
                            MultiblockLipschitzContinuousGradient):
                        all_lipschitz = False
                        break
                    else:
                        L += fijk.L([w[index], w[j]], 0)

                if not all_lipschitz:
                    break

        for i in range(len(self._f)):
            fij = self._f[i][index]
            if i != index:  # Do not visit these twice.
                for k in range(len(fij)):
                    fijk = fij[k]
                    if isinstance(fijk, properties.Gradient):
                        # Nothing to add here: in this function, block i acts
                        # only as data (e.g. the y in a logistic regression),
                        # so it contributes nothing to the Lipschitz constant
                        # of the block being updated.
                        pass
                    elif isinstance(fijk, mb_properties.MultiblockGradient):
                        if not isinstance(
                                fijk, mb_properties.
                                MultiblockLipschitzContinuousGradient):
                            all_lipschitz = False
                            break
                        else:
                            L += fijk.L([w[i], w[index]], 1)

        # Add Lipschitz constants from the penalties.
        di = self._d[index]
        for k in range(len(di)):
            if not isinstance(di[k], properties.LipschitzContinuousGradient):
                all_lipschitz = False
                break
            else:
                L += di[k].L()

        Ni = self._N[index]
        for k in range(len(Ni)):
            if not isinstance(Ni[k], properties.LipschitzContinuousGradient):
                all_lipschitz = False
                break
            else:
                L += Ni[k].L()

        step = 0.0
        if all_lipschitz and L >= consts.TOLERANCE:
            step = 1.0 / L
        else:
            # If not all functions have Lipschitz continuous gradients, try
            # to find the step size through a backtracking line search.
            class F(properties.Function, properties.Gradient):
                def __init__(self, func, w, index):
                    self.func = func
                    self.w = w
                    self.index = index

                def f(self, x):

                    # Temporarily replace the index-th variable with x.
                    w_old = self.w[self.index]
                    self.w[self.index] = x
                    f = self.func.f(self.w)
                    self.w[self.index] = w_old

                    return f

                def grad(self, x):

                    # Temporarily replace the index-th variable with x.
                    w_old = self.w[self.index]
                    self.w[self.index] = x
                    g = self.func.grad(self.w, self.index)
                    self.w[self.index] = w_old

                    return g

            func = F(self, w, index)
            p = -self.grad(w, index)

            from parsimony.algorithms.utils import BacktrackingLineSearch
            import parsimony.functions.penalties as penalties
            line_search = BacktrackingLineSearch(
                condition=penalties.SufficientDescentCondition, max_iter=30)
            a = np.sqrt(1.0 / self.X[index].shape[1])  # Arbitrarily "small".
            step = line_search.run(func,
                                   w[index],
                                   p,
                                   rho=0.5,
                                   a=a,
                                   condition_params={"c": 1e-4})

        return step
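
A step size computed this way is typically consumed by a block coordinate descent loop that, for each block, asks the function object for a step and then takes a gradient step on that block. The following is a minimal sketch of such a loop, assuming a function object exposing the step(w, index) and grad(w, index) methods shown above; the names block_gradient_descent, function, w and max_iter are hypothetical and not part of pylearn-parsimony.

def block_gradient_descent(function, w, max_iter=100):
    # `function` is assumed to expose the step(w, index) and grad(w, index)
    # methods shown above; `w` is a list of numpy arrays, one per block.
    for it in range(max_iter):
        for index in range(len(w)):
            t = function.step(w, index)        # 1/L, or a line-search step
            grad = function.grad(w, index)     # gradient w.r.t. block `index`
            w[index] = w[index] - t * grad     # descent step on that block
    return w
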
Code example #2
File: losses.py    Project: neurospin/pylearn-parsimony
    def step(self, w, index):
        """The step size to use in descent methods.

        From the interface "StepSize".

        Parameters
        ----------
        w : list of numpy arrays
            The point at which to determine the step size.

        index : int
            Non-negative integer. The index of the block (variable) that the
            step size is computed for.
        """
        all_lipschitz = True
        L = 0.0

        # Add Lipschitz constants from the loss functions.
        fi = self._f[index]
        for j in range(len(fi)):
            fij = fi[j]
            for k in range(len(fij)):
                fijk = fij[k]
                if isinstance(fijk, properties.Gradient):
                    if not isinstance(fijk,
                                      properties.LipschitzContinuousGradient):
                        all_lipschitz = False
                        break
                    else:
                        L += fijk.L(w[index])
                elif isinstance(fijk, mb_properties.MultiblockGradient):
                    if not isinstance(fijk,
                                      mb_properties.MultiblockLipschitzContinuousGradient):
                        all_lipschitz = False
                        break
                    else:
                        L += fijk.L([w[index], w[j]], 0)

                if not all_lipschitz:
                    break

        for i in range(len(self._f)):
            fij = self._f[i][index]
            if i != index:  # Do not visit these twice.
                for k in range(len(fij)):
                    fijk = fij[k]
                    if isinstance(fijk, properties.Gradient):
                        # Nothing to add here: in this function, block i acts
                        # only as data (e.g. the y in a logistic regression),
                        # so it contributes nothing to the Lipschitz constant
                        # of the block being updated.
                        pass
                    elif isinstance(fijk, mb_properties.MultiblockGradient):
                        if not isinstance(fijk,
                                          mb_properties.MultiblockLipschitzContinuousGradient):
                            all_lipschitz = False
                            break
                        else:
                            L += fijk.L([w[i], w[index]], 1)

        # Add Lipschitz constants from the penalties.
        di = self._d[index]
        for k in range(len(di)):
            if not isinstance(di[k], properties.LipschitzContinuousGradient):
                all_lipschitz = False
                break
            else:
                L += di[k].L()

        Ni = self._N[index]
        for k in range(len(Ni)):
            if not isinstance(Ni[k], properties.LipschitzContinuousGradient):
                all_lipschitz = False
                break
            else:
                L += Ni[k].L()

        step = 0.0
        if all_lipschitz and L >= consts.TOLERANCE:
            step = 1.0 / L
        else:
            # If not all functions have Lipschitz continuous gradients, try
            # to find the step size through a backtracking line search.
            class F(properties.Function,
                    properties.Gradient):

                def __init__(self, func, w, index):
                    self.func = func
                    self.w = w
                    self.index = index

                def f(self, x):

                    # Temporarily replace the index-th variable with x.
                    w_old = self.w[self.index]
                    self.w[self.index] = x
                    f = self.func.f(self.w)
                    self.w[self.index] = w_old

                    return f

                def grad(self, x):

                    # Temporarily replace the index-th variable with x.
                    w_old = self.w[self.index]
                    self.w[self.index] = x
                    g = self.func.grad(self.w, self.index)
                    self.w[self.index] = w_old

                    return g

            func = F(self, w, index)
            p = -self.grad(w, index)

            from parsimony.algorithms.utils import BacktrackingLineSearch
            import parsimony.functions.penalties as penalties
            line_search = BacktrackingLineSearch(
                condition=penalties.SufficientDescentCondition, max_iter=30)
            a = np.sqrt(1.0 / self.X[index].shape[1])  # Arbitrarily "small".
            step = line_search.run(func, w[index], p, rho=0.5, a=a,
                                   condition_params={"c": 1e-4})

        return step
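
The fallback branch above delegates to parsimony.algorithms.utils.BacktrackingLineSearch with a SufficientDescentCondition. For orientation, here is a generic backtracking line search with an Armijo-style sufficient descent condition; it only illustrates the idea behind that branch and is not the library's actual implementation (all names below are placeholders).

import numpy as np

def backtracking_line_search(f, grad, x, p, a=1.0, rho=0.5, c=1e-4, max_iter=30):
    # Shrink the step until the Armijo condition
    #     f(x + a * p) <= f(x) + c * a * <grad(x), p>
    # holds, multiplying the candidate step by rho at each failed check.
    fx = f(x)
    slope = np.dot(grad(x).ravel(), p.ravel())  # directional derivative along p
    for _ in range(max_iter):
        if f(x + a * p) <= fx + c * a * slope:
            break
        a *= rho
    return a
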
Code example #3
File: losses.py    Project: nguigs/pylearn-parsimony
    def step(self, w, index):
        """The step size to use in descent methods.

        From the interface "StepSize".
        """

        all_lipschitz = True

        # Add the Lipschitz constants.
        L = 0.0
        fi = self.functions[index]
        for j in range(len(fi)):
            if j != index and fi[j] is not None:
                fij = fi[j]
                if isinstance(fij, properties.LipschitzContinuousGradient):
                    L += fij.L()
                elif isinstance(
                        fij,
                        mb_properties.MultiblockLipschitzContinuousGradient):
                    L += fij.L(w, index)
                else:
                    all_lipschitz = False
                    break

        if all_lipschitz:
            fii = self.functions[index][index]
            for k in range(len(fii)):
                if fii[k] is None:
                    continue
                if isinstance(fii[k], properties.LipschitzContinuousGradient):
                    L += fii[k].L()
                elif isinstance(
                        fii[k],
                        mb_properties.MultiblockLipschitzContinuousGradient):
                    L += fii[k].L(w, index)
                else:
                    all_lipschitz = False
                    break

        if all_lipschitz and L > 0.0:
            t = 1.0 / L
        else:
            # If not all functions have Lipschitz continuous gradients, try
            # to find the step size through a backtracking line search.
            class F(properties.Function, properties.Gradient):
                def __init__(self, func, w, index):
                    self.func = func
                    self.w = w
                    self.index = index

                def f(self, x):

                    # Temporarily replace the index-th variable with x.
                    w_old = self.w[self.index]
                    self.w[self.index] = x
                    f = self.func.f(self.w)
                    self.w[self.index] = w_old

                    return f

                def grad(self, x):

                    # Temporarily replace the index-th variable with x.
                    w_old = self.w[self.index]
                    self.w[self.index] = x
                    g = self.func.grad(self.w, self.index)
                    self.w[self.index] = w_old

                    return g

            func = F(self, w, index)
            p = -self.grad(w, index)

            from algorithms import BacktrackingLineSearch
            import parsimony.functions.penalties as penalties
            line_search = BacktrackingLineSearch(
                condition=penalties.SufficientDescentCondition, max_iter=30)
            a = np.sqrt(1.0 / self.X[index].shape[1])  # Arbitrarily "small".
            t = line_search(func, w[index], p, rho=0.5, a=a, c=1e-4)

        return t
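
In all three examples the Lipschitz branch returns step = 1.0 / L, the classical step size for gradient descent on a function whose gradient is Lipschitz continuous with constant L. The short, self-contained check below illustrates this on a least-squares loss, where L is the largest eigenvalue of X.T.dot(X); it is an illustration only, and every name in it is hypothetical rather than taken from pylearn-parsimony.

import numpy as np

rng = np.random.RandomState(0)
X = rng.randn(50, 10)
y = rng.randn(50, 1)

def f(w):
    return 0.5 * np.sum((X.dot(w) - y) ** 2)

def grad(w):
    return X.T.dot(X.dot(w) - y)

# The gradient of f is Lipschitz continuous with constant equal to the
# largest eigenvalue of X.T.dot(X).
L = np.linalg.eigvalsh(X.T.dot(X)).max()

w = rng.randn(10, 1)
prev = f(w)
for _ in range(100):
    w = w - (1.0 / L) * grad(w)   # the 1/L step used in the examples above
    assert f(w) <= prev + 1e-12   # the objective never increases
    prev = f(w)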