Example #1
    def test_Hstack(self):
        shape = [5]
        I = linop.Identity(shape)
        x1 = util.randn(shape)
        x2 = util.randn(shape)
        x = util.vec([x1, x2])

        A = linop.Hstack([I, I])
        npt.assert_allclose(A(x), x1 + x2)
        self.check_linop_linear(A)
        self.check_linop_adjoint(A)
        self.check_linop_normal(A)
        self.check_linop_pickleable(A)

        shape = [5, 3]
        I = linop.Identity(shape)
        x1 = util.randn(shape)
        x2 = util.randn(shape)
        x = np.concatenate([x1, x2], axis=1)

        A = linop.Hstack([I, I], axis=1)
        npt.assert_allclose(A(x), x1 + x2)
        self.check_linop_linear(A)
        self.check_linop_adjoint(A)
        self.check_linop_normal(A)
        self.check_linop_pickleable(A)
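
What the first assertion verifies, as a standalone sketch (assuming only that sigpy and numpy are installed): Hstack applies each operator to its own slice of the vectorized input and sums the results.

    import numpy as np
    from sigpy import linop, util

    shape = [5]
    I = linop.Identity(shape)
    A = linop.Hstack([I, I])          # expects the concatenation [x1; x2]
    x1 = util.randn(shape)
    x2 = util.randn(shape)
    # Each block acts on its slice, and the outputs are summed:
    assert np.allclose(A(util.vec([x1, x2])), x1 + x2)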
Example #2
    def _get_ADMM(self):
        r"""Considers the formulation:

        .. math::
            \min_{x, v: G x = v} \frac{1}{2} \|A x - y\|_2^2 +
            \frac{\lambda}{2} \| x - z \|_2^2 + g(v)

        """
        xp = self.x_device.xp
        with self.x_device:
            if self.G is None:
                v = self.x.copy()
            else:
                v = self.G(self.x)

            u = xp.zeros_like(v)

        def minL_x():
            AHy = self.A.H * self.y
            if self.G is None:
                AHy += self.rho * (v - u)
            else:
                AHy += self.rho * self.G.H(v - u)

            if self.z is not None:
                AHy += self.lamda * self.z

            AHA = self.A.N
            I = linop.Identity(self.x.shape)
            if self.G is None:
                AHA += (self.lamda + self.rho) * I
            else:
                if self.lamda > 0:
                    AHA += self.lamda * I

                AHA += self.rho * self.G.H * self.G

            App(ConjugateGradient(AHA, AHy, self.x, P=self.P,
                                  max_iter=self.max_cg_iter),
                show_pbar=False).run()

        def minL_v():
            if self.G is None:
                backend.copyto(v, self.x + u)
            else:
                backend.copyto(v, self.G(self.x) + u)

            if self.proxg is not None:
                backend.copyto(v, self.proxg(1 / self.rho, v))

        I_v = linop.Identity(v.shape)
        if self.G is None:
            I_x = linop.Identity(self.x.shape)
            G = I_x
        else:
            G = self.G

        self.alg = ADMM(minL_x, minL_v, self.x, v, u,
                        G, -I_v, 0, max_iter=self.max_iter)
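
Setting the x-gradient of the augmented Lagrangian to zero shows what minL_x solves:

.. math::

    (A^H A + \lambda I + \rho G^H G) \, x = A^H y + \lambda z + \rho G^H (v - u),

which is exactly the AHA/AHy pair assembled above; when self.G is None, G = I and the \lambda and \rho terms merge into (\lambda + \rho) I.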
Example #3
        def minL_x():
            AHy = self.A.H * self.y
            if self.G is None:
                AHy += self.rho * (v - u)
            else:
                AHy += self.rho * self.G.H(v - u)

            if self.z is not None:
                AHy += self.lamda * self.z

            AHA = self.A.H * self.A
            I = linop.Identity(self.x.shape)
            if self.G is None:
                AHA += (self.lamda + self.rho) * I
            else:
                if self.lamda > 0:
                    AHA += self.lamda * I

                AHA += self.rho * self.G.H * self.G

            App(ConjugateGradient(AHA,
                                  AHy,
                                  self.x,
                                  P=self.P,
                                  max_iter=self.max_cg_iter),
                show_pbar=False).run()
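
The only difference from Example #2 is that AHA is written as self.A.H * self.A instead of self.A.N; these agree because Linop.N is sigpy's normal operator A.H * A. A quick standalone check (assuming sigpy is installed):

    import numpy as np
    from sigpy import linop, util

    A = linop.Identity([5])           # any Linop would do here
    x = util.randn([5])
    # Linop.N is the normal operator, i.e. A.H composed with A
    assert np.allclose(A.N(x), A.H(A(x)))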
Example #4
    def test_Compose(self):
        shape = [5]
        I = linop.Identity(shape)
        A = linop.Compose([I, I])
        x = util.randn(shape)

        npt.assert_allclose(A(x), x)
        self.check_linop_linear(A)
        self.check_linop_adjoint(A)
        self.check_linop_pickleable(A)
Example #5
    def test_Identity(self):
        shape = [5]
        A = linop.Identity(shape)
        x = util.randn(shape)

        npt.assert_allclose(A(x), x)
        self.check_linop_linear(A)
        self.check_linop_adjoint(A)
        self.check_linop_unitary(A)
        self.check_linop_pickleable(A)
Example #6
    def test_Add(self):
        shape = [5]
        I = linop.Identity(shape)
        A = linop.Add([I, I])
        x = util.randn(shape)

        npt.assert_allclose(A(x), 2 * x)
        self.check_linop_linear(A)
        self.check_linop_adjoint(A)
        self.check_linop_pickleable(A)
Example #7
    def test_Diag(self):
        shape = [5]
        I = linop.Identity(shape)
        x = util.randn([10])

        A = linop.Diag([I, I])
        npt.assert_allclose(A(x), x)
        self.check_linop_linear(A)
        self.check_linop_adjoint(A)
        self.check_linop_pickleable(A)

        shape = [5, 3]
        I = linop.Identity(shape)
        x = util.randn([5, 6])

        A = linop.Diag([I, I], iaxis=1, oaxis=1)
        npt.assert_allclose(A(x), x)
        self.check_linop_linear(A)
        self.check_linop_adjoint(A)
        self.check_linop_pickleable(A)
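
Diag builds a block-diagonal operator: each inner operator acts on its own slice of the input, which is why an input of length 10 feeds two length-5 identities. A minimal sketch with two distinct blocks (the scaled operators 2 * I and 3 * I are illustrative, using the scalar-Linop multiplication seen elsewhere in these examples):

    import numpy as np
    from sigpy import linop, util

    I = linop.Identity([5])
    A = linop.Diag([2 * I, 3 * I])    # block-diagonal over the flattened input
    x = util.randn([10])
    out = A(x)
    assert np.allclose(out[:5], 2 * x[:5])   # first block
    assert np.allclose(out[5:], 3 * x[5:])   # second block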
Example #8
    def test_Vstack(self):
        shape = [5]
        I = linop.Identity(shape)
        x = util.randn(shape)

        A = linop.Vstack([I, I])
        npt.assert_allclose(A(x), util.vec([x, x]))
        self.check_linop_linear(A)
        self.check_linop_adjoint(A)
        self.check_linop_pickleable(A)

        shape = [5, 3]
        I = linop.Identity(shape)
        x = util.randn(shape)

        A = linop.Vstack([I, I], axis=1)
        npt.assert_allclose(A(x), np.concatenate([x, x], axis=1))
        self.check_linop_linear(A)
        self.check_linop_adjoint(A)
        self.check_linop_pickleable(A)
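
Vstack and Hstack are adjoints of each other: stacking outputs on one side corresponds to splitting and summing on the other, which is part of what check_linop_adjoint exercises. A standalone check (assuming sigpy is installed):

    import numpy as np
    from sigpy import linop, util

    I = linop.Identity([5])
    V = linop.Vstack([I, I])
    H = linop.Hstack([I, I])
    y = util.randn([10])
    # The adjoint of Vstack([I, I]) behaves like Hstack([I, I])
    assert np.allclose(V.H(y), H(y))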
Example #9
    def _get_ConjugateGradient(self):
        I = linop.Identity(self.x.shape)
        AHA = self.A.H * self.A
        AHy = self.A.H(self.y)

        if self.lamda != 0:
            AHA += self.lamda * I
            if self.z is not None:
                util.axpy(AHy, self.lamda, self.z)

        self.alg = ConjugateGradient(
            AHA, AHy, self.x, P=self.P, max_iter=self.max_iter)
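
The system handed to ConjugateGradient is the normal equation of the Tikhonov-regularized problem:

.. math::

    \min_x \frac{1}{2} \|A x - y\|_2^2 + \frac{\lambda}{2} \|x - z\|_2^2
    \quad \Longrightarrow \quad
    (A^H A + \lambda I) \, x = A^H y + \lambda z,

with util.axpy adding the \lambda z term to AHy in place.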
Example #10
    def _get_GradientMethod(self):
        def gradf(x):
            with self.y_device:
                r = self.A(x)
                r -= self.y

            with self.x_device:
                gradf_x = self.A.H(r)
                if self.lamda != 0:
                    if self.R is None:
                        util.axpy(gradf_x, self.lamda, x)
                    else:
                        util.axpy(gradf_x, self.lamda, self.R.H(self.R(x)))

                if self.mu != 0:
                    util.axpy(gradf_x, self.mu, x - self.z)

                return gradf_x

        I = linop.Identity(self.x.shape)
        AHA = self.A.H * self.A

        if self.lamda != 0:
            if self.R is None:
                AHA += self.lamda * I
            else:
                AHA += self.lamda * self.R.H * self.R

        if self.mu != 0:
            AHA += self.mu * I

        max_eig = MaxEig(AHA,
                         dtype=self.x.dtype,
                         device=self.x_device,
                         max_iter=self.max_power_iter,
                         show_pbar=self.show_pbar).run()

        if max_eig == 0:
            self.alpha = 1
        else:
            self.alpha = 1 / max_eig

        self.alg = GradientMethod(gradf,
                                  self.x,
                                  self.alpha,
                                  proxg=self.proxg,
                                  max_iter=self.max_iter,
                                  accelerate=self.accelerate)
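
The step size comes from the Lipschitz constant of gradf. Since

.. math::

    \nabla f(x) = A^H (A x - y) + \lambda R^H R x + \mu (x - z),

the gradient is Lipschitz with constant equal to the largest eigenvalue of A^H A + \lambda R^H R + \mu I, which MaxEig estimates by power iteration; alpha = 1 / max_eig is then the standard step size for the (accelerated) proximal gradient method, with alpha = 1 as a fallback when the operator is zero.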
Example #11
    def _get_ADMM(self):
        r"""Considers the formulation:

        .. math::
            \min_{x, v: x = v} \frac{1}{2} \|A x - y\|_2^2 +
            \frac{\lambda}{2} \| x - z \|_2^2 + g(v)

        """
        xp = self.x_device.xp
        with self.x_device:
            z = self.x.copy()
            u = xp.zeros_like(self.x)

        def minL_x():
            if self.z is None:
                z_u = self.rho * (z - u)
            else:
                z_u = self.rho * (z - u) + self.lamda * self.z

            z_u /= (self.rho + self.lamda)

            LinearLeastSquares(self.A,
                               self.y,
                               self.x,
                               lamda=self.lamda + self.rho,
                               z=z_u,
                               P=self.P,
                               max_iter=self.max_cg_iter,
                               show_pbar=self.show_pbar,
                               leave_pbar=False).run()

        def minL_z():
            if self.proxg is None:
                backend.copyto(z, self.x + u)
            else:
                backend.copyto(z, self.proxg(1 / self.rho, self.x + u))

        I = linop.Identity(self.x.shape)
        self.alg = ADMM(minL_x,
                        minL_z,
                        self.x,
                        z,
                        u,
                        I,
                        -I,
                        0,
                        max_iter=self.max_iter)
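
The z_u computed in minL_x comes from completing the square: writing v for the ADMM splitting variable (the local z array) and z for the prior self.z, the two quadratic penalties on x satisfy

.. math::

    \frac{\lambda}{2} \|x - z\|_2^2 + \frac{\rho}{2} \|x - (v - u)\|_2^2
    = \frac{\lambda + \rho}{2} \|x - z_u\|_2^2 + \mathrm{const},
    \qquad
    z_u = \frac{\rho (v - u) + \lambda z}{\rho + \lambda},

which is exactly the lamda=self.lamda + self.rho, z=z_u problem passed to LinearLeastSquares.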
Example #12
    def _get_ConjugateGradient(self):
        I = linop.Identity(self.x.shape)
        AHA = self.A.H * self.A
        AHy = self.A.H(self.y)

        if self.lamda != 0:
            if self.R is None:
                AHA += self.lamda * I
            else:
                AHA += self.lamda * self.R.H * self.R

        if self.mu != 0:
            AHA += self.mu * I
            util.axpy(AHy, self.mu, self.z)

        self.alg = ConjugateGradient(
            AHA, AHy, self.x, P=self.P, max_iter=self.max_iter)
Example #13
    def _get_GradientMethod(self):
        with self.y_device:
            AHy = self.A.H(self.y)

        def gradf(x):
            with self.x_device:
                gradf_x = self.A.N(x) - AHy
                if self.lamda != 0:
                    if self.z is None:
                        util.axpy(gradf_x, self.lamda, x)
                    else:
                        util.axpy(gradf_x, self.lamda, x - self.z)

                return gradf_x

        if self.alpha is None:
            I = linop.Identity(self.x.shape)
            AHA = self.A.N
            if self.lamda != 0:
                AHA += self.lamda * I

            max_eig = MaxEig(AHA, dtype=self.x.dtype, device=self.x_device,
                             max_iter=self.max_power_iter,
                             show_pbar=self.show_pbar).run()
            if max_eig == 0:
                self.alpha = 1
            else:
                self.alpha = 1 / max_eig

        self.alg = GradientMethod(
            gradf,
            self.x,
            self.alpha,
            proxg=self.proxg,
            max_iter=self.max_iter,
            accelerate=self.accelerate, tol=self.tol)
Example #14
    def _get_ADMM_G(self):
        r"""Considers the formulation:

        .. math::
            \min_{x, z_1, z_2: x = z_1, G x = z_2}
            \frac{1}{2} \|A z_1 - y\|_2^2 +
            \frac{\lambda}{2} \| z_1 - z \|_2^2 + g(z_2)

        """
        xp = self.x_device.xp
        with self.x_device:
            z = xp.concatenate([self.x.ravel(), self.G(self.x).ravel()])
            u = xp.zeros_like(z)

        I = linop.Identity(self.x.shape)
        I_G = linop.Vstack([I, self.G])

        def minL_x():
            LinearLeastSquares(I_G,
                               z - u,
                               self.x,
                               max_iter=self.max_cg_iter,
                               show_pbar=self.show_pbar,
                               leave_pbar=False).run()

        def minL_z():
            z1 = z[:self.x.size].reshape(self.x.shape)
            z2 = z[self.x.size:].reshape(self.G.oshape)
            u1 = u[:self.x.size].reshape(self.x.shape)
            u2 = u[self.x.size:].reshape(self.G.oshape)

            if self.z is None:
                x_u1 = self.rho * (self.x + u1)
            else:
                x_u1 = self.rho * (self.x + u1) + self.lamda * self.z

            x_u1 /= (self.rho + self.lamda)

            LinearLeastSquares(self.A,
                               self.y,
                               z1,
                               lamda=self.lamda + self.rho,
                               z=x_u1,
                               P=self.P,
                               max_iter=self.max_cg_iter,
                               show_pbar=self.show_pbar,
                               leave_pbar=False).run()

            if self.proxg is None:
                backend.copyto(z2, self.G(self.x) + u2)
            else:
                backend.copyto(z2, self.proxg(1 / self.rho,
                                              self.G(self.x) + u2))

        I_z = linop.Identity(z.shape)
        self.alg = ADMM(minL_x,
                        minL_z,
                        self.x,
                        z,
                        u,
                        I_G,
                        -I_z,
                        0,
                        max_iter=self.max_iter)
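
Here z stacks two blocks: z_1 tracks x (and carries the data and Tikhonov terms in minL_z), while z_2 tracks G x (and carries g via proxg). The constraint is enforced through I_G = Vstack([I, self.G]), so minL_x reduces to the plain least-squares consistency problem

.. math::

    \min_x \frac{1}{2} \left\| \begin{bmatrix} I \\ G \end{bmatrix} x - (z - u) \right\|_2^2,

solved by CG inside LinearLeastSquares.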