Example #1
0
    def test_cg_DLORegularizedGram(self):
        """CG converges (and warmstarting helps) on each regularized Gram form.

        Covers both orientations for the skinny matrix A and the fat
        matrix B: the default Gram, plus an explicit transpose choice.
        """
        operators = (
            linop.DLORegularizedGram(self.A),
            linop.DLORegularizedGram(self.A, transpose=True),
            linop.DLORegularizedGram(self.B),
            linop.DLORegularizedGram(self.B, transpose=False),
        )

        # Diagonal (Jacobi-style) preconditioners matching each operator.
        # NOTE(review): these are constructed and persisted but never passed
        # to the CG helpers below -- confirm whether they were meant to be.
        diagonals = (self.dATA, self.dAAT, self.dBBT, self.dBTB)
        preconditioners = tuple(
            linop.DLODiagonal((1. / (self.rho + d)).persist())
            for d in diagonals)

        for operator, _precond in zip(operators, preconditioners):
            assert cg_reduces_residuals(operator)
            assert cg_warmstart_reduces_iterations(operator)
Example #2
0
def cg_project(A, x, y, tol=1e-8, **options):
    r"""Project the point (x, y) onto the graph G = {(x, y) | y = Ax} via CG.

    The projection is computed as

        :math:`x_{out} = (I + A^TA)^{-1}(A^Ty + x)`
        :math:`y_{out} = Ax_{out}`

    where the linear solve is carried out by conjugate gradient on the
    regularized Gram operator :math:`I + A^TA`.

    Args:
        A: dask linear operator / array of shape (m, n).
        x: dask vector of length n; chunks must align with A's columns.
        y: dask vector of length m; chunks must align with A's rows.
        tol: CG convergence tolerance.
        **options: forwarded to :func:`cg.cg_graph`; may also contain
            ``name`` to override the generated dask task-name token.

    Returns:
        Tuple ``(x_out, y_out, res, iters)``: the projected point (both
        persisted), the CG residual, and the CG iteration count.
    """
    fmt = 'array {} compatible'
    assert A.shape[0] == y.shape[0] and A.shape[1] == x.shape[0], fmt.format(
        'dims')
    assert A.chunks[0] == y.chunks[0] and A.chunks[1] == x.chunks[
        0], fmt.format('chunks')

    # Deterministic token: repeated calls with identical inputs generate
    # identical dask task names (callers may override via options['name']).
    token = options.pop(
        'name', 'cg-project-' + dask.base.tokenize(A, x, y, tol, **options))
    nm_b, nm_x, nm_y = map(lambda nm: nm + '-' + token, ('b', 'x', 'y'))

    # b = A'y + x
    b = atoms2.gemv(1, A, y, 1, x, transpose=True, name=nm_b)
    A_hat = linop.DLORegularizedGram(A, transpose=False)
    x_out, res, iters = cg.cg_graph(A_hat, b, tol=tol, name=nm_x, **options)
    y_out = atoms2.dot(A, x_out, name=nm_y)
    x_out, y_out = dask.persist(x_out, y_out)
    return x_out, y_out, res, iters
Example #3
0
def test_jacobi_preconditioner():
    """jacobi_preconditioner inverts the (shifted) diagonal of each operator form."""
    A = da.random.random((100, 100), chunks=20)
    diag_A = da.diag(A)
    diag_gram = da.diag(A.T.dot(A))

    # (input handed to the preconditioner, diagonal it should invert)
    cases = (
        (A, diag_A),
        (linop.DLODense(A), diag_A),
        (linop.DLOGram(A), diag_gram),
        (linop.DLORegularizedGram(A), 1 + diag_gram),
    )
    for operator, expected_diag in cases:
        assert is_inverse(pre.jacobi_preconditioner(operator), expected_diag)

    # A non-default scalar regularization shifts the Gram diagonal by mu.
    mu = da.random.normal(1, 1, (), chunks=())
    assert is_inverse(
        pre.jacobi_preconditioner(
            linop.DLORegularizedGram(A, regularization=mu)),
        mu + diag_gram)
Example #4
0
    def test_DLORegularizedGram_ops(self):
        """DLORegularizedGram applies rho*I plus the chosen Gram product."""
        rho = self.rho
        A, B, x, y = self.A, self.B, self.x, self.y

        # skinny A: default orientation is A^T A; transpose=True gives A A^T
        gram_AtA = linop.DLORegularizedGram(A, regularization=rho)
        gram_AAt = linop.DLORegularizedGram(
            A, regularization=rho, transpose=True)
        expect_AtA = rho * x + A.T.dot(A.dot(x))
        expect_AAt = rho * y + A.dot(A.T.dot(y))
        assert operations_consistent(gram_AtA, x, x, expect_AtA, expect_AtA)
        assert operations_consistent(gram_AAt, y, y, expect_AAt, expect_AAt)

        # fat B: default orientation is B B^T; transpose=False gives B^T B
        gram_BBt = linop.DLORegularizedGram(B, regularization=rho)
        gram_BtB = linop.DLORegularizedGram(
            B, regularization=rho, transpose=False)
        expect_BBt = rho * x + B.dot(B.T.dot(x))
        expect_BtB = rho * y + B.T.dot(B.dot(y))
        assert operations_consistent(gram_BBt, x, x, expect_BBt, expect_BBt)
        assert operations_consistent(gram_BtB, y, y, expect_BtB, expect_BtB)
Example #5
0
def cgls(A, b, rho, **options):
    """Regularized least squares via CG on the normal equations.

    Solves :math:`(\\rho I + A^TA) x = A^T b` with conjugate gradient and
    reports the data residual :math:`\\|Ax - b\\|`.

    Args:
        A: dask linear operator / array.
        b: right-hand-side dask vector.
        rho: scalar regularization weight.
        **options: forwarded to :func:`cg_graph`.

    Returns:
        Tuple ``(x, res, iters)``: the solution, the computed residual
        norm, and the CG iteration count.
    """
    rhs = atoms2.dot(A, b, transpose=True)
    gram = linop.DLORegularizedGram(A, regularization=rho, transpose=False)
    x, _, iters = cg_graph(gram, rhs, **options)
    residual = da.linalg.norm(atoms2.dot(A, x) - b).compute()
    return x, residual, iters