Ejemplo n.º 1
0
    def testDiagonalization(self):
        """Eigenpairs of diag(0..n-1) must satisfy A v_i = lambda_i v_i."""
        size = 5
        number_eigenfunctions = 3

        matrix = createMatrix(size, size)

        # Build diag(0, 1, ..., size-1): row i carries the value i on the diagonal.
        for row_index in matrix.localRows():
            diagonal_row = np.zeros(size)
            diagonal_row[row_index] = row_index
            matrix.setRow(row_index, diagonal_row)

        matrix.assemble()

        eigenmoder = EigenmoderStartegySLEPc()
        eigenvalues, eigenvectors = eigenmoder.eigenfunctions(matrix, number_eigenfunctions)

        vector = ParallelVector(matrix.distributionPlan())

        for mode in range(number_eigenfunctions):
            data = eigenvectors.globalRow(mode) if eigenvectors is not None else None

            # Distribute the eigenvector from rank 0, then apply the matrix in place.
            vector.broadcast(data=data, root=0)
            matrix.dot(vector)

            if eigenvectors is not None:
                residual = np.linalg.norm(vector.fullData()
                                          - eigenvalues[mode] * eigenvectors.globalRow(mode))
                self.assertLess(residual, 1e-12)
Ejemplo n.º 2
0
    def testDot(self):
        """Applying diag(0..n-1) to the i-th unit vector must yield i * e_i."""
        size = 10
        matrix = createMatrix(size, size)

        # Diagonal test matrix: entry (i, i) equals i.
        for row_index in matrix.localRows():
            diagonal_row = np.zeros(size)
            diagonal_row[row_index] = row_index
            matrix.setRow(row_index, diagonal_row)

        matrix.assemble()

        vector = ParallelVector(matrix.distributionPlan())

        for row_index in range(size):
            # Local slice of the global unit vector e_{row_index}: only the rank
            # owning that row sets a nonzero entry.
            local_data = np.zeros(len(matrix.localRows()), dtype=np.complex128)
            if row_index in matrix.localRows():
                local_index = matrix.distributionPlan().globalToLocalIndex(row_index)
                local_data[local_index] = 1.0

            vector.setCollective(local_data)
            matrix.dot(vector)

            expected = np.zeros(size, dtype=np.complex128)
            expected[row_index] = row_index
            self.assertLess(np.linalg.norm(vector.fullData() - expected), 1e-14)
Ejemplo n.º 3
0
    def eigenfunctions(self, matrix, number_modes):
        """Solve the Hermitian eigenproblem for *matrix* with SLEPc.

        :param matrix: distributed matrix exposing petScMatrix()/distributionPlan()
        :param number_modes: number of eigenpairs requested from the solver
        :return: (eigenvalues, eigenvectors_parallel) — complex array of converged
                 eigenvalues and a ParallelMatrix with one eigenvector per row
        """
        import sys, slepc4py

        # SLEPc must be initialized (with command-line options) before first use.
        slepc4py.init(sys.argv)

        from petsc4py import PETSc
        from slepc4py import SLEPc

        solver = SLEPc.EPS()
        solver.create()

        solver.setOperators(matrix.petScMatrix())
        # HEP: Hermitian eigenvalue problem.
        solver.setProblemType(SLEPc.EPS.ProblemType.HEP)
        #solver.setType(SLEPc.EPS.Type.ARNOLDI)
        solver.setFromOptions()
        solver.setTolerances(tol=1e-9, max_it=200)
        solver.setDimensions(nev=number_modes)
        solver.solve()

        Print = PETSc.Sys.Print

        self.log("Number of iterations of the method: %d" % solver.getIterationNumber())

        self.log("Solution method: %s" % solver.getType())

        n_requested, _, _ = solver.getDimensions()
        self.log("Number of requested eigenvalues: %d" % n_requested)

        tolerance, max_iterations = solver.getTolerances()
        self.log("Stopping condition: tol=%.4g, maxit=%d" % (tolerance, max_iterations))

        # The solver may converge a different number of pairs than requested.
        n_converged = solver.getConverged()
        self.log("Number of converged eigenpairs %d" % n_converged)

        eigenvalues = np.zeros(n_converged, dtype=np.complex128)
        result_vector = ParallelVector(matrix.distributionPlan())
        row_plan = DistributionPlan(mpi.COMM_WORLD,
                                    n_columns=matrix.totalShape()[1],
                                    n_rows=n_converged)
        eigenvectors_parallel = ParallelMatrix(row_plan)

        # Work vectors compatible with the operator layout (real / imaginary parts).
        real_part, _ = matrix.petScMatrix().getVecs()
        imag_part, _ = matrix.petScMatrix().getVecs()

        for mode_index in range(n_converged):
            eigenvalue = solver.getEigenpair(mode_index, real_part, imag_part)

            # Gather the distributed eigenvector so every rank holds the full data.
            result_vector.setCollective(real_part.getArray())
            eigenvalues[mode_index] = eigenvalue

            # Only the rank owning this row stores the eigenvector.
            if mode_index in eigenvectors_parallel.localRows():
                eigenvectors_parallel.setRow(mode_index, result_vector.fullData())

        return eigenvalues, eigenvectors_parallel
Ejemplo n.º 4
0
    def eigenfunctions(self, matrix, number_modes):
        """Solve the Hermitian eigenproblem for *matrix* with SLEPc and return
        (eigenvalues, eigenvectors_parallel): a complex array of the converged
        eigenvalues and a ParallelMatrix holding one eigenvector per row.

        :param matrix: distributed matrix exposing petScMatrix()/distributionPlan()
        :param number_modes: number of eigenpairs requested from the solver
        """
        import sys, slepc4py

        # SLEPc must be initialized (with command-line options) before first use.
        slepc4py.init(sys.argv)

        from petsc4py import PETSc
        from slepc4py import SLEPc

        E = SLEPc.EPS()
        E.create()

        E.setOperators(matrix.petScMatrix())
        # HEP: Hermitian eigenvalue problem.
        E.setProblemType(SLEPc.EPS.ProblemType.HEP)
        #E.setType(SLEPc.EPS.Type.ARNOLDI)
        E.setFromOptions()
        E.setTolerances(tol=1e-9, max_it=200)
        E.setDimensions(nev=number_modes)
        E.solve()

        Print = PETSc.Sys.Print  # NOTE(review): unused alias; reporting goes through self.log

        iterations = E.getIterationNumber()
        self.log("Number of iterations of the method: %d" % iterations)

        eps_type = E.getType()
        self.log("Solution method: %s" % eps_type)

        nev, ncv, mpd = E.getDimensions()
        self.log("Number of requested eigenvalues: %d" % nev)

        tol, maxit = E.getTolerances()
        self.log("Stopping condition: tol=%.4g, maxit=%d" % (tol, maxit))

        # The solver may converge a different number of pairs than requested.
        nconv = E.getConverged()
        self.log("Number of converged eigenpairs %d" % nconv)

        eigenvalues = np.zeros(nconv, dtype=np.complex128)
        result_vector = ParallelVector(matrix.distributionPlan())
        plan = DistributionPlan(mpi.COMM_WORLD, n_columns=matrix.totalShape()[1], n_rows=nconv)
        eigenvectors_parallel = ParallelMatrix(plan)

        # Create the results vectors (vr/vi receive the real/imaginary parts).
        vr, wr = matrix.petScMatrix().getVecs()
        vi, wi = matrix.petScMatrix().getVecs()
        #
        for i in range(nconv):
            k = E.getEigenpair(i, vr, vi)

            # Gather the distributed PETSc vector so every rank holds the full data.
            result_vector.setCollective(vr.getArray())
            eigenvalues[i] = k

            # Only the rank owning row i stores the eigenvector.
            if i in eigenvectors_parallel.localRows():
                eigenvectors_parallel.setRow(i, result_vector.fullData())

        return eigenvalues, eigenvectors_parallel
Ejemplo n.º 5
0
    def __init__(self, parallel_matrix, parallel_vector=None):
        """Wrap *parallel_matrix*; create a matching vector when none is given."""
        self._parallel_matrix = parallel_matrix

        # Default to a vector laid out like the matrix's distribution plan.
        if parallel_vector is not None:
            self.setVector(parallel_vector)
        else:
            self.setVector(ParallelVector(parallel_matrix.distributionPlan()))
Ejemplo n.º 6
0
    def __init__(self, twoform):
        """Build a square PETSc-backed operator sized from the twoform's first vector."""
        self._parent = twoform

        # The operator is square with dimension equal to the first vector's length.
        self._n_size = self._parent.vector(0).size

        petsc_matrix = PETSc.Mat().create()
        petsc_matrix.setSizes([self._n_size, self._n_size])
        petsc_matrix.setUp()
        self._petsc_matrix = petsc_matrix

        self._parallel_matrix = ParallelMatrixPETSc(self._petsc_matrix)
        # Share the PETSc-derived layout with the parent and the work vectors.
        plan = self._parallel_matrix.distributionPlan()
        self._parent._distribution_plan = plan
        self._vector_in = ParallelVector(plan)
        self._vector_out = ParallelVector(plan)
        self._distribution_plan = DistributionPlan(communicator=mpi.COMM_WORLD,
                                                   n_columns=self.dimensionSize(),
                                                   n_rows=self.dimensionSize())
Ejemplo n.º 7
0
class PetScOperator(object):
    """Adapter exposing a parent operator's dot() through PETSc's shell-matrix mult interface."""

    def __init__(self, auto_op):
        self._parent = auto_op
        # The distribution plan is created lazily from the first vector seen in mult().
        self._distribution_plan = None

    def _init_vectors(self, petsc_vector):
        """Derive the distribution plan from *petsc_vector* and allocate work vectors."""
        plan = DistributionPlanPETSc(communicator=mpi.COMM_WORLD, petsc_object=petsc_vector)
        self._distribution_plan = plan
        self._parent._distribution_plan = plan
        self._vector_in = ParallelVector(plan)
        self._vector_out = ParallelVector(plan)

    def mult(self, A, x, y):
        """PETSc mult callback: y = parent . x."""
        input_array = x.getArray(readonly=1)
        output_array = y.getArray(readonly=0)

        # Lazy initialization on the first application.
        if self._distribution_plan is None:
            self._init_vectors(x)

        self._vector_in.setCollective(input_array)
        self._parent.dot(self._vector_in, self._vector_out)

        # Write this rank's slice of the result back into the PETSc vector.
        output_array[:] = self._vector_out._local_data
Ejemplo n.º 8
0
    def arnoldi_iteration(self, H, Q, A, number_eigenvectors):
        """Run (or resume) an Arnoldi process for the operator A.

        Builds up H (Hessenberg matrix) and Q (orthonormal Krylov basis, one
        vector per row) using classical Gram-Schmidt orthogonalization.
        Restarting is supported: an existing H/Q pair is extended from where it
        left off (start index determined by _createIterationMatrices).

        :param H: current Hessenberg matrix or None to start fresh
        :param Q: current basis as a distributed matrix, or None
        :param A: distributed operator providing dot() and totalShape()
        :param number_eigenvectors: requested subspace size
        :return: (H[0:k, 0:k], Q) truncated to the k steps actually performed
        """
        H, Q, start_index, m = self._createIterationMatrices(H, Q, A, number_eigenvectors)

        qt_distribution_plan = DistributionPlan(mpi.COMM_WORLD, n_rows=A.totalShape()[0], n_columns=m)
        q_distribution_plan = DistributionPlan(mpi.COMM_WORLD, n_rows=m, n_columns=A.totalShape()[0])

        parallel_vector = ParallelVector(qt_distribution_plan)

        # Seed with the last completed basis vector.
        parallel_vector._full_data[:] = Q.globalRow(start_index-1)

        for k in range(start_index, m):
            # Candidate vector: A applied to the previous basis vector (in place).
            A.dot(parallel_vector, parallel_vector)

            # globalRow/setRow are collective; every rank must reach them in the
            # same order, hence the owning-rank guard around setRow only.
            if k in Q.localRows():
                Q.setRow(k, parallel_vector.fullData())


            q_k = Q.globalRow(k)
            # Classical Gram-Schmidt against all previous basis vectors.
            # NOTE(review): the `or True` disables the alternative (commented)
            # matrix-product orthogonalization path below.
            if k == m or True:
                for j in range(k):
                    q_j = Q.cachedGlobalRow(j)
                    H[j, k-1] = np.vdot(q_j, q_k)
                    q_k -= H[j, k-1] * q_j
            # else:
            #     pv = ParallelVector(qt_distribution_plan)
            #     pv2 = ParallelVector(q_distribution_plan)
            #     pv._full_data[:] = q_k
            #     Q.dot(pv,pv2,complex_conjugate=True)
            #     H[:, k-1] = pv2.fullData()[:]
            #
            #     p=H[:, k-1]
            #     p[k:]=0
            #     pv2._full_data[:] = p
            #     Q.dotForTransposed(pv2, pv)
            #
            #     q_k -= pv.fullData()
            #     H[k, k-1] = norm(q_k)

            Q.resetCacheGlobalRow()

            # Store the orthogonalized (not yet normalized) vector.
            if k in Q.localRows():
                Q.setRow(k, q_k)

            row_data = Q.globalRow(k)
            # Subdiagonal entry: norm of the orthogonalized residual.
            H[k, k-1] = norm(row_data)

            # NOTE(review): computed before the breakdown check below, so a
            # zero residual triggers a divide-by-zero warning before breaking.
            norm_row_data = row_data / H[k, k-1]


            if k%100==0 and Q.distributionPlan().myRank()==0:
                print("Arnoldi iteration: %i/%i"% (k, m-1))
                sys.stdout.flush()

            # Is invariant null space: the Krylov space is exhausted, stop early.
            if np.abs(H[k, k-1]) < 1e-100:
                break

            # Replace the stored row with its normalized version.
            if k in Q.localRows():
                Q.setRow(k, norm_row_data)


            # Next iteration starts from the new basis vector.
            parallel_vector._full_data[:]= norm_row_data

        # Shrink Q to the k steps actually performed (breakdown may stop early).
        new_distribution_plan = DistributionPlan(mpi.COMM_WORLD, n_rows=k, n_columns=A.totalShape()[1])
        Q = Q.shrinkTo(new_distribution_plan)

        return H[0:k,0:k], Q
Ejemplo n.º 9
0
    def arnoldi(self, A, n = 25, accuracy=1e-8, accuracy_projection=None):
        """Restarted Arnoldi iteration: approximate the n largest eigenpairs of A.

        :param A: distributed Hermitian operator providing dot(), totalShape()
                  and distributionPlan()
        :param n: number of eigenpairs requested (capped at the operator dimension)
        :param accuracy: residual tolerance on the last Schur/Ritz vector
        :param accuracy_projection: optional extra tolerance on the coupling
                                    H[-1, -2] relative to the smallest kept eigenvalue
        :return: (eigenvalues, schur_vectors) — eigenvalues sorted descending,
                 one Schur vector per column
        """
        n = min(A.totalShape()[0], n)

        # H: Hessenberg matrix, Q: Schur basis — both built by arnoldi_iteration.
        H = None
        Q = None

        my_rank = A.distributionPlan().myRank()

        # At most 5 restarts of the Arnoldi process.
        # (Fixed: the inner per-mode loop used to reuse `i` and shadow this counter.)
        for restart in range(5):
            H, Q = self.arnoldi_iteration(H, Q, A, n)

            # eigh returns ascending eigenvalues; reverse for descending order.
            r = np.linalg.eigh(H)
            eig_val = r[0][::-1]
            eig_vec = r[1].transpose()[::-1, :]
            eig_vec = eig_vec.transpose()

            schur_vec = np.zeros((A.totalShape()[0], n), dtype=np.complex128)

            # The iteration may have broken down early; H can be smaller than n.
            n = min(H.shape[0], n)

            q_distribution_plan = Q.distributionPlan()
            qt_distribution_plan = DistributionPlan(mpi.COMM_WORLD, n_rows=Q.totalShape()[1], n_columns=Q.totalShape()[0])

            parallel_vector_in = ParallelVector(q_distribution_plan)
            parallel_vector_out = ParallelVector(qt_distribution_plan)

            # Lift every Ritz vector from the Krylov basis back to full space:
            # schur_vec[:, mode] = Q^T . eig_vec[:, mode] (zero-padded to Q's size).
            for mode in range(n):
                full_data = np.append(eig_vec[:, mode], np.zeros(Q.totalShape()[0] - eig_vec[:, mode].shape[0]))
                parallel_vector_in._full_data[:] = full_data
                Q.dotForTransposed(parallel_vector_in, parallel_vector_out)
                schur_vec[:, mode] = parallel_vector_out.fullData()

            # Residual of the last (smallest kept) Ritz pair, normalized by the
            # largest eigenvalue so the tolerance is scale independent.
            parallel_vector_out._full_data[:] = schur_vec[:, n-1]
            A.dot(parallel_vector_out)
            acc = scipy.linalg.norm( parallel_vector_out.fullData()/eig_val.max() - (eig_val[n-1]/eig_val.max()) * schur_vec[:, n-1])

            # Size of the discarded coupling relative to the smallest kept eigenvalue.
            acc2 = np.abs(H[-1, -2] / eig_val[n-2])

            if my_rank == 0:
                print("Accuracy last Schur/ritz vector for normalized matrix: %e"% acc)
                print("Accuracy projection vs smallest eigenvalue: %e"% acc2)

            if accuracy_projection is not None:
                if acc2 <= accuracy_projection and acc <= accuracy:
                    if my_rank == 0:
                        print("Converged")
                        sys.stdout.flush()
                    break
            else:
                if acc <= accuracy:
                    if my_rank == 0:
                        print("Converged")
                        # Fixed: flush only on rank 0, matching the branch above.
                        sys.stdout.flush()
                    break

        return eig_val[0:n], schur_vec[:,0:n]
Ejemplo n.º 10
0
 def parallelDot(self, v):
     """Broadcast *v* from rank 0, apply the operator, and return the full result."""
     work_vector = ParallelVector(self._distribution_plan)
     work_vector.broadcast(v, root=0)
     # In-place application: the result overwrites the broadcast input.
     self.dot(work_vector, work_vector)
     return work_vector.fullData()
Ejemplo n.º 11
0
 def _init_vectors(self, petsc_vector):
     """Derive the distribution plan from *petsc_vector* and allocate work vectors."""
     plan = DistributionPlanPETSc(communicator=mpi.COMM_WORLD, petsc_object=petsc_vector)
     self._distribution_plan = plan
     # The parent operator shares the same layout.
     self._parent._distribution_plan = plan
     self._vector_in = ParallelVector(plan)
     self._vector_out = ParallelVector(plan)
Ejemplo n.º 12
0
def createVector(rows=10, columns=10):
    """Return a ParallelVector laid out by a rows x columns distribution plan."""
    return ParallelVector(createDistributionPlan(rows, columns))