def test_read_non_distributed_non_scattered_txt(self):
    """Read a non-distributed, non-scattered text multivector and check that
    each rank sees the expected (rank-scaled) column values."""
    cls = self.__class__
    rank = cls.comm.getRank()
    nproc = cls.comm.getSize()

    # File name carries the process count when running in parallel.
    if nproc > 1:
        mvector_filename = 'in_mvector_read_test_' + str(nproc)
    else:
        mvector_filename = 'in_mvector_read_test'

    file_dir = os.path.dirname(__file__)
    filename = 'input.yaml'
    problem = Utils.createAlbanyProblem(file_dir + '/' + filename,
                                        cls.parallelEnv)

    n_cols = 4
    param_map = problem.getParameterMap(0)
    mvector = Utils.loadMVector(file_dir + '/' + mvector_filename,
                                n_cols,
                                param_map,
                                distributedFile=False,
                                useBinary=False,
                                readOnRankZero=False)

    # Reference values are scaled by (rank + 1) in the input file layout.
    expected = np.array([1., -1, 3.26, -3.1]) * (rank + 1)
    tol = 1e-8
    for col in range(n_cols):
        self.assertTrue(np.abs(mvector[col, 0] - expected[col]) < tol)
def test_all(self):
    """Solve the distributed-conductivity problem with a constant parameter
    and compare the resulting state against a stored reference vector."""
    cls = self.__class__
    rank = cls.comm.getRank()
    file_dir = os.path.dirname(__file__)

    # Create an Albany problem:
    filename = 'input_conductivity_dist_paramT.yaml'
    problem = Utils.createAlbanyProblem(file_dir + '/' + filename,
                                        cls.parallelEnv)

    # Set the distributed parameter to a constant value of 2.
    param_map = problem.getParameterMap(0)
    parameter = Tpetra.MultiVector(param_map, 1, dtype="d")
    n_local = param_map.getNodeNumElements()
    parameter[0, :] = 2.0 * np.ones(n_local)

    problem.performSolve()

    # Gather the computed state and the reference state on the same map.
    state_map = problem.getStateMap()
    state = Tpetra.MultiVector(state_map, 1, dtype="d")
    state[0, :] = problem.getState()
    state_ref = Utils.loadMVector('state_ref',
                                  1,
                                  state_map,
                                  distributedFile=False,
                                  useBinary=False,
                                  readOnRankZero=True)

    # Report the setup time recorded by the stacked timer.
    stacked_timer = problem.getStackedTimer()
    setup_time = stacked_timer.accumulatedTime("PyAlbany: Setup Time")
    print("setup_time = " + str(setup_time))

    tol = 1.e-8
    self.assertTrue(np.linalg.norm(state_ref[0, :] - state[0, :]) < tol)
def test_read_non_distributed_npy(self):
    """Read a non-distributed binary (npy) multivector and check that each
    rank sees the expected (rank-scaled) column values.

    Fix: use the class-level communicator and parallel environment
    (``cls.comm`` / ``cls.parallelEnv``) like the sibling tests, instead of
    fetching a fresh comm from ``Teuchos.DefaultComm`` and creating the
    Albany problem without the shared parallel environment — the two could
    diverge and the problem would not be tied to the test's environment.
    """
    cls = self.__class__
    rank = cls.comm.getRank()
    nproc = cls.comm.getSize()

    # File name carries the process count when running in parallel.
    if nproc > 1:
        mvector_filename = 'in_mvector_read_test_' + str(nproc)
    else:
        mvector_filename = 'in_mvector_read_test'

    file_dir = os.path.dirname(__file__)
    filename = 'input.yaml'
    problem = Utils.createAlbanyProblem(file_dir + '/' + filename,
                                        cls.parallelEnv)

    n_cols = 4
    parameter_map = problem.getParameterMap(0)
    # useBinary is left at its default here (binary/npy read path).
    mvector = Utils.loadMVector(file_dir + '/' + mvector_filename,
                                n_cols,
                                parameter_map,
                                distributedFile=False)

    tol = 1e-8
    # Reference values are scaled by (rank + 1) in the input file layout.
    mvector_target = np.array([1., -1, 3.26, -3.1]) * (rank + 1)
    for i in range(0, n_cols):
        self.assertTrue(np.abs(mvector[i, 0] - mvector_target[i]) < tol)
"PyAlbany: Read multivector directions", "PyAlbany: Set directions", "PyAlbany: Perform Solve", "PyAlbany: Get Reduced Hessian", "PyAlbany: Write Reduced Hessian", "PyAlbany: Total"]) timers[6].start() timers[0].start() problem = Utils.createAlbanyProblem(filename) timers[0].stop() timers[1].start() n_directions=4 parameter_map = problem.getParameterMap(0) directions = Utils.loadMVector('random_directions', n_directions, parameter_map, distributedFile = False, useBinary = True) timers[1].stop() timers[2].start() problem.setDirections(parameter_index, directions) timers[2].stop() timers[3].start() problem.performSolve() timers[3].stop() timers[4].start() hessian = problem.getReducedHessian(response_index, parameter_index) timers[4].stop() timers[5].start()
def main(parallelEnv):
    """Compute reduced Hessian-vector products w.r.t. the basal friction.

    Illustrates a full PyAlbany workflow: create the Albany problem, load
    direction vectors, solve, extract the reduced Hessian, write it to disk,
    and report per-phase timings.
    """
    comm = parallelEnv.comm
    rank = comm.getRank()
    nprocs = comm.getSize()

    file_dir = os.path.dirname(__file__)
    filename = 'input_fo_gis_analysis_beta_smbT.yaml'
    parameter_index = 0
    response_index = 0

    timers = Utils.createTimers([
        "PyAlbany: Create Albany Problem",
        "PyAlbany: Read multivector directions",
        "PyAlbany: Set directions",
        "PyAlbany: Perform Solve",
        "PyAlbany: Get Reduced Hessian",
        "PyAlbany: Write Reduced Hessian",
        "PyAlbany: Total"
    ])
    # Readable aliases for the individual phase timers.
    (create_t, read_t, set_t, solve_t, hess_t, write_t, total_t) = timers

    total_t.start()

    create_t.start()
    problem = Utils.createAlbanyProblem(filename, parallelEnv)
    create_t.stop()

    read_t.start()
    n_directions = 4
    parameter_map = problem.getParameterMap(0)
    directions = Utils.loadMVector('random_directions',
                                   n_directions,
                                   parameter_map,
                                   distributedFile=False,
                                   useBinary=True)
    read_t.stop()

    set_t.start()
    problem.setDirections(parameter_index, directions)
    set_t.stop()

    solve_t.start()
    problem.performSolve()
    solve_t.stop()

    hess_t.start()
    hessian = problem.getReducedHessian(response_index, parameter_index)
    hess_t.stop()

    # Write the Hessian both distributed (one file per rank) and gathered.
    write_t.start()
    Utils.writeMVector("hessian_nprocs_" + str(nprocs),
                       hessian, distributedFile=True, useBinary=False)
    Utils.writeMVector("hessian_all_nprocs_" + str(nprocs),
                       hessian, distributedFile=False, useBinary=False)
    write_t.stop()

    for row in range(4):
        print(hessian[row, 0])

    total_t.stop()
    Utils.printTimers(timers, "timers_nprocs_" + str(nprocs) + ".txt")