def test_write_non_distributed_npy(self):
    cls = self.__class__
    rank = cls.comm.getRank()
    nproc = cls.comm.getSize()
    if nproc > 1:
        mvector_filename = 'out_mvector_write_test_' + str(nproc)
    else:
        mvector_filename = 'out_mvector_write_test'

    file_dir = os.path.dirname(__file__)
    filename = 'input.yaml'
    problem = Utils.createAlbanyProblem(file_dir + '/' + filename, cls.parallelEnv)

    n_cols = 4
    parameter_map = problem.getParameterMap(0)
    mvector = Tpetra.MultiVector(parameter_map, n_cols, dtype="d")
    mvector[0, :] = 1. * (rank + 1)
    mvector[1, :] = -1. * (rank + 1)
    mvector[2, :] = 3.26 * (rank + 1)
    mvector[3, :] = -3.1 * (rank + 1)

    Utils.writeMVector(file_dir + '/' + mvector_filename, mvector,
                       distributedFile=False, useBinary=True)
def test_read_non_distributed_non_scattered_txt(self):
    cls = self.__class__
    rank = cls.comm.getRank()
    nproc = cls.comm.getSize()
    if nproc > 1:
        mvector_filename = 'in_mvector_read_test_' + str(nproc)
    else:
        mvector_filename = 'in_mvector_read_test'

    file_dir = os.path.dirname(__file__)
    filename = 'input.yaml'
    problem = Utils.createAlbanyProblem(file_dir + '/' + filename, cls.parallelEnv)

    n_cols = 4
    parameter_map = problem.getParameterMap(0)
    mvector = Utils.loadMVector(file_dir + '/' + mvector_filename, n_cols, parameter_map,
                                distributedFile=False, useBinary=False, readOnRankZero=False)

    tol = 1e-8
    mvector_target = np.array([1., -1, 3.26, -3.1]) * (rank + 1)
    for i in range(0, n_cols):
        self.assertTrue(np.abs(mvector[i, 0] - mvector_target[i]) < tol)
def test_all(self):
    cls = self.__class__
    rank = cls.comm.getRank()
    file_dir = os.path.dirname(__file__)

    # Create an Albany problem:
    filename = 'input_conductivity_dist_paramT.yaml'
    problem = Utils.createAlbanyProblem(file_dir + '/' + filename, cls.parallelEnv)

    n_vecs = 4
    parameter_map = problem.getParameterMap(0)
    num_elems = parameter_map.getNodeNumElements()

    # Generate vectors with random entries:
    omega = Tpetra.MultiVector(parameter_map, n_vecs, dtype="d")
    for i in range(n_vecs):
        omega[i, :] = np.random.randn(num_elems)

    # Call the orthonormalization method:
    wpa.orthogTpMVecs(omega, 2)

    # Check that the vectors are now orthonormal:
    tol = 1.e-12
    for i in range(n_vecs):
        for j in range(i + 1):
            omegaiTomegaj = Utils.inner(omega[i, :], omega[j, :], cls.comm)
            if rank == 0:
                if i == j:
                    self.assertTrue(abs(omegaiTomegaj - 1.0) < tol)
                else:
                    self.assertTrue(abs(omegaiTomegaj - 0.0) < tol)
def test_all(self):
    cls = self.__class__
    rank = cls.comm.getRank()
    file_dir = os.path.dirname(__file__)

    # Create an Albany problem:
    filename = 'input_conductivity_dist_paramT.yaml'
    problem = Utils.createAlbanyProblem(file_dir + '/' + filename, cls.parallelEnv)

    parameter_map = problem.getParameterMap(0)
    parameter = Tpetra.MultiVector(parameter_map, 1, dtype="d")
    num_elems = parameter_map.getNodeNumElements()
    parameter[0, :] = 2.0 * np.ones(num_elems)

    problem.performSolve()

    state_map = problem.getStateMap()
    state = Tpetra.MultiVector(state_map, 1, dtype="d")
    state[0, :] = problem.getState()

    state_ref = Utils.loadMVector('state_ref', 1, state_map,
                                  distributedFile=False, useBinary=False, readOnRankZero=True)

    stackedTimer = problem.getStackedTimer()
    setup_time = stackedTimer.accumulatedTime("PyAlbany: Setup Time")
    print("setup_time = " + str(setup_time))

    tol = 1.e-8
    self.assertTrue(np.linalg.norm(state_ref[0, :] - state[0, :]) < tol)
def main(parallelEnv):
    comm = MPI.COMM_WORLD
    myGlobalRank = comm.rank

    # Create an Albany problem:
    filename = "input_dirichletT.yaml"
    parameter = Utils.createParameterList(filename, parallelEnv)
    problem = Utils.createAlbanyProblem(parameter, parallelEnv)

    parameter_map_0 = problem.getParameterMap(0)
    parameter_0 = Tpetra.Vector(parameter_map_0, dtype="d")

    N = 200
    p_min = -2.
    p_max = 2.

    # Generate N samples randomly chosen in [p_min, p_max]:
    p = np.random.uniform(p_min, p_max, N)
    QoI = np.zeros((N,))

    # Loop over the N samples and evaluate the quantity of interest:
    for i in range(0, N):
        parameter_0[0] = p[i]
        problem.setParameter(0, parameter_0)
        problem.performSolve()
        response = problem.getResponse(0)
        QoI[i] = response.getData()[0]

    if myGlobalRank == 0:
        if printPlot:
            f, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=(16, 6))
            ax1.hist(p)
            ax1.set_ylabel('Counts')
            ax1.set_xlabel('Random parameter')
            ax2.scatter(p, QoI)
            ax2.set_ylabel('Quantity of interest')
            ax2.set_xlabel('Random parameter')
            ax3.hist(QoI)
            ax3.set_ylabel('Counts')
            ax3.set_xlabel('Quantity of interest')
            plt.savefig('UQ.jpeg', dpi=800)
            plt.close()
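# A minimal driver for main() above; this is a sketch, not part of the original
# script, and it mirrors the guard used by the other examples in this collection
# (see example_0.py and the speedup script). It assumes the usual imports
# (Teuchos from PyTrilinos, Utils from PyAlbany) and the printPlot flag are in scope.
if __name__ == "__main__":
    comm = Teuchos.DefaultComm.getComm()
    parallelEnv = Utils.createDefaultParallelEnv(comm)
    main(parallelEnv)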
def test_all(self):
    cls = self.__class__
    rank = cls.comm.getRank()
    file_dir = os.path.dirname(__file__)

    # Create an Albany problem:
    filename = 'input_conductivity_dist_paramT.yaml'
    problem = Utils.createAlbanyProblem(file_dir + '/' + filename, cls.parallelEnv)

    n_directions = 4
    parameter_map = problem.getParameterMap(0)
    directions = Tpetra.MultiVector(parameter_map, n_directions, dtype="d")
    directions[0, :] = 1.
    directions[1, :] = -1.
    directions[2, :] = 3.
    directions[3, :] = -3.

    problem.setDirections(0, directions)
    problem.performSolve()

    response = problem.getResponse(0)
    sensitivity = problem.getSensitivity(0, 0)
    hessian = problem.getReducedHessian(0, 0)

    g_target = 3.23754626955999991e-01
    norm_target = 8.94463776843999921e-03
    h_target = np.array([0.009195356672103817, 0.009195356672103817,
                         0.027586070971800013, 0.027586070971800013])

    g_data = response.getData()
    norm = Utils.norm(sensitivity.getData(0), cls.comm)

    print("g_target = " + str(g_target))
    print("g_data[0] = " + str(g_data[0]))
    print("norm = " + str(norm))
    print("norm_target = " + str(norm_target))

    hessian_norms = np.zeros((n_directions,))
    for i in range(0, n_directions):
        hessian_norms[i] = Utils.norm(hessian.getData(i), cls.comm)

    tol = 1e-8
    if rank == 0:
        self.assertTrue(np.abs(g_data[0] - g_target) < tol)
        self.assertTrue(np.abs(norm - norm_target) < tol)
        for i in range(0, n_directions):
            self.assertTrue(np.abs(hessian_norms[i] - h_target[i]) < tol)
def test_all(self):
    comm = Teuchos.DefaultComm.getComm()
    rank = comm.getRank()
    file_dir = os.path.dirname(__file__)

    # Create an Albany problem:
    filename = 'input_conductivity_dist_paramT.yaml'
    problem = Utils.createAlbanyProblem(file_dir + '/' + filename)

    n_directions = 4
    parameter_map = problem.getParameterMap(0)
    directions = Tpetra.MultiVector(parameter_map, n_directions, dtype="d")
    directions[0, :] = 1.
    directions[1, :] = -1.
    directions[2, :] = 3.
    directions[3, :] = -3.

    problem.setDirections(0, directions)
    problem.performSolve()

    response = problem.getResponse(0)
    sensitivity = problem.getSensitivity(0, 0)
    hessian = problem.getReducedHessian(0, 0)

    g_target = 3.23754626955999991e-01
    norm_target = 8.94463776843999921e-03
    h_target = np.array([4.2121719763904516e-05,
                         -4.21216874727712e-05,
                         0.00012636506241831498,
                         -0.00012636506241831496])

    g_data = response.getData()
    norm = Utils.norm(sensitivity.getData(0), comm)

    print("g_target = " + str(g_target))
    print("g_data[0] = " + str(g_data[0]))
    print("norm = " + str(norm))
    print("norm_target = " + str(norm_target))

    tol = 1e-8
    if rank == 0:
        self.assertTrue(np.abs(g_data[0] - g_target) < tol)
        self.assertTrue(np.abs(norm - norm_target) < tol)
        for i in range(0, n_directions):
            self.assertTrue(np.abs(hessian[i, 0] - h_target[i]) < tol)
def main(parallelEnv):
    comm = Teuchos.DefaultComm.getComm()

    filename = 'input_conductivity_dist_paramT.yaml'
    problem = Utils.createAlbanyProblem(filename, parallelEnv)

    # We can get from the Albany problem the map of a distributed parameter:
    parameter_map = problem.getParameterMap(0)

    # This map can then be used to construct an RCP to a Tpetra::MultiVector:
    m_directions = 4
    directions = Tpetra.MultiVector(parameter_map, m_directions, dtype="d")

    # Numpy operations, such as assignments, can then be performed on the local entries:
    directions[0, :] = 1.   # Set all entries of v_0 to 1
    directions[1, :] = -1.  # Set all entries of v_1 to -1
    directions[2, :] = 3.   # Set all entries of v_2 to 3
    directions[3, :] = -3.  # Set all entries of v_3 to -3

    # Now that we have an RCP to the directions, we provide it to the Albany problem:
    problem.setDirections(0, directions)

    # Finally, we can solve the problem (which includes applying the Hessian
    # to the directions) and get the Hessian-vector products:
    problem.performSolve()
    hessian = problem.getReducedHessian(0, 0)
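# A sketch of a driver for the walkthrough above (assumed, not part of the
# original snippet), following the createDefaultParallelEnv pattern shown in
# example_0.py; the local Hessian-vector-product entries could then be
# inspected with numpy-style indexing, as done in the GIS example further down.
if __name__ == "__main__":
    comm = Teuchos.DefaultComm.getComm()
    parallelEnv = Utils.createDefaultParallelEnv(comm)
    main(parallelEnv)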
def test_all(self):
    cls = self.__class__
    rank = cls.comm.getRank()
    file_dir = os.path.dirname(__file__)

    # Create an Albany problem:
    filename = "input_dirichlet_mixed_paramsT.yaml"
    parameter = Utils.createParameterList(file_dir + "/" + filename, cls.parallelEnv)
    parameter.sublist("Discretization").set("1D Elements", 10)
    parameter.sublist("Discretization").set("2D Elements", 10)
    problem = Utils.createAlbanyProblem(parameter, cls.parallelEnv)

    parameter_map_0 = problem.getParameterMap(0)
    para_0_new = Tpetra.Vector(parameter_map_0, dtype="d")

    parameter_map_1 = problem.getParameterMap(1)
    para_1_new = Tpetra.Vector(parameter_map_1, dtype="d")
    para_1_new[:] = 0.333333

    n_values = 5
    para_0_values = np.linspace(-1, 1, n_values)
    responses = np.zeros((n_values,))
    responses_target = np.array(
        [0.69247527, 0.48990929, 0.35681844, 0.29320271, 0.2990621])

    tol = 1e-8
    for i in range(0, n_values):
        para_0_new[:] = para_0_values[i]
        problem.setParameter(0, para_0_new)
        problem.performSolve()
        response = problem.getResponse(0)
        responses[i] = response.getData()[0]

    print("p = " + str(para_0_values))
    print("QoI = " + str(responses))

    if rank == 0:
        self.assertTrue(np.abs(np.amax(responses - responses_target)) < tol)
def main(parallelEnv):
    filename = 'input_conductivity_dist_paramT.yaml'
    problem = Utils.createAlbanyProblem(filename, parallelEnv)

    # Now that the Albany problem is constructed, we can solve
    # it and evaluate the response:
    problem.performSolve()
    response = problem.getResponse(0)
    print(response)
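# Hypothetical driver (not part of the original snippet), using the same
# function-scope pattern as the other examples so that the problem is
# destructed before parallelEnv:
if __name__ == "__main__":
    comm = Teuchos.DefaultComm.getComm()
    parallelEnv = Utils.createDefaultParallelEnv(comm)
    main(parallelEnv)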
def test_read_non_distributed_npy(self):
    comm = Teuchos.DefaultComm.getComm()
    rank = comm.getRank()
    nproc = comm.getSize()
    if nproc > 1:
        mvector_filename = 'in_mvector_read_test_' + str(nproc)
    else:
        mvector_filename = 'in_mvector_read_test'

    file_dir = os.path.dirname(__file__)
    filename = 'input.yaml'
    problem = Utils.createAlbanyProblem(file_dir + '/' + filename)

    n_cols = 4
    parameter_map = problem.getParameterMap(0)
    mvector = Utils.loadMVector(file_dir + '/' + mvector_filename, n_cols, parameter_map,
                                distributedFile=False)

    tol = 1e-8
    mvector_target = np.array([1., -1, 3.26, -3.1]) * (rank + 1)
    for i in range(0, n_cols):
        self.assertTrue(np.abs(mvector[i, 0] - mvector_target[i]) < tol)
def main(parallelEnv):
    comm = parallelEnv.comm

    filename = 'input_conductivity_dist_paramT.yaml'
    problem = Utils.createAlbanyProblem(filename, parallelEnv)
    problem.performSolve()

    # We can solve the problem and extract the sensitivity w.r.t. a parameter:
    sensitivity = problem.getSensitivity(0, 0)

    # In this example, we illustrate how to return values as output without
    # relying on Kokkos-related objects; the local data of the vector are deeply
    # copied to a new numpy array:
    sensitivity_out = np.copy(sensitivity[0, :])
    return sensitivity_out
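# Hypothetical driver for the snippet above, following the same guard pattern
# as the other examples: because sensitivity_out is a deep numpy copy, it
# remains valid after main() returns and the Kokkos-backed objects inside the
# function scope are destructed.
if __name__ == "__main__":
    comm = Teuchos.DefaultComm.getComm()
    parallelEnv = Utils.createDefaultParallelEnv(comm)
    sensitivity_out = main(parallelEnv)
    print(sensitivity_out)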
def test_all(self):
    cls = self.__class__
    rank = cls.comm.getRank()
    file_dir = os.path.dirname(__file__)

    # Create an Albany problem:
    filename = "input_dirichlet_mixed_paramsT.yaml"
    parameter = Utils.createParameterList(file_dir + "/" + filename, cls.parallelEnv)
    parameter.sublist("Discretization").set("1D Elements", 10)
    parameter.sublist("Discretization").set("2D Elements", 10)
    problem = Utils.createAlbanyProblem(parameter, cls.parallelEnv)

    g_target_before = 0.35681844
    g_target_after = 0.17388298
    g_target_2 = 0.19570272
    p_0_target = 0.39886689
    p_1_norm_target = 5.37319376038225
    tol = 1e-8

    problem.performSolve()
    response_before_analysis = problem.getResponse(0)

    problem.performAnalysis()

    para_0 = problem.getParameter(0)
    para_1 = problem.getParameter(1)
    print(para_0.getData())
    print(para_1.getData())

    para_1_norm = Utils.norm(para_1.getData(), cls.comm)
    print(para_1_norm)

    if rank == 0:
        self.assertTrue(np.abs(para_0[0] - p_0_target) < tol)
        self.assertTrue(np.abs(para_1_norm - p_1_norm_target) < tol)

    problem.performSolve()
    response_after_analysis = problem.getResponse(0)

    print("Response before analysis " + str(response_before_analysis.getData()))
    print("Response after analysis " + str(response_after_analysis.getData()))

    if rank == 0:
        self.assertTrue(np.abs(response_before_analysis[0] - g_target_before) < tol)
        self.assertTrue(np.abs(response_after_analysis[0] - g_target_after) < tol)

    parameter_map_0 = problem.getParameterMap(0)
    para_0_new = Tpetra.Vector(parameter_map_0, dtype="d")
    para_0_new[:] = 0.0
    problem.setParameter(0, para_0_new)

    problem.performSolve()
    response = problem.getResponse(0)
    print("Response after setParameter " + str(response.getData()))
    if rank == 0:
        self.assertTrue(np.abs(response[0] - g_target_2) < tol)
@classmethod
def setUpClass(cls):
    cls.comm = Teuchos.DefaultComm.getComm()
    cls.parallelEnv = Utils.createDefaultParallelEnv(cls.comm)
def test_all(self):
    debug = True
    cls = self.__class__
    myGlobalRank = cls.comm.getRank()
    nproc = cls.comm.getSize()

    # Create an Albany problem:
    n_params = 2
    filename = "thermal_steady.yaml"
    parameter = Utils.createParameterList(filename, cls.parallelEnv)
    problem = Utils.createAlbanyProblem(parameter, cls.parallelEnv)

    # ---------------------------------------------- #
    # 1. Evaluation of the theta star                 #
    # ---------------------------------------------- #

    l_min = 0.
    l_max = 2.
    n_l = 3
    l = np.linspace(l_min, l_max, n_l)

    theta_star, I_star, F_star, P_star = ee.evaluateThetaStar(l, problem, n_params)

    # ---------------------------------------------- #
    # 2. Evaluation of the prefactor using IS         #
    # ---------------------------------------------- #

    N_samples = 10
    mean = np.array([1., 1.])
    cov = np.array([[1., 0.], [0., 1.]])
    np.random.seed(41)
    samples = np.random.multivariate_normal(mean, cov, N_samples)
    angle_1 = 0.49999 * np.pi
    angle_2 = np.pi - angle_1

    P_IS = ee.importanceSamplingEstimator(mean, cov, theta_star, F_star, P_star,
                                          samples, problem)
    P_mixed = ee.mixedImportanceSamplingEstimator(mean, cov, theta_star, F_star, P_star,
                                                  samples, problem, angle_1, angle_2)

    if myGlobalRank == 0:
        expected_theta_star = np.loadtxt('expected_theta_star_steady_' + str(nproc) + '.txt')
        expected_I_star = np.loadtxt('expected_I_star_steady_' + str(nproc) + '.txt')
        expected_P_star = np.loadtxt('expected_P_star_steady_' + str(nproc) + '.txt')
        expected_F_star = np.loadtxt('expected_F_star_steady_' + str(nproc) + '.txt')
        expected_P_IS = np.loadtxt('expected_P_steady_IS_' + str(nproc) + '.txt')
        expected_P_mixed = np.loadtxt('expected_P_steady_mixed_' + str(nproc) + '.txt')

        tol = 1e-8
        tol_F = 5e-5
        if debug:
            for i in range(0, len(expected_theta_star)):
                print('i = ' + str(i) + ': theta star: expected value = ' + str(expected_theta_star[i])
                      + ', computed value = ' + str(theta_star[i])
                      + ', and diff = ' + str(expected_theta_star[i] - theta_star[i]))
                print('i = ' + str(i) + ': I star: expected value = ' + str(expected_I_star[i])
                      + ', computed value = ' + str(I_star[i])
                      + ', and diff = ' + str(expected_I_star[i] - I_star[i]))
                print('i = ' + str(i) + ': P star: expected value = ' + str(expected_P_star[i])
                      + ', computed value = ' + str(P_star[i])
                      + ', and diff = ' + str(expected_P_star[i] - P_star[i]))
                print('i = ' + str(i) + ': F star: expected value = ' + str(expected_F_star[i])
                      + ', computed value = ' + str(F_star[i])
                      + ', and diff = ' + str(expected_F_star[i] - F_star[i]))
                print('i = ' + str(i) + ': P IS: expected value = ' + str(expected_P_IS[i])
                      + ', computed value = ' + str(P_IS[i])
                      + ', and diff = ' + str(expected_P_IS[i] - P_IS[i]))
                print('i = ' + str(i) + ': P mixed: expected value = ' + str(expected_P_mixed[i])
                      + ', computed value = ' + str(P_mixed[i])
                      + ', and diff = ' + str(expected_P_mixed[i] - P_mixed[i]))

        self.assertTrue(np.amax(np.abs(expected_theta_star - theta_star)) < tol)
        self.assertTrue(np.amax(np.abs(expected_I_star - I_star)) < tol)
        self.assertTrue(np.amax(np.abs(expected_P_star - P_star)) < tol)
        self.assertTrue(np.amax(np.abs(expected_F_star - F_star)) < tol_F)
        self.assertTrue(np.amax(np.abs(expected_P_IS - P_IS)) < tol)
        self.assertTrue(np.amax(np.abs(expected_P_mixed - P_mixed)) < tol)
def main(parallelEnv):
    comm = MPI.COMM_WORLD
    nMaxProcs = comm.Get_size()
    myGlobalRank = comm.rank

    timerNames = ["PyAlbany: Create Albany Problem",
                  "PyAlbany: Set directions",
                  "PyAlbany: Perform Solve",
                  "PyAlbany: Total"]
    nTimers = len(timerNames)

    # Number of times that the test is repeated for a fixed
    # number of MPI processes:
    N = 10

    timers_sec = np.zeros((nMaxProcs, nTimers, N))
    mean_timers_sec = np.zeros((nMaxProcs, nTimers))
    speedUp = np.zeros((nMaxProcs, nTimers))

    for nProcs in range(1, nMaxProcs + 1):
        newGroup = comm.group.Incl(np.arange(0, nProcs))
        newComm = comm.Create_group(newGroup)

        if myGlobalRank < nProcs:
            parallelEnv.comm = Teuchos.MpiComm(newComm)

            for i_test in range(0, N):
                timers = Utils.createTimers(timerNames)
                timers[3].start()

                timers[0].start()
                filename = 'input_conductivity_dist_paramT.yaml'
                problem = Utils.createAlbanyProblem(filename, parallelEnv)
                timers[0].stop()

                timers[1].start()
                n_directions = 4
                parameter_map = problem.getParameterMap(0)
                directions = Tpetra.MultiVector(parameter_map, n_directions, dtype="d")
                directions[0, :] = 1.
                directions[1, :] = -1.
                directions[2, :] = 3.
                directions[3, :] = -3.
                problem.setDirections(0, directions)
                timers[1].stop()

                timers[2].start()
                problem.performSolve()
                timers[2].stop()

                timers[3].stop()

                if myGlobalRank == 0:
                    for j in range(0, nTimers):
                        timers_sec[nProcs - 1, j, i_test] = timers[j].totalElapsedTime()

    if myGlobalRank == 0:
        for i in range(0, nMaxProcs):
            for j in range(0, nTimers):
                mean_timers_sec[i, j] = np.mean(timers_sec[i, j, :])
            speedUp[i, :] = mean_timers_sec[0, :] / mean_timers_sec[i, :]

        print('timers')
        print(mean_timers_sec)
        print('speed up')
        print(speedUp)

        if printPlot:
            fig = plt.figure(figsize=(10, 6))
            plt.plot(np.arange(1, nMaxProcs + 1), np.arange(1, nMaxProcs + 1), '--')
            for j in range(0, nTimers):
                plt.plot(np.arange(1, nMaxProcs + 1), speedUp[:, j], 'o-', label=timerNames[j])
            plt.ylabel('speed up')
            plt.xlabel('number of MPI processes')
            plt.grid(True)
            plt.legend()
            plt.savefig('speedup.jpeg', dpi=800)
            plt.close()


if __name__ == "__main__":
    comm = Teuchos.DefaultComm.getComm()
    parallelEnv = Utils.createDefaultParallelEnv(comm)
    main(parallelEnv)
def test_all(self):
    debug = True
    cls = self.__class__
    myGlobalRank = cls.comm.getRank()
    nproc = cls.comm.getSize()

    # Create an Albany problem:
    n_params = 2
    filename = "thermal_steady_hessian.yaml"
    parameter = Utils.createParameterList(filename, cls.parallelEnv)
    problem = Utils.createAlbanyProblem(parameter, cls.parallelEnv)

    # ---------------------------------------------- #
    # 1. Evaluation of the theta star                 #
    # ---------------------------------------------- #

    l_min = 1.
    l_max = 2.
    n_l = 4
    p = 0.25
    l = l_min + np.power(np.linspace(0.0, 1.0, n_l), p) * (l_max - l_min)

    theta_star, I_star, F_star, P_star = ee.evaluateThetaStar(l, problem, n_params)

    # ---------------------------------------------- #
    # 2. Evaluation of the prefactor using SO         #
    # ---------------------------------------------- #

    mean = np.array([1., 1.])
    cov = np.array([[1., 0.], [0., 1.]])

    P_SO = ee.secondOrderEstimator(mean, cov, l, theta_star, I_star, F_star, P_star, problem)

    if myGlobalRank == 0:
        expected_theta_star = np.loadtxt('expected_theta_star_steady_hessian_' + str(nproc) + '.txt')
        expected_I_star = np.loadtxt('expected_I_star_steady_hessian_' + str(nproc) + '.txt')
        expected_P_star = np.loadtxt('expected_P_star_steady_hessian_' + str(nproc) + '.txt')
        expected_F_star = np.loadtxt('expected_F_star_steady_hessian_' + str(nproc) + '.txt')
        expected_P_SO = np.loadtxt('expected_P_steady_hessian_SO_' + str(nproc) + '.txt')

        tol = 1e-6
        if debug:
            for i in range(0, len(expected_theta_star)):
                print('i = ' + str(i) + ': theta star: expected value = ' + str(expected_theta_star[i])
                      + ', computed value = ' + str(theta_star[i])
                      + ', and diff = ' + str(expected_theta_star[i] - theta_star[i]))
                print('i = ' + str(i) + ': I star: expected value = ' + str(expected_I_star[i])
                      + ', computed value = ' + str(I_star[i])
                      + ', and diff = ' + str(expected_I_star[i] - I_star[i]))
                print('i = ' + str(i) + ': P star: expected value = ' + str(expected_P_star[i])
                      + ', computed value = ' + str(P_star[i])
                      + ', and diff = ' + str(expected_P_star[i] - P_star[i]))
                print('i = ' + str(i) + ': F star: expected value = ' + str(expected_F_star[i])
                      + ', computed value = ' + str(F_star[i])
                      + ', and diff = ' + str(expected_F_star[i] - F_star[i]))
                print('i = ' + str(i) + ': P SO: expected value = ' + str(expected_P_SO[i])
                      + ', computed value = ' + str(P_SO[i])
                      + ', and diff = ' + str(expected_P_SO[i] - P_SO[i]))

        self.assertTrue(np.amax(np.abs(expected_theta_star - theta_star)) < tol)
        self.assertTrue(np.amax(np.abs(expected_I_star - I_star)) < tol)
        self.assertTrue(np.amax(np.abs(expected_P_star - P_star)) < tol)
        self.assertTrue(np.amax(np.abs(expected_F_star - F_star)) < tol)
        self.assertTrue(np.amax(np.abs(expected_P_SO - P_SO)) < tol)
def main(parallelEnv):
    # This example illustrates how PyAlbany can be used to compute
    # reduced Hessian-vector products w.r.t. the basal friction.
    comm = parallelEnv.comm
    rank = comm.getRank()
    nprocs = comm.getSize()

    file_dir = os.path.dirname(__file__)
    filename = 'input_fo_gis_analysis_beta_smbT.yaml'

    parameter_index = 0
    response_index = 0

    timers = Utils.createTimers(["PyAlbany: Create Albany Problem",
                                 "PyAlbany: Read multivector directions",
                                 "PyAlbany: Set directions",
                                 "PyAlbany: Perform Solve",
                                 "PyAlbany: Get Reduced Hessian",
                                 "PyAlbany: Write Reduced Hessian",
                                 "PyAlbany: Total"])
    timers[6].start()

    timers[0].start()
    problem = Utils.createAlbanyProblem(filename, parallelEnv)
    timers[0].stop()

    timers[1].start()
    n_directions = 4
    parameter_map = problem.getParameterMap(0)
    directions = Utils.loadMVector('random_directions', n_directions, parameter_map,
                                   distributedFile=False, useBinary=True)
    timers[1].stop()

    timers[2].start()
    problem.setDirections(parameter_index, directions)
    timers[2].stop()

    timers[3].start()
    problem.performSolve()
    timers[3].stop()

    timers[4].start()
    hessian = problem.getReducedHessian(response_index, parameter_index)
    timers[4].stop()

    timers[5].start()
    Utils.writeMVector("hessian_nprocs_" + str(nprocs), hessian,
                       distributedFile=True, useBinary=False)
    Utils.writeMVector("hessian_all_nprocs_" + str(nprocs), hessian,
                       distributedFile=False, useBinary=False)
    timers[5].stop()

    print(hessian[0, 0])
    print(hessian[1, 0])
    print(hessian[2, 0])
    print(hessian[3, 0])

    timers[6].stop()
    Utils.printTimers(timers, "timers_nprocs_" + str(nprocs) + ".txt")
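# A minimal driver sketch (assumed; the original file's guard is not shown
# here), mirroring the createDefaultParallelEnv pattern used by the other
# examples in this collection:
if __name__ == "__main__":
    comm = Teuchos.DefaultComm.getComm()
    parallelEnv = Utils.createDefaultParallelEnv(comm)
    main(parallelEnv)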
# First, some Python packages are imported:
from PyTrilinos import Tpetra
from PyTrilinos import Teuchos
import numpy as np
from PyAlbany import Utils

# Then, the parallel environment is initialized (including Kokkos):
comm = Teuchos.DefaultComm.getComm()
parallelEnv = Utils.createDefaultParallelEnv(comm,
                                             n_threads=-1,
                                             n_numa=-1,
                                             device_id=-1)
# (Kokkos finalize will be called during the destruction of parallelEnv;
# we will have to enforce that this destructor is called after the destruction
# of every object which relies on Kokkos.)

# Finally, given a filename and the parallel environment, an Albany problem is constructed:
filename = 'input_conductivity_dist_paramT.yaml'
problem = Utils.createAlbanyProblem(filename, parallelEnv)

# Now, we call the problem destructor first (by setting the RCP to null):
problem = None

# And we call the parallelEnv destructor:
parallelEnv = None
comm = Teuchos.DefaultComm.getComm()
rank = comm.getRank()
nprocs = comm.getSize()

file_dir = os.path.dirname(__file__)
filename = 'input_fo_gis_analysis_beta_smbT.yaml'

parameter_index = 0
response_index = 0

timers = Utils.createTimers(["PyAlbany: Create Albany Problem",
                             "PyAlbany: Read multivector directions",
                             "PyAlbany: Set directions",
                             "PyAlbany: Perform Solve",
                             "PyAlbany: Get Reduced Hessian",
                             "PyAlbany: Write Reduced Hessian",
                             "PyAlbany: Total"])
timers[6].start()

timers[0].start()
problem = Utils.createAlbanyProblem(filename)
timers[0].stop()

timers[1].start()
n_directions = 4
parameter_map = problem.getParameterMap(0)
directions = Utils.loadMVector('random_directions', n_directions, parameter_map,
                               distributedFile=False, useBinary=True)
timers[1].stop()
import numpy as np
from PyTrilinos import Teuchos
from PyAlbany import Utils

# As discussed in example_0.py, parallelEnv should be destructed
# after the destruction of all the other variables which rely on
# Kokkos.
# A better way to enforce this than calling the destructor explicitly
# for all the variables is to rely on the garbage-collection capability
# of Python using the scope of a function.
# At the end of the function, all the internal objects (all objects which
# are not inputs or outputs) are destructed.
# Therefore, we can create a function which takes parallelEnv as input, in which
# all Kokkos-related objects will be created and will not be passed as output;
# they will necessarily be destructed before parallelEnv.
# This example revisits example_0.py with the above-mentioned approach.


def main(parallelEnv):
    filename = 'input_conductivity_dist_paramT.yaml'
    problem = Utils.createAlbanyProblem(filename, parallelEnv)


if __name__ == "__main__":
    comm = Teuchos.DefaultComm.getComm()
    parallelEnv = Utils.createDefaultParallelEnv(comm,
                                                 n_threads=-1,
                                                 n_numa=-1,
                                                 device_id=-1)
    main(parallelEnv)
def main(parallelEnv):
    comm = MPI.COMM_WORLD
    myGlobalRank = comm.rank

    # Create an Albany problem:
    n_params = 2
    filename = "thermal_steady_2.yaml"
    parameter = Utils.createParameterList(filename, parallelEnv)
    problem = Utils.createAlbanyProblem(parameter, parallelEnv)

    # ---------------------------------------------- #
    # 1. Evaluation of the theta star                 #
    # ---------------------------------------------- #

    l_min = 8.
    l_max = 20.
    n_l = 5
    p = 1.
    l = l_min + np.power(np.linspace(0.0, 1.0, n_l), p) * (l_max - l_min)

    theta_star, I_star, F_star, P_star = ee.evaluateThetaStar(l, problem, n_params)

    np.savetxt('theta_star_steady_2.txt', theta_star)
    np.savetxt('I_star_steady_2.txt', I_star)
    np.savetxt('P_star_steady_2.txt', P_star)
    np.savetxt('F_star_steady_2.txt', F_star)

    # ---------------------------------------------- #
    # 2. Evaluation of the prefactor using IS         #
    # ---------------------------------------------- #

    N_samples = 100
    mean = np.array([1., 1.])
    cov = np.array([[1., 0.], [0., 1.]])
    samples = np.random.multivariate_normal(mean, cov, N_samples)
    angle_1 = 0.49999 * np.pi
    angle_2 = np.pi - angle_1

    P_IS = ee.importanceSamplingEstimator(mean, cov, theta_star, F_star, P_star,
                                          samples, problem)
    P_mixed = ee.mixedImportanceSamplingEstimator(mean, cov, theta_star, F_star, P_star,
                                                  samples, problem, angle_1, angle_2)
    P_SO = ee.secondOrderEstimator(mean, cov, l, theta_star, I_star, F_star, P_star, problem)

    np.savetxt('P_steady_IS_2.txt', P_IS)
    np.savetxt('P_steady_mixed_2.txt', P_mixed)
    np.savetxt('P_steady_SO_2.txt', P_SO)

    problem.reportTimers()

    # ---------------------------------------------- #
    # 3. Plots                                        #
    # ---------------------------------------------- #

    if n_params == 2:
        X = np.arange(1, 7, 0.2)
        Y = np.arange(1, 7, 0.25)
        Z1, Z2 = evaluate_responses(X, Y, problem, True)
        X, Y = np.meshgrid(X, Y)

    if myGlobalRank == 0:
        if printPlot:
            plt.figure()
            plt.semilogy(F_star, P_star, 'k*-')
            plt.semilogy(F_star, P_IS, 'b*-')
            plt.semilogy(F_star, P_mixed, 'r*--')
            plt.semilogy(F_star, P_SO, 'g*-')
            plt.savefig('extreme_steady_2.jpeg', dpi=800)
            plt.close()

            if n_params == 2:
                plt.figure()
                plt.plot(theta_star[:, 0], theta_star[:, 1], '*-')
                plt.contour(X, Y, Z1, levels=I_star, colors='g')
                plt.contour(X, Y, Z2, levels=F_star, colors='r')
                plt.savefig('theta_star_2.jpeg', dpi=800)
                plt.close()

                fig = plt.figure()
                ax = fig.gca(projection='3d')
                ax.plot_surface(X, Y, Z1)
                plt.savefig('Z1_2.jpeg', dpi=800)
                plt.close()

                fig = plt.figure()
                ax = fig.gca(projection='3d')
                ax.plot_surface(X, Y, Z2)
                plt.savefig('Z2_2.jpeg', dpi=800)
                plt.close()
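# A hypothetical driver for this script; printPlot and evaluate_responses are
# assumed to be defined at module scope in the original file, so only the
# standard guard (mirroring the other examples) is sketched here:
if __name__ == "__main__":
    comm = Teuchos.DefaultComm.getComm()
    parallelEnv = Utils.createDefaultParallelEnv(comm)
    main(parallelEnv)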