def plot_error_and_positions(uinit, stats, a0):
    """
    Plot the relative Hamiltonian error over time and the particle positions.

    Args:
        uinit: initial particle data (provides pos.values and vel.values)
        stats (dict): statistics object from the run (provides 'energy' and 'position' entries)
        a0 (float): problem parameter entering the initial Hamiltonian
    """
    # extract energy values sorted by time
    extract_stats = filter_stats(stats, type='energy')
    sortedlist_stats = sort_stats(extract_stats, sortby='time')

    # initial Hamiltonian H0 = kinetic energy + a0 / |pos|
    R0 = np.linalg.norm(uinit.pos.values[:])
    H0 = 1 / 2 * np.dot(uinit.vel.values[:], uinit.vel.values[:]) + a0 / R0

    # relative deviation of the energy from H0 at each time
    energy_err = [abs(entry[1] - H0) / H0 for entry in sortedlist_stats]

    plt.figure()
    plt.plot(energy_err, 'bo--')
    plt.xlabel('Time')
    plt.ylabel('Error in hamiltonian')
    plt.savefig('spiraling_particle_error_ham.png', rasterized=True, transparent=True, bbox_inches='tight')

    # extract particle positions sorted by time
    extract_stats = filter_stats(stats, type='position')
    sortedlist_stats = sort_stats(extract_stats, sortby='time')

    xpositions = [item[1][0] for item in sortedlist_stats]
    ypositions = [item[1][1] for item in sortedlist_stats]

    plt.figure()
    plt.xlim([-1.5, 1.5])
    plt.ylim([-1.5, 1.5])
    plt.xlabel('x')
    plt.ylabel('y')
    plt.scatter(xpositions, ypositions)
    # NOTE(review): 'positons' looks like a typo in the output filename -- confirm
    # no downstream consumer depends on it before renaming
    plt.savefig('spiraling_particle_positons.png', rasterized=True, transparent=True, bbox_inches='tight')

    plt.show()
def run_reference(Tend): """ Routine to run particular SDC variant Args: Tend (float): end time for dumping """ # load (incomplete) default parameters description, controller_params = setup_parameters_FFT() # setup parameters "in time" t0 = 0 # instantiate controller controller = allinclusive_multigrid_nonMPI( num_procs=1, controller_params=controller_params, description=description) # get initial values on finest level P = controller.MS[0].levels[0].prob uinit = P.u_exact(t0) # call main function to get things done... uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend) # filter statistics by variant (number of iterations) filtered_stats = filter_stats(stats, type='niter') # convert filtered statistics to list of iterations count, sorted by process iter_counts = sort_stats(filtered_stats, sortby='time') # compute and print statistics niters = np.array([item[1] for item in iter_counts]) out = ' Mean number of iterations: %4.2f' % np.mean(niters) print(out) out = ' Range of values for number of iterations: %2i ' % np.ptp(niters) print(out) out = ' Position of max/min number of iterations: %2i -- %2i' % \ (int(np.argmax(niters)), int(np.argmin(niters))) print(out) out = ' Std and var for number of iterations: %4.2f -- %4.2f' % (float( np.std(niters)), float(np.var(niters))) print(out) timing = sort_stats(filter_stats(stats, type='timing_run'), sortby='time') print('Time to solution: %6.4f sec.' % timing[0][1]) print() computed_radii_tmp = sort_stats(filter_stats(stats, type='computed_radius'), sortby='time') computed_radii = np.array([item0[1] for item0 in computed_radii_tmp]) print(len(computed_radii_tmp), len(computed_radii)) fname = 'data/AC_reference_FFT_Tend{:.1e}'.format(Tend) np.savez_compressed(file=fname, uend=uend.values, radius=computed_radii)
def main():
    """
    A simple test program to compare SDC with two flavors of MLSDC for particle dynamics.

    Writes timings and per-iteration energy deviations to 'step_4_D_out.txt'
    and asserts that the final energies of all three runs agree closely.
    """

    # run SDC, MLSDC and MLSDC plus f-interpolation and compare
    stats_sdc, time_sdc = run_penning_trap_simulation(mlsdc=False)
    stats_mlsdc, time_mlsdc = run_penning_trap_simulation(mlsdc=True)
    stats_mlsdc_finter, time_mlsdc_finter = run_penning_trap_simulation(
        mlsdc=True, finter=True)

    f = open('step_4_D_out.txt', 'w')
    out = 'Timings for SDC, MLSDC and MLSDC+finter: %12.8f -- %12.8f -- %12.8f' % \
        (time_sdc, time_mlsdc, time_mlsdc_finter)
    f.write(out + '\n')
    print(out)

    # filter statistics type (etot)
    filtered_stats_sdc = filter_stats(stats_sdc, type='etot')
    filtered_stats_mlsdc = filter_stats(stats_mlsdc, type='etot')
    filtered_stats_mlsdc_finter = filter_stats(stats_mlsdc_finter, type='etot')

    # sort and convert stats to list, sorted by iteration numbers (only pre- and after-step are present here)
    energy_sdc = sort_stats(filtered_stats_sdc, sortby='iter')
    energy_mlsdc = sort_stats(filtered_stats_mlsdc, sortby='iter')
    energy_mlsdc_finter = sort_stats(filtered_stats_mlsdc_finter, sortby='iter')

    # get base energy and show differences
    base_energy = energy_sdc[0][1]
    for item in energy_sdc:
        out = 'Total energy and relative deviation in iteration %2i: %12.10f -- %12.8e' % \
            (item[0], item[1], abs(base_energy - item[1]) / base_energy)
        f.write(out + '\n')
        print(out)
    for item in energy_mlsdc:
        out = 'Total energy and relative deviation in iteration %2i: %12.10f -- %12.8e' % \
            (item[0], item[1], abs(base_energy - item[1]) / base_energy)
        f.write(out + '\n')
        print(out)
    for item in energy_mlsdc_finter:
        out = 'Total energy and relative deviation in iteration %2i: %12.10f -- %12.8e' % \
            (item[0], item[1], abs(base_energy - item[1]) / base_energy)
        f.write(out + '\n')
        print(out)
    f.close()

    # check that SDC and MLSDC (with and without finter) end up with nearly the same energy
    assert abs(energy_sdc[-1][1] - energy_mlsdc[-1][1]) / base_energy < 6E-10, \
        'ERROR: energy deviated too much between SDC and MLSDC, got %s' % (
            abs(energy_sdc[-1][1] - energy_mlsdc[-1][1]) / base_energy)
    assert abs(energy_mlsdc[-1][1] - energy_mlsdc_finter[-1][1]) / base_energy < 8E-10, \
        'ERROR: energy deviated too much after using finter, got %s' % (
            abs(energy_mlsdc[-1][1] - energy_mlsdc_finter[-1][1]) / base_energy)
def main():
    """
    A simple test program to show the energy deviation for different quadrature nodes.

    Writes the energy deviation per collocation class to 'step_3_C_out.txt'
    and checks each deviation against a class-specific bound.
    """
    stats_dict = run_simulation()

    ediff = dict()
    f = open('step_3_C_out.txt', 'w')
    for cclass, stats in stats_dict.items():
        # filter and convert/sort statistics by etot and iterations
        filtered_stats = filter_stats(stats, type='etot')
        energy = sort_stats(filtered_stats, sortby='iter')

        # compare base and final energy
        base_energy = energy[0][1]
        final_energy = energy[-1][1]
        ediff[cclass] = abs(base_energy - final_energy)
        out = "Energy deviation for %s: %12.8e" % (cclass, ediff[cclass])
        f.write(out + '\n')
        print(out)
    f.close()

    # set expected differences and check
    # NOTE(review): the Radau bound (15) is much looser than the others -- presumably
    # intentional for this node choice; confirm against the tutorial text
    ediff_expect = dict()
    ediff_expect['CollGaussRadau_Right'] = 15
    ediff_expect['CollGaussLobatto'] = 1E-05
    ediff_expect['CollGaussLegendre'] = 3E-05
    for k, v in ediff.items():
        assert v < ediff_expect[k], "ERROR: energy deviated too much, got %s" % ediff[k]
def main():
    """
    A simple test program to retrieve user-defined statistics from a run.

    Appends the per-iteration energy deviations to 'step_3_B_out.txt' and
    checks both energy conservation and the final solution error.
    """
    err, stats = run_penning_trap_simulation()

    # filter statistics type (etot)
    filtered_stats = filter_stats(stats, type='etot')

    # sort and convert stats to list, sorted by iteration numbers (only pre- and after-step are present here)
    energy = sort_stats(filtered_stats, sortby='iter')

    # get base energy and show difference
    base_energy = energy[0][1]
    # note: opened in append mode ('a'), so repeated runs accumulate output
    f = open('step_3_B_out.txt', 'a')
    for item in energy:
        out = 'Total energy and deviation in iteration %2i: %12.10f -- %12.8e' % \
            (item[0], item[1], abs(base_energy - item[1]))
        f.write(out + '\n')
        print(out)
    f.close()

    assert abs(base_energy - energy[-1][1]) < 15, 'ERROR: energy deviated too much, got %s' % \
        (base_energy - energy[-1][1])
    assert err < 5E-04, "ERROR: solution is not as exact as expected, got %s" % err
def show_residual_across_simulation(stats, fname='residuals.png'):
    """
    Helper routine to visualize the residuals across the simulation (one block of PFASST)

    Args:
        stats (dict): statistics object from a PFASST run
        fname (str): filename
    """

    # get residuals of the run
    extract_stats = filter_stats(stats, type='residual_post_iteration')

    # find boundaries for x-,y- and c-axis as well as arrays
    maxprocs = 0
    maxiter = 0
    minres = 0
    maxres = -99
    for k, v in extract_stats.items():
        maxprocs = max(maxprocs, getattr(k, 'process'))
        maxiter = max(maxiter, getattr(k, 'iter'))
        minres = min(minres, np.log10(v))
        maxres = max(maxres, np.log10(v))

    # grep residuals and put into array (entries without data stay at -99)
    residual = np.zeros((maxiter, maxprocs + 1))
    residual[:] = -99
    for k, v in extract_stats.items():
        step = getattr(k, 'process')
        # bugfix: was 'iter is not -1' -- identity comparison with an int literal is
        # implementation-defined (SyntaxWarning since Python 3.8); use value comparison.
        # Also renamed the local to avoid shadowing the builtin iter().
        iteration = getattr(k, 'iter')
        if iteration != -1:
            residual[iteration - 1, step] = np.log10(v)

    # Set up latex stuff and fonts
    rc('font', **{"sans-serif": ["Arial"], "size": 30})
    rc('legend', fontsize='small')
    rc('xtick', labelsize='small')
    rc('ytick', labelsize='small')

    # create plot and save
    fig, ax = plt.subplots(figsize=(15, 10))
    cmap = plt.get_cmap('Reds')
    plt.pcolor(residual.T, cmap=cmap, vmin=minres, vmax=maxres)
    cax = plt.colorbar()
    cax.set_label('log10(residual)')
    ax.set_xlabel('iteration')
    ax.set_ylabel('process')
    ax.set_xticks(np.arange(maxiter) + 0.5, minor=False)
    ax.set_yticks(np.arange(maxprocs + 1) + 0.5, minor=False)
    ax.set_xticklabels(np.arange(maxiter) + 1, minor=False)
    ax.set_yticklabels(np.arange(maxprocs + 1), minor=False)
    plt.savefig(fname, rasterized=True, transparent=True, bbox_inches='tight')
def main(): """ A simple test program to demonstrate residual visualization """ # get parameters from Step 6, Part A description, controller_params, t0, Tend = set_parameters_ml() # use 8 processes here num_proc = 8 # instantiate controller controller = allinclusive_classic_nonMPI( num_procs=num_proc, controller_params=controller_params, description=description) # get initial values on finest level P = controller.MS[0].levels[0].prob uinit = P.u_exact(t0) # call main function to get things done... uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend) # compute exact solution and compare (for testing purposes only) uex = P.u_exact(Tend) err = abs(uex - uend) # filter statistics by type (number of iterations) filtered_stats = filter_stats(stats, type='niter') # convert filtered statistics to list of iterations count, sorted by process iter_counts = sort_stats(filtered_stats, sortby='time') # compute and print statistics min_iter = 99 max_iter = 0 f = open('step_7_A_out.txt', 'w') for item in iter_counts: out = 'Number of iterations for time %4.2f: %1i' % item f.write(out + '\n') print(out) min_iter = min(min_iter, item[1]) max_iter = max(max_iter, item[1]) f.close() # call helper routine to produce residual plot fname = 'step_7_residuals.png' show_residual_across_simulation(stats=stats, fname=fname) assert err < 6.1555e-05, 'ERROR: error is too large, got %s' % err assert os.path.isfile(fname), 'ERROR: residual plot has not been created' assert min_iter == 5 and max_iter == 7, "ERROR: number of iterations not as expected, got %s and %s" % \ (min_iter, max_iter)
def main(): """ A simple test program to describe how to get statistics of a run """ # run simulation stats = run_simulation() f = open('step_3_A_out.txt', 'w') out = 'List of registered statistic types: %s' % get_list_of_types(stats) f.write(out + '\n') print(out) # filter statistics by first time intervall and type (residual) filtered_stats = filter_stats(stats, time=0.1, type='residual_post_iteration') # sort and convert stats to list, sorted by iteration numbers residuals = sort_stats(filtered_stats, sortby='iter') for item in residuals: out = 'Residual in iteration %2i: %8.4e' % item f.write(out + '\n') print(out) # filter statistics by type (number of iterations) filtered_stats = filter_stats(stats, type='niter') # convert filtered statistics to list of iterations count, sorted by time iter_counts = sort_stats(filtered_stats, sortby='time') for item in iter_counts: out = 'Number of iterations at time %4.2f: %2i' % item f.write(out + '\n') print(out) f.close() assert all([item[1] == 12 for item in iter_counts]), \ 'ERROR: number of iterations are not as expected, got %s' % iter_counts
def show_results(prob=None, cwd=''): """ Helper function to plot the error of the Hamiltonian Args: prob (str): name of the problem cwd (str): current working directory """ # read in the dill data f = open(cwd + 'data/' + prob + '.dat', 'rb') stats = dill.load(f) f.close() # extract error in hamiltonian and prepare for plotting extract_stats = filter_stats(stats, type='err_hamiltonian') result = defaultdict(list) for k, v in extract_stats.items(): result[k.iter].append((k.time, v)) for k, v in result.items(): result[k] = sorted(result[k], key=lambda x: x[0]) plt_helper.mpl.style.use('classic') plt_helper.setup_mpl() plt_helper.newfig(textwidth=238.96, scale=0.89) # Rearrange data for easy plotting err_ham = 1 for k, v in result.items(): time = [item[0] for item in v] ham = [item[1] for item in v] err_ham = ham[-1] plt_helper.plt.semilogy(time, ham, '-', lw=1, label='Iter ' + str(k)) assert err_ham < 3.7E-08, 'Error in the Hamiltonian is too large for %s, got %s' % ( prob, err_ham) plt_helper.plt.xlabel('Time') plt_helper.plt.ylabel('Error in Hamiltonian') plt_helper.plt.legend(loc='center left', bbox_to_anchor=(1, 0.5)) fname = 'data/' + prob + '_hamiltonian' plt_helper.savefig(fname) assert os.path.isfile(fname + '.pdf'), 'ERROR: plotting did not create PDF file' assert os.path.isfile(fname + '.pgf'), 'ERROR: plotting did not create PGF file' assert os.path.isfile(fname + '.png'), 'ERROR: plotting did not create PNG file'
def show_residual_across_simulation(stats, fname):
    """
    Helper routine to visualize the residuals dependent on the number of iterations across the simulation

    Args:
        stats (dict): statistics object
        fname (str): filename
    """

    # get residuals of the run
    extract_stats = filter_stats(stats, type='residual_post_iteration')

    # find boundaries for x-,y-axis as well as arrays
    maxiter = 0
    for k, v in extract_stats.items():
        maxiter = max(maxiter, getattr(k, 'iter'))

    # grep residuals and put into array (entries without data stay at -99)
    residual = np.zeros(maxiter)
    residual[:] = -99
    for k, v in extract_stats.items():
        # bugfix: was 'iter is not -1' -- identity comparison with an int literal is
        # implementation-defined (SyntaxWarning since Python 3.8); use value comparison.
        # Also renamed the local to avoid shadowing the builtin iter().
        iteration = getattr(k, 'iter')
        if iteration != -1:
            residual[iteration - 1] = np.log10(v)

    # Set up latex stuff and fonts
    rc('font', **{"sans-serif": ["Arial"], "size": 30})
    rc('legend', fontsize='small')
    rc('xtick', labelsize='small')
    rc('ytick', labelsize='small')

    # create plot and save
    fig, ax = plt.subplots(figsize=(15, 10))
    ax.set_xlabel('iteration')
    ax.set_ylabel('log10(residual)')
    plt.axis([0, 14, -12, 3])
    plt.plot(np.linspace(1, maxiter, num=maxiter), residual)
    plt.savefig(fname)

    assert os.path.isfile(fname), 'ERROR: plotting did not create PNG file'
def run_clean_simulations(type=None):
    """
    A simple code to run fault-free simulations

    Args:
        type (str): setup type ('diffusion', 'reaction' or 'vanderpol')

    Returns:
        number of iterations of the single time-step
    """

    if type == 'diffusion':
        description, controller_params = diffusion_setup()
    elif type == 'reaction':
        description, controller_params = reaction_setup()
    elif type == 'vanderpol':
        description, controller_params = vanderpol_setup()
    else:
        raise ValueError('No valid setup type provided, aborting..')

    # set time parameters (a single step of size dt)
    t0 = 0.0
    Tend = description['level_params']['dt']

    # instantiate controller
    controller = controller_nonMPI(num_procs=1,
                                   controller_params=controller_params,
                                   description=description)

    # get initial values on finest level
    P = controller.MS[0].levels[0].prob
    uinit = P.u_exact(t0)

    # this is where the iteration is happening
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # filter statistics by type (number of iterations)
    filtered_stats = filter_stats(stats, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts = sort_stats(filtered_stats, sortby='time')

    # print('This clean run took %s iterations!' % iter_counts[0][1])

    return iter_counts[0][1]
def run_SDC_variant(variant=None, inexact=False):
    """
    Routine to run particular SDC variant

    Args:
        variant (str): string describing the variant
        inexact (bool): flag to use inexact nonlinear solve (or not)

    Returns:
        results and statistics of the run
    """

    # load (incomplete) default parameters
    description, controller_params = setup_parameters()

    # add stuff based on variant
    if variant == 'fully-implicit':
        description['problem_class'] = allencahn_periodic_fullyimplicit
        # description['problem_class'] = allencahn_front_finel
        description['sweeper_class'] = generic_implicit
        if inexact:
            description['problem_params']['newton_maxiter'] = 1
    elif variant == 'semi-implicit':
        description['problem_class'] = allencahn_periodic_semiimplicit
        description['sweeper_class'] = imex_1st_order
        if inexact:
            description['problem_params']['lin_maxiter'] = 10
    elif variant == 'multi-implicit':
        description['problem_class'] = allencahn_periodic_multiimplicit
        description['sweeper_class'] = multi_implicit
        if inexact:
            description['problem_params']['newton_maxiter'] = 1
            description['problem_params']['lin_maxiter'] = 10
    # elif variant == 'multi-implicit_v2':
    #     description['problem_class'] = allencahn_multiimplicit_v2
    #     description['sweeper_class'] = multi_implicit
    #     if inexact:
    #         description['problem_params']['newton_maxiter'] = 1
    else:
        # bugfix: NotImplemented is a constant, not an exception class;
        # raising it fails with a TypeError -- use NotImplementedError
        raise NotImplementedError('Wrong variant specified, got %s' % variant)

    if inexact:
        out = 'Working on inexact %s variant...' % variant
    else:
        out = 'Working on exact %s variant...' % variant
    print(out)

    # setup parameters "in time"
    t0 = 0
    Tend = description['level_params']['dt']

    # instantiate controller
    controller = controller_nonMPI(num_procs=1,
                                   controller_params=controller_params,
                                   description=description)

    # get initial values on finest level
    P = controller.MS[0].levels[0].prob
    uinit = P.u_exact(t0)

    # plt_helper.plt.plot(uinit.values)
    # plt_helper.savefig('uinit', save_pdf=False, save_pgf=False, save_png=True)
    #
    # uex = P.u_exact(Tend)
    # plt_helper.plt.plot(uex.values)
    # plt_helper.savefig('uex', save_pdf=False, save_pgf=False, save_png=True)
    # exit()

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    plt_helper.plt.plot(uend.values)
    plt_helper.savefig('uend', save_pdf=False, save_pgf=False, save_png=True)
    # exit()

    # filter statistics by variant (number of iterations)
    filtered_stats = filter_stats(stats, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts = sort_stats(filtered_stats, sortby='time')

    # compute and print statistics
    niters = np.array([item[1] for item in iter_counts])
    out = ' Mean number of iterations: %4.2f' % np.mean(niters)
    print(out)
    out = ' Range of values for number of iterations: %2i ' % np.ptp(niters)
    print(out)
    out = ' Position of max/min number of iterations: %2i -- %2i' % \
        (int(np.argmax(niters)), int(np.argmin(niters)))
    print(out)
    out = ' Std and var for number of iterations: %4.2f -- %4.2f' % (float(
        np.std(niters)), float(np.var(niters)))
    print(out)

    print(' Iteration count (nonlinear/linear): %i / %i' % (P.newton_itercount, P.lin_itercount))
    # guard against division by zero when a solver was never called
    print(' Mean Iteration count per call: %4.2f / %4.2f' %
          (P.newton_itercount / max(P.newton_ncalls, 1), P.lin_itercount / max(P.lin_ncalls, 1)))

    timing = sort_stats(filter_stats(stats, type='timing_run'), sortby='time')
    print('Time to solution: %6.4f sec.' % timing[0][1])
    print()

    return stats
def show_results(fname, cwd=''):
    """
    Plotting routine

    Produces three figures from the pickled results: timings/iteration counts
    per variant, computed vs. exact radii, and the interface width over time.

    Args:
        fname (str): file name to read in and name plots
        cwd (str): current working directory
    """

    file = open(cwd + fname + '.pkl', 'rb')
    results = dill.load(file)
    file.close()

    # plt_helper.mpl.style.use('classic')
    plt_helper.setup_mpl()

    # set up plot for timings
    fig, ax1 = plt_helper.newfig(textwidth=238.96, scale=1.5, ratio=0.4)

    timings = {}
    niters = {}
    for key, item in results.items():
        timings[key] = sort_stats(filter_stats(item, type='timing_run'), sortby='time')[0][1]
        iter_counts = sort_stats(filter_stats(item, type='niter'), sortby='time')
        niters[key] = np.mean(np.array([item[1] for item in iter_counts]))

    # bar positions and values, sorted by descending timing
    xcoords = [i for i in range(len(timings))]
    sorted_timings = sorted([(key, timings[key]) for key in timings], reverse=True, key=lambda tup: tup[1])
    sorted_niters = [(k, niters[k]) for k in [key[0] for key in sorted_timings]]
    heights_timings = [item[1] for item in sorted_timings]
    heights_niters = [item[1] for item in sorted_niters]
    keys = [
        (item[0][1] + ' ' + item[0][0]).replace('-', '\n').replace('_v2', ' mod.')
        for item in sorted_timings
    ]

    ax1.bar(xcoords, heights_timings, align='edge', width=-0.3, label='timings (left axis)')
    ax1.set_ylabel('time (sec)')

    # second y-axis for the iteration counts
    ax2 = ax1.twinx()
    ax2.bar(xcoords, heights_niters, color='r', align='edge', width=0.3,
            label='iterations (right axis)')
    ax2.set_ylabel('mean number of iterations')

    ax1.set_xticks(xcoords)
    ax1.set_xticklabels(keys, rotation=90, ha='center')

    # ask matplotlib for the plotted objects and their labels
    lines, labels = ax1.get_legend_handles_labels()
    lines2, labels2 = ax2.get_legend_handles_labels()
    ax2.legend(lines + lines2, labels + labels2, loc=0)

    # save plot, beautify
    f = fname + '_timings'
    plt_helper.savefig(f)

    assert os.path.isfile(f + '.pdf'), 'ERROR: plotting did not create PDF file'
    assert os.path.isfile(f + '.pgf'), 'ERROR: plotting did not create PGF file'
    assert os.path.isfile(f + '.png'), 'ERROR: plotting did not create PNG file'

    # set up plot for radii
    fig, ax = plt_helper.newfig(textwidth=238.96, scale=1.0)

    exact_radii = []
    for key, item in results.items():
        computed_radii = sort_stats(filter_stats(item, type='computed_radius'), sortby='time')

        xcoords = [item0[0] for item0 in computed_radii]
        radii = [item0[1] for item0 in computed_radii]
        # only the reference variant is plotted, but all variants are checked below
        if key[0] + ' ' + key[1] == 'fully-implicit exact':
            ax.plot(xcoords, radii, label=(key[0] + ' ' + key[1]).replace('_v2', ' mod.'))

        exact_radii = sort_stats(filter_stats(item, type='exact_radius'), sortby='time')

        # pointwise deviation of computed from exact radius
        diff = np.array([
            abs(item0[1] - item1[1])
            for item0, item1 in zip(exact_radii, computed_radii)
        ])
        max_pos = int(np.argmax(diff))
        assert max(
            diff
        ) < 0.07, 'ERROR: computed radius is too far away from exact radius, got %s' % max(
            diff)
        assert 0.028 < computed_radii[max_pos][0] < 0.03, \
            'ERROR: largest difference is at wrong time, got %s' % computed_radii[max_pos][0]

    # plot exact radii from the last variant processed
    xcoords = [item[0] for item in exact_radii]
    radii = [item[1] for item in exact_radii]
    ax.plot(xcoords, radii, color='k', linestyle='--', linewidth=1, label='exact')

    ax.yaxis.set_major_formatter(ticker.FormatStrFormatter('%1.2f'))
    ax.set_ylabel('radius')
    ax.set_xlabel('time')
    ax.grid()
    ax.legend(loc=3)

    # save plot, beautify
    f = fname + '_radii'
    plt_helper.savefig(f)

    assert os.path.isfile(f + '.pdf'), 'ERROR: plotting did not create PDF file'
    assert os.path.isfile(f + '.pgf'), 'ERROR: plotting did not create PGF file'
    assert os.path.isfile(f + '.png'), 'ERROR: plotting did not create PNG file'

    # set up plot for interface width
    fig, ax = plt_helper.newfig(textwidth=238.96, scale=1.0)

    interface_width = []
    for key, item in results.items():
        interface_width = sort_stats(filter_stats(item, type='interface_width'), sortby='time')
        xcoords = [item[0] for item in interface_width]
        width = [item[1] for item in interface_width]
        if key[0] + ' ' + key[1] == 'fully-implicit exact':
            ax.plot(xcoords, width, label=key[0] + ' ' + key[1])

    # horizontal reference line at the initial interface width
    xcoords = [item[0] for item in interface_width]
    init_width = [interface_width[0][1]] * len(xcoords)
    ax.plot(xcoords, init_width, color='k', linestyle='--', linewidth=1, label='exact')

    ax.yaxis.set_major_formatter(ticker.FormatStrFormatter('%1.2f'))
    ax.set_ylabel(r'interface width ($\epsilon$)')
    ax.set_xlabel('time')
    ax.grid()
    ax.legend(loc=3)

    # save plot, beautify
    f = fname + '_interface'
    plt_helper.savefig(f)

    assert os.path.isfile(f + '.pdf'), 'ERROR: plotting did not create PDF file'
    assert os.path.isfile(f + '.pgf'), 'ERROR: plotting did not create PGF file'
    assert os.path.isfile(f + '.png'), 'ERROR: plotting did not create PNG file'

    return None
def compare_controllers(type=None, par=0.0, f=None):
    """
    A simple test program to compare PFASST runs with matrix-based and matrix-free controllers

    Args:
        type (str): setup type ('diffusion', 'advection' or 'testequation')
        par (float): parameter for controlling stiffness
        f: file handler
    """

    # set time parameters
    t0 = 0.0
    Tend = 1.0

    if type == 'diffusion':
        description, controller_params = diffusion_setup(par)
    elif type == 'advection':
        description, controller_params = advection_setup(par)
    elif type == 'testequation':
        description, controller_params = scalar_equation_setup()
    else:
        # bugfix: corrected typo in the error message ('valis' -> 'valid')
        raise ValueError('No valid setup type provided, aborting..')

    out = '\nWorking with %s setup and parameter %3.1e..' % (type, par)
    f.write(out + '\n')
    print(out)

    # instantiate controller
    controller = controller_matrix_nonMPI(num_procs=4,
                                          controller_params=controller_params,
                                          description=description)

    # get initial values on finest level
    P = controller.MS[0].levels[0].prob
    uinit = P.u_exact(t0)
    uex = P.u_exact(Tend)

    # this is where the iteration is happening
    uend_mat, stats_mat = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # filter statistics by type (number of iterations)
    filtered_stats_mat = filter_stats(stats_mat, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts_mat = sort_stats(filtered_stats_mat, sortby='time')

    out = ' Iteration counts for matrix-based version: %s' % iter_counts_mat
    f.write(out + '\n')
    print(out)

    # filter only iteration counts and check for equality
    niters = [item[1] for item in iter_counts_mat]
    assert niters.count(niters[0]) == len(
        niters), 'ERROR: not all time-steps have the same number of iterations'
    niter = niters[0]

    # build propagation matrix using the prescribed number of iterations (or any other, if needed)
    prop = controller.build_propagation_matrix(niter=niter)

    # compare matrix-PFASST and propagator results against the exact solution
    err_prop_ex = np.linalg.norm(prop.dot(uinit.values) - uex.values)
    err_mat_ex = np.linalg.norm(uend_mat.values - uex.values)
    out = ' Error (mat/prop) vs. exact solution: %6.4e -- %6.4e' % (
        err_mat_ex, err_prop_ex)
    f.write(out + '\n')
    print(out)

    err_mat_prop = np.linalg.norm(prop.dot(uinit.values) - uend_mat.values)
    out = ' Difference between matrix-PFASST and propagator: %6.4e' % err_mat_prop
    f.write(out + '\n')
    print(out)

    assert err_mat_prop < 2.0E-14, \
        'ERROR: difference between matrix-based and propagator result is too large, got %s' % err_mat_prop
def run_variant(nlevels=None):
    """
    Routine to run particular SDC variant

    Args:
        nlevels (int): number of levels in the hierarchy (1 or 2)

    Returns:
        statistics object of the run
    """

    # load (incomplete) default parameters
    description, controller_params = setup_parameters()

    # add stuff based on variant
    if nlevels == 1:
        description['level_params']['nsweeps'] = 1
        description['problem_params']['nvars'] = [(128, 128)]
        # description['problem_params']['nvars'] = [(32, 32)]
    elif nlevels == 2:
        description['level_params']['nsweeps'] = [1, 1]
        description['problem_params']['nvars'] = [(128, 128), (32, 32)]
        # description['problem_params']['nvars'] = [(32, 32), (16, 16)]
    else:
        # bugfix: NotImplemented is a constant, not an exception class;
        # raising it fails with a TypeError -- use NotImplementedError
        raise NotImplementedError('Wrong variant specified, got %s' % nlevels)

    out = 'Working on %s levels...' % nlevels
    print(out)

    # setup parameters "in time"
    t0 = 0.0
    Tend = 0.032

    # instantiate controller
    controller = controller_nonMPI(num_procs=1,
                                   controller_params=controller_params,
                                   description=description)

    # get initial values on finest level
    P = controller.MS[0].levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # filter statistics by variant (number of iterations)
    filtered_stats = filter_stats(stats, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts = sort_stats(filtered_stats, sortby='time')

    # compute and print statistics
    niters = np.array([item[1] for item in iter_counts])
    out = ' Mean number of iterations: %4.2f' % np.mean(niters)
    print(out)
    out = ' Range of values for number of iterations: %2i ' % np.ptp(niters)
    print(out)
    out = ' Position of max/min number of iterations: %2i -- %2i' % \
        (int(np.argmax(niters)), int(np.argmin(niters)))
    print(out)
    out = ' Std and var for number of iterations: %4.2f -- %4.2f' % (float(
        np.std(niters)), float(np.var(niters)))
    print(out)

    timing = sort_stats(filter_stats(stats, type='timing_run'), sortby='time')
    print('Time to solution: %6.4f sec.' % timing[0][1])

    # compare against the reference solution dumped by run_reference
    fname = 'data/AC_reference_FFT_Tend{:.1e}'.format(Tend) + '.npz'
    loaded = np.load(fname)
    uref = loaded['uend']

    err = np.linalg.norm(uref - uend.values, np.inf)
    print('Error vs. reference solution: %6.4e' % err)
    print()

    return stats
def run_diffusion(nsweeps):
    """
    A simple test program to test PFASST convergence for the heat equation with random initial data

    Sweeps the diffusion coefficient over several orders of magnitude and
    records the mean iteration count per CFL number in a pickle file.

    Args:
        nsweeps: number of fine sweeps to perform
    """

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-08
    level_params['dt'] = 0.25
    level_params['nsweeps'] = [nsweeps, 1]

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = [3]
    sweeper_params['QI'] = ['LU']
    sweeper_params['initial_guess'] = 'zero'

    # initialize problem parameters
    problem_params = dict()
    problem_params['freq'] = -1  # frequency for the test value
    problem_params['nvars'] = [127, 63]  # number of degrees of freedom for each level

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 50

    # initialize space transfer parameters
    space_transfer_params = dict()
    space_transfer_params['rorder'] = 2
    space_transfer_params['iorder'] = 2
    space_transfer_params['periodic'] = False

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 30

    # fill description dictionary for easy step instantiation
    description = dict()
    description['problem_class'] = heat1d  # pass problem class
    description['sweeper_class'] = generic_implicit  # pass sweeper (see part B)
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters
    description['space_transfer_class'] = mesh_to_mesh  # pass spatial transfer class
    description['space_transfer_params'] = space_transfer_params  # pass paramters for spatial transfer

    # set time parameters
    t0 = 0.0
    Tend = 4 * level_params['dt']

    # set up number of parallel time-steps to run PFASST with
    num_proc = 4

    results = dict()

    for i in range(-3, 10):
        # dt / dx^2 ratio, used to scale nu so the CFL-like number below equals 10**i
        ratio = level_params['dt'] / (1.0 / (problem_params['nvars'][0] + 1))**2

        problem_params['nu'] = 10.0**i / ratio  # diffusion coefficient
        description['problem_params'] = problem_params  # pass problem parameters

        out = 'Working on c = %6.4e' % problem_params['nu']
        print(out)
        cfl = ratio * problem_params['nu']
        out = ' CFL number: %4.2e' % cfl
        print(out)

        # instantiate controller
        controller = controller_nonMPI(num_procs=num_proc,
                                       controller_params=controller_params,
                                       description=description)

        # get initial values on finest level
        P = controller.MS[0].levels[0].prob
        uinit = P.u_exact(t0)

        # call main function to get things done...
        uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

        # filter statistics by type (number of iterations)
        filtered_stats = filter_stats(stats, type='niter')

        # convert filtered statistics to list of iterations count, sorted by process
        iter_counts = sort_stats(filtered_stats, sortby='time')

        niters = np.array([item[1] for item in iter_counts])
        out = ' Mean number of iterations: %4.2f' % np.mean(niters)
        print(out)

        # for the extreme CFL regimes, PFASST should converge very quickly
        if nsweeps == 3 and (i == -3 or i == 9):
            assert np.mean(niters) <= 2, 'ERROR: too much iterations for diffusive asymptotics, got %s' \
                % np.mean(niters)

        results[cfl] = np.mean(niters)

    fname = 'data/results_conv_diffusion_NS' + str(nsweeps) + '.pkl'
    file = open(fname, 'wb')
    pickle.dump(results, file)
    file.close()

    assert os.path.isfile(fname), 'ERROR: pickle did not create file'
def main():
    """
    Compare several (parallel) SDC sweepers on the generalized Fisher problem.

    For each sweeper, a controller is built, the problem is run to Tend, and the
    error vs. the exact solution plus iteration statistics are written to screen
    and to ``parallelSDC_nonlinear_out.txt``.  The last run's initial value,
    solution and exact solution are pickled for later visualization.
    """
    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-10
    level_params['dt'] = 0.01

    # This comes as read-in for the step class (this is optional!)
    step_params = dict()
    step_params['maxiter'] = 50

    # This comes as read-in for the problem class
    problem_params = dict()
    problem_params['nu'] = 1
    problem_params['nvars'] = 255
    problem_params['lambda0'] = 5.0
    problem_params['newton_maxiter'] = 50
    problem_params['newton_tol'] = 1E-12
    problem_params['interval'] = (-5, 5)

    # This comes as read-in for the sweeper class
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = 5
    sweeper_params['QI'] = 'LU'
    sweeper_params['fixed_time_in_jacobian'] = 0

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 30

    # Fill description dictionary for easy hierarchy creation
    description = dict()
    description['problem_class'] = generalized_fisher_jac
    description['problem_params'] = problem_params
    description['sweeper_params'] = sweeper_params
    description['level_params'] = level_params
    description['step_params'] = step_params

    sweeper_list = [
        generic_implicit, linearized_implicit_fixed_parallel_prec,
        linearized_implicit_fixed_parallel, linearized_implicit_parallel
    ]

    uinit = None
    uex = None
    uend = None
    P = None

    # context manager keeps the log file closed even if an assert below fires
    with open('parallelSDC_nonlinear_out.txt', 'w') as f:

        # loop over the different sweepers and check results
        for sweeper in sweeper_list:
            description['sweeper_class'] = sweeper

            # instantiate the controller
            controller = controller_nonMPI(num_procs=1,
                                           controller_params=controller_params,
                                           description=description)

            # setup parameters "in time"
            t0 = 0
            Tend = 0.1

            # get initial values on finest level
            P = controller.MS[0].levels[0].prob
            uinit = P.u_exact(t0)

            # call main function to get things done...
            uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

            # compute exact solution and compare
            uex = P.u_exact(Tend)
            err = abs(uex - uend)

            print('error at time %s: %s' % (Tend, err))

            # filter statistics by type (number of iterations)
            filtered_stats = filter_stats(stats, type='niter')

            # convert filtered statistics to list of iterations count, sorted by process
            iter_counts = sort_stats(filtered_stats, sortby='time')

            # compute and print statistics
            niters = np.array([item[1] for item in iter_counts])

            out = ' Mean number of iterations: %4.2f' % np.mean(niters)
            f.write(out + '\n')
            print(out)
            out = ' Range of values for number of iterations: %2i ' % np.ptp(niters)
            f.write(out + '\n')
            print(out)
            out = ' Position of max/min number of iterations: %2i -- %2i' % \
                (int(np.argmax(niters)), int(np.argmin(niters)))
            f.write(out + '\n')
            print(out)
            out = ' Std and var for number of iterations: %4.2f -- %4.2f' % \
                (float(np.std(niters)), float(np.var(niters)))
            # BUGFIX: this line used to be written to the file twice (duplicated f.write)
            f.write(out + '\n')
            print(out)

            f.write('\n')
            print()

            assert err < 3.686e-05, 'ERROR: error is too high for sweeper %s, got %s' % (
                sweeper.__name__, err)
            assert np.mean(niters) == 7.5 or np.mean(niters) == 4.0, \
                'ERROR: mean number of iterations not as expected, got %s' % np.mean(niters)

    results = dict()
    results['interval'] = problem_params['interval']
    # cell-centered grid coordinates of the fine level
    results['xvalues'] = np.array([(i + 1 - (P.params.nvars + 1) / 2) * P.dx
                                   for i in range(P.params.nvars)])
    results['uinit'] = uinit.values
    results['uend'] = uend.values
    results['uex'] = uex.values

    # write out for later visualization; the with-statement flushes and closes the
    # pickle file before the existence check (it was left open before)
    with open('data/parallelSDC_results_graphs.pkl', 'wb') as file:
        pickle.dump(results, file)

    assert os.path.isfile('data/parallelSDC_results_graphs.pkl'), 'ERROR: pickle did not create file'
def run_simulation(spectral=None, ml=None, num_procs=None):
    """
    A test program to do SDC, MLSDC and PFASST runs for the 2D NLS equation

    Args:
        spectral (bool): run in real or spectral space
        ml (bool): single or multiple levels
        num_procs (int): number of parallel processors
    """
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-08
    level_params['dt'] = 1E-01 / 2
    level_params['nsweeps'] = [1]

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = [3]
    sweeper_params['QI'] = ['LU']  # For the IMEX sweeper, the LU-trick can be activated for the implicit part
    sweeper_params['initial_guess'] = 'zero'

    # initialize problem parameters
    problem_params = dict()
    # two space levels only in the multi-level case
    if ml:
        problem_params['nvars'] = [(128, 128), (32, 32)]
    else:
        problem_params['nvars'] = [(128, 128)]
    problem_params['spectral'] = spectral
    problem_params['comm'] = comm

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 50

    # initialize controller parameters
    # logger level 99 silences all ranks except rank 0
    controller_params = dict()
    controller_params['logger_level'] = 30 if rank == 0 else 99
    # controller_params['predict_type'] = 'fine_only'

    # fill description dictionary for easy step instantiation
    description = dict()
    description['problem_params'] = problem_params  # pass problem parameters
    description['problem_class'] = nonlinearschroedinger_imex
    description['sweeper_class'] = imex_1st_order
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters
    description['space_transfer_class'] = fft_to_fft

    # set time parameters
    t0 = 0.0
    Tend = 1.0

    # only rank 0 writes the output file; f stays None on the other ranks
    f = None
    if rank == 0:
        f = open('step_7_B_out.txt', 'a')
        out = f'Running with ml={ml} and num_procs={num_procs}...'
        f.write(out + '\n')
        print(out)

    # instantiate controller
    controller = controller_nonMPI(num_procs=num_procs,
                                   controller_params=controller_params,
                                   description=description)

    # get initial values on finest level
    P = controller.MS[0].levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    uex = P.u_exact(Tend)
    err = abs(uex - uend)

    # statistics, error/timing reporting and the sanity asserts all run on rank 0 only
    if rank == 0:
        # filter statistics by type (number of iterations)
        filtered_stats = filter_stats(stats, type='niter')

        # convert filtered statistics to list of iterations count, sorted by process
        iter_counts = sort_stats(filtered_stats, sortby='time')

        niters = np.array([item[1] for item in iter_counts])
        out = f' Min/Mean/Max number of iterations: ' \
              f'{np.min(niters):4.2f} / {np.mean(niters):4.2f} / {np.max(niters):4.2f}'
        f.write(out + '\n')
        print(out)
        out = ' Range of values for number of iterations: %2i ' % np.ptp(niters)
        f.write(out + '\n')
        print(out)
        out = ' Position of max/min number of iterations: %2i -- %2i' % \
            (int(np.argmax(niters)), int(np.argmin(niters)))
        f.write(out + '\n')
        print(out)
        out = ' Std and var for number of iterations: %4.2f -- %4.2f' % (
            float(np.std(niters)), float(np.var(niters)))
        f.write(out + '\n')
        print(out)

        out = f'Error: {err:6.4e}'
        f.write(out + '\n')
        print(out)

        timing = sort_stats(filter_stats(stats, type='timing_run'), sortby='time')
        out = f'Time to solution: {timing[0][1]:6.4f} sec.'
        f.write(out + '\n')
        print(out)

        assert err <= 1.133E-05, 'Error is too high, got %s' % err
        # iteration thresholds are empirical per configuration (ML + parallel, ML serial, single level)
        if ml:
            if num_procs > 1:
                maxmean = 12.5
            else:
                maxmean = 6.6
        else:
            maxmean = 12.7
        assert np.mean(niters) <= maxmean, 'Mean number of iterations is too high, got %s' % np.mean(niters)

        f.write('\n')
        print()
        f.close()
def main():
    """
    Study the per-iteration error reduction of several (parallel) SDC sweepers
    on the generalized Fisher problem for a range of time-step sizes.

    For every sweeper and every dt the ratio error_post / error_pre of one
    iteration is recorded; the results are pickled to
    ``data/error_reduction_data.pkl``.
    """
    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-12

    # This comes as read-in for the step class (this is optional!)
    step_params = dict()
    step_params['maxiter'] = 20

    # This comes as read-in for the problem class
    problem_params = dict()
    problem_params['nu'] = 1
    problem_params['nvars'] = 2047
    problem_params['lambda0'] = 5.0
    problem_params['newton_maxiter'] = 50
    problem_params['newton_tol'] = 1E-12
    problem_params['interval'] = (-5, 5)

    # This comes as read-in for the sweeper class
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = 5
    sweeper_params['QI'] = 'LU'
    sweeper_params['fixed_time_in_jacobian'] = 0

    # initialize controller parameters; the hook records the pre/post iteration errors
    controller_params = dict()
    controller_params['logger_level'] = 30
    controller_params['hook_class'] = err_reduction_hook

    # Fill description dictionary for easy hierarchy creation
    description = dict()
    description['problem_class'] = generalized_fisher_jac
    description['problem_params'] = problem_params
    description['dtype_u'] = mesh
    description['dtype_f'] = mesh
    description['sweeper_params'] = sweeper_params
    description['step_params'] = step_params

    # setup parameters "in time"
    t0 = 0
    Tend = 0.1

    sweeper_list = [
        generic_implicit, linearized_implicit_fixed_parallel,
        linearized_implicit_fixed_parallel_prec
    ]
    # halve dt four times: Tend/2, Tend/4, Tend/8, Tend/16
    dt_list = [Tend / 2 ** i for i in range(1, 5)]

    results = dict()
    results['sweeper_list'] = [sweeper.__name__ for sweeper in sweeper_list]
    results['dt_list'] = dt_list

    # loop over the different sweepers and check results
    for sweeper in sweeper_list:
        description['sweeper_class'] = sweeper
        error_reduction = []
        for dt in dt_list:
            print('Working with sweeper %s and dt = %s...' % (sweeper.__name__, dt))

            level_params['dt'] = dt
            description['level_params'] = level_params

            # instantiate the controller
            controller = allinclusive_classic_nonMPI(num_procs=1,
                                                     controller_params=controller_params,
                                                     description=description)

            # get initial values on finest level
            P = controller.MS[0].levels[0].prob
            uinit = P.u_exact(t0)

            # call main function to get things done...
            uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

            # filter statistics: error before and after the first iteration
            filtered_stats = filter_stats(stats, type='error_pre_iteration')
            error_pre = sort_stats(filtered_stats, sortby='iter')[0][1]

            filtered_stats = filter_stats(stats, type='error_post_iteration')
            error_post = sort_stats(filtered_stats, sortby='iter')[0][1]

            error_reduction.append(error_post / error_pre)

            print('error and reduction rate at time %s: %6.4e -- %6.4e' %
                  (Tend, error_post, error_reduction[-1]))

        results[sweeper.__name__] = error_reduction
        print()

    # context manager replaces the explicit open/close pair and guarantees the
    # file is closed even if pickling fails
    with open('data/error_reduction_data.pkl', 'wb') as file:
        pickle.dump(results, file)
def run_reference():
    """
    Helper routine to create a reference solution using very high order SDC and small time-steps

    Builds on the shared setup_parameters(), overrides problem/sweeper choices with
    a 9-node IMEX SDC and dt=0.01, runs to Tend=1.0, prints iteration/timing
    statistics and dumps the final solution to a PETSc binary file.
    """
    description, controller_params = setup_parameters()

    # use the semi-implicit Gray-Scott problem with IMEX SDC on 9 collocation nodes
    description['problem_class'] = petsc_grayscott_semiimplicit
    description['dtype_f'] = rhs_imex_petsc_data
    description['sweeper_class'] = imex_1st_order
    description['sweeper_params']['num_nodes'] = 9
    description['level_params']['dt'] = 0.01

    # set time parameters
    t0 = 0.0
    Tend = 1.0

    # instantiate controller
    controller = allinclusive_multigrid_nonMPI(num_procs=1,
                                               controller_params=controller_params,
                                               description=description)

    # get initial values on finest level
    P = controller.MS[0].levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # filter statistics by variant (number of iterations)
    filtered_stats = filter_stats(stats, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts = sort_stats(filtered_stats, sortby='time')

    # compute and print statistics
    niters = np.array([item[1] for item in iter_counts])
    out = ' Mean number of iterations: %4.2f' % np.mean(niters)
    print(out)
    out = ' Range of values for number of iterations: %2i ' % np.ptp(niters)
    print(out)
    out = ' Position of max/min number of iterations: %2i -- %2i' % \
        (int(np.argmax(niters)), int(np.argmin(niters)))
    print(out)
    out = ' Std and var for number of iterations: %4.2f -- %4.2f' % (float(
        np.std(niters)), float(np.var(niters)))
    print(out)

    # solver-internal counters kept by the problem class; max(..., 1) avoids
    # division by zero when a solver was never called
    print('Iteration count (nonlinear/linear): %i / %i' %
          (P.snes_itercount, P.ksp_itercount))
    print('Mean Iteration count per call: %4.2f / %4.2f' %
          (P.snes_itercount / max(P.snes_ncalls, 1),
           P.ksp_itercount / max(P.ksp_ncalls, 1)))

    timing = sort_stats(filter_stats(stats, type='timing_run'), sortby='time')

    print('Time to solution: %6.4f sec.' % timing[0][1])

    # write the reference solution in PETSc binary format
    # NOTE(review): the viewer is not explicitly destroyed here — presumably
    # relying on garbage collection to flush/close; confirm against PETSc usage
    fname = 'data/GS_reference.dat'
    viewer = PETSc.Viewer().createBinary(fname, 'w')
    viewer.view(uend.values)

    assert os.path.isfile(fname), 'ERROR: PETSc did not create file'

    return None
def main():
    """
    A simple test program to do compare PFASST with multi-step SDC

    Runs PFASST, parallel (Jacobi-like) MSSDC and serial (Gauss-Seidel-like)
    MSSDC on the 1D heat equation, compares errors and pairwise differences,
    writes a report to ``step_8_B_out.txt`` and produces residual plots.
    """
    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 5E-10
    level_params['dt'] = 0.125

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = [3]

    # initialize problem parameters
    problem_params = dict()
    problem_params['nu'] = 0.1  # diffusion coefficient
    problem_params['freq'] = 2  # frequency for the test value

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 50

    # initialize space transfer parameters
    space_transfer_params = dict()
    space_transfer_params['rorder'] = 2
    space_transfer_params['iorder'] = 6

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 40

    # fill description dictionary for easy step instantiation
    description = dict()
    description['problem_class'] = heat1d  # pass problem class
    description['sweeper_class'] = generic_LU  # pass sweeper
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters
    description['space_transfer_class'] = mesh_to_mesh  # pass spatial transfer class
    description['space_transfer_params'] = space_transfer_params  # pass parameters for spatial transfer

    # set up parameters for PFASST run (two space levels)
    problem_params['nvars'] = [63, 31]
    description['problem_params'] = problem_params.copy()
    description_pfasst = description.copy()

    # set up parameters for MSSDC run (single space level)
    problem_params['nvars'] = [63]
    description['problem_params'] = problem_params.copy()
    description_mssdc = description.copy()

    # mssdc_jac toggles Jacobi-like (parallel) vs Gauss-Seidel-like (serial) MSSDC
    controller_params['mssdc_jac'] = True
    controller_params_jac = controller_params.copy()
    controller_params['mssdc_jac'] = False
    controller_params_gs = controller_params.copy()

    # set time parameters
    t0 = 0.0
    Tend = 1.0

    # set up list of parallel time-steps to run PFASST/MSSDC with
    num_proc = 8

    # instantiate controllers
    controller_mssdc_jac = controller_nonMPI(num_procs=num_proc,
                                             controller_params=controller_params_jac,
                                             description=description_mssdc)
    controller_mssdc_gs = controller_nonMPI(num_procs=num_proc,
                                            controller_params=controller_params_gs,
                                            description=description_mssdc)
    controller_pfasst = controller_nonMPI(num_procs=num_proc,
                                          controller_params=controller_params,
                                          description=description_pfasst)

    # get initial values on finest level
    P = controller_mssdc_jac.MS[0].levels[0].prob
    uinit = P.u_exact(t0)

    # call main functions to get things done...
    uend_pfasst, stats_pfasst = controller_pfasst.run(u0=uinit, t0=t0, Tend=Tend)
    uend_mssdc_jac, stats_mssdc_jac = controller_mssdc_jac.run(u0=uinit, t0=t0, Tend=Tend)
    uend_mssdc_gs, stats_mssdc_gs = controller_mssdc_gs.run(u0=uinit, t0=t0, Tend=Tend)

    # compute exact solution and compare for both runs
    uex = P.u_exact(Tend)
    err_mssdc_jac = abs(uex - uend_mssdc_jac)
    err_mssdc_gs = abs(uex - uend_mssdc_gs)
    err_pfasst = abs(uex - uend_pfasst)
    diff_jac = abs(uend_mssdc_jac - uend_pfasst)
    diff_gs = abs(uend_mssdc_gs - uend_pfasst)
    diff_jac_gs = abs(uend_mssdc_gs - uend_mssdc_jac)

    # context manager replaces the former open/close pair so the report file is
    # closed even if a write fails mid-way
    with open('step_8_B_out.txt', 'w') as f:

        out = 'Error PFASST: %12.8e' % err_pfasst
        f.write(out + '\n')
        print(out)
        out = 'Error parallel MSSDC: %12.8e' % err_mssdc_jac
        f.write(out + '\n')
        print(out)
        out = 'Error serial MSSDC: %12.8e' % err_mssdc_gs
        f.write(out + '\n')
        print(out)
        out = 'Diff PFASST vs. parallel MSSDC: %12.8e' % diff_jac
        f.write(out + '\n')
        print(out)
        out = 'Diff PFASST vs. serial MSSDC: %12.8e' % diff_gs
        f.write(out + '\n')
        print(out)
        out = 'Diff parallel vs. serial MSSDC: %12.8e' % diff_jac_gs
        f.write(out + '\n')
        print(out)

        # filter statistics by type (number of iterations)
        filtered_stats_pfasst = filter_stats(stats_pfasst, type='niter')
        filtered_stats_mssdc_jac = filter_stats(stats_mssdc_jac, type='niter')
        filtered_stats_mssdc_gs = filter_stats(stats_mssdc_gs, type='niter')

        # convert filtered statistics to list of iterations count, sorted by process
        iter_counts_pfasst = sort_stats(filtered_stats_pfasst, sortby='time')
        iter_counts_mssdc_jac = sort_stats(filtered_stats_mssdc_jac, sortby='time')
        iter_counts_mssdc_gs = sort_stats(filtered_stats_mssdc_gs, sortby='time')

        # compute and print statistics
        for item_pfasst, item_mssdc_jac, item_mssdc_gs in \
                zip(iter_counts_pfasst, iter_counts_mssdc_jac, iter_counts_mssdc_gs):
            out = 'Number of iterations for time %4.2f (PFASST/parMSSDC/serMSSDC): %2i / %2i / %2i' % \
                (item_pfasst[0], item_pfasst[1], item_mssdc_jac[1], item_mssdc_gs[1])
            f.write(out + '\n')
            print(out)

    # call helper routine to produce residual plot
    show_residual_across_simulation(stats_mssdc_jac, 'step_8_residuals_mssdc_jac.png')
    show_residual_across_simulation(stats_mssdc_gs, 'step_8_residuals_mssdc_gs.png')

    assert os.path.isfile('step_8_residuals_mssdc_jac.png')
    assert os.path.isfile('step_8_residuals_mssdc_gs.png')
    assert diff_jac < 3.1E-10, \
        "ERROR: difference between PFASST and parallel MSSDC controller is too large, got %s" % diff_jac
    assert diff_gs < 3.1E-10, \
        "ERROR: difference between PFASST and serial MSSDC controller is too large, got %s" % diff_gs
    assert diff_jac_gs < 3.1E-10, \
        "ERROR: difference between parallel and serial MSSDC controller is too large, got %s" % diff_jac_gs
def run_variants(variant=None, ml=None, num_procs=None):
    """
    Main routine to run the different implementations of the heat equation with FEniCS

    Args:
        variant (str): specifies the variant ('mass', 'mass_inv' or 'weak')
        ml (bool): use single or multiple levels
        num_procs (int): number of processors in time

    Raises:
        NotImplementedError: if an unknown variant is requested
    """
    Tend = 1.0
    t0 = 0.0

    description, controller_params = setup(t0=t0, ml=ml)

    if variant == 'mass':
        # Note that we need to reduce the tolerance for the residual here, since otherwise the error will be too high
        description['level_params']['restol'] /= 500
        description['problem_class'] = fenics_heat_mass
        description['sweeper_class'] = imex_1st_order_mass
    elif variant == 'mass_inv':
        description['problem_class'] = fenics_heat
        description['sweeper_class'] = imex_1st_order
    elif variant == 'weak':
        description['problem_class'] = fenics_heat_weak_imex
        description['sweeper_class'] = imex_1st_order
    else:
        raise NotImplementedError('Variant %s is not implemented' % variant)

    # quickly generate block of steps
    controller = controller_nonMPI(num_procs=num_procs,
                                   controller_params=controller_params,
                                   description=description)

    # get initial values on finest level
    P = controller.MS[0].levels[0].prob
    # CONSISTENCY FIX: use t0 instead of the hard-coded 0.0 (same value, but
    # keeps the start time in one place)
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # compute exact solution and compare (relative error)
    uex = P.u_exact(Tend)
    err = abs(uex - uend) / abs(uex)

    # context manager keeps the report file closed even if an assert fires
    with open('step_7_A_out.txt', 'a') as f:

        out = f'Variant {variant} with ml={ml} and num_procs={num_procs} -- error at time {Tend}: {err}'
        f.write(out + '\n')
        print(out)

        # filter statistics by type (number of iterations)
        filtered_stats = filter_stats(stats, type='niter')

        # convert filtered statistics to list of iterations count, sorted by process
        iter_counts = sort_stats(filtered_stats, sortby='time')

        niters = np.array([item[1] for item in iter_counts])
        out = ' Mean number of iterations: %4.2f' % np.mean(niters)
        f.write(out + '\n')
        print(out)
        out = ' Range of values for number of iterations: %2i ' % np.ptp(niters)
        f.write(out + '\n')
        print(out)
        out = ' Position of max/min number of iterations: %2i -- %2i' % \
            (int(np.argmax(niters)), int(np.argmin(niters)))
        f.write(out + '\n')
        print(out)
        out = ' Std and var for number of iterations: %4.2f -- %4.2f' % (float(
            np.std(niters)), float(np.var(niters)))
        f.write(out + '\n')
        print(out)

        timing = sort_stats(filter_stats(stats, type='timing_run'), sortby='time')
        out = f'Time to solution: {timing[0][1]:6.4f} sec.'
        f.write(out + '\n')
        print(out)

        # iteration/error thresholds are empirical per processor count
        if num_procs == 1:
            assert np.mean(niters) <= 6.0, \
                'Mean number of iterations is too high, got %s' % np.mean(niters)
            assert err <= 4.1E-08, 'Error is too high, got %s' % err
        else:
            assert np.mean(niters) <= 11.6, \
                'Mean number of iterations is too high, got %s' % np.mean(niters)
            assert err <= 4.0E-08, 'Error is too high, got %s' % err

        f.write('\n')
        print()
    # NOTE(review): this is a fragment — the enclosing function's header and the
    # instantiation of controller_classic/controller_multigrid are outside this
    # chunk; `description`, `comm`, `t0` and `Tend` are presumably defined above.
    # The first line below is the tail of a controller-constructor call.
                                                 description=description, comm=comm)

    # get initial values on finest level
    P = controller_classic.S.levels[0].prob
    uinit = P.u_exact(t0)

    # call main functions to get things done...
    uend_classic, stats_classic = controller_classic.run(u0=uinit, t0=t0, Tend=Tend)
    uend_multigrid, stats_multigrid = controller_multigrid.run(u0=uinit, t0=t0, Tend=Tend)

    # filter statistics by type (number of iterations)
    filtered_stats_classic = filter_stats(stats_classic, type='niter')
    filtered_stats_multigrid = filter_stats(stats_multigrid, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts_classic = sort_stats(filtered_stats_classic, sortby='time')
    iter_counts_multigrid = sort_stats(filtered_stats_multigrid, sortby='time')

    # combine statistics into list of statistics (gathered on rank 0)
    iter_counts_classic_list = comm.gather(iter_counts_classic, root=0)
    iter_counts_multigrid_list = comm.gather(iter_counts_multigrid, root=0)

    rank = comm.Get_rank()
    size = comm.Get_size()

    # NOTE(review): the rank-0 reporting body continues past this chunk
    if rank == 0:
def main(): """ A simple test program to do PFASST runs for the heat equation """ # set MPI communicator comm = MPI.COMM_WORLD world_rank = comm.Get_rank() world_size = comm.Get_size() # split world communicator to create space-communicators if len(sys.argv) >= 2: color = int(world_rank / int(sys.argv[1])) else: color = int(world_rank / 1) space_comm = comm.Split(color=color) space_size = space_comm.Get_size() space_rank = space_comm.Get_rank() # split world communicator to create time-communicators if len(sys.argv) >= 2: color = int(world_rank % int(sys.argv[1])) else: color = int(world_rank / world_size) time_comm = comm.Split(color=color) time_size = time_comm.Get_size() time_rank = time_comm.Get_rank() print( "IDs (world, space, time): %i / %i -- %i / %i -- %i / %i" % (world_rank, world_size, space_rank, space_size, time_rank, time_size)) # initialize level parameters level_params = dict() level_params['restol'] = 1E-08 level_params['dt'] = 1.0 / 4 level_params['nsweeps'] = [1] # initialize sweeper parameters sweeper_params = dict() sweeper_params['collocation_class'] = CollGaussRadau_Right sweeper_params['num_nodes'] = [3] sweeper_params['QI'] = [ 'LU' ] # For the IMEX sweeper, the LU-trick can be activated for the implicit part # sweeper_params['spread'] = False # initialize problem parameters problem_params = dict() problem_params['nu'] = 0.1 # diffusion coefficient problem_params['freq'] = 2 # frequency for the test value problem_params['nvars'] = [(16, 16), (4, 4) ] # number of degrees of freedom for each level problem_params['comm'] = space_comm # initialize step parameters step_params = dict() step_params['maxiter'] = 10 # initialize controller parameters controller_params = dict() controller_params[ 'logger_level'] = 20 if space_rank == 0 else 99 # set level depending on rank # controller_params['hook_class'] = error_output # fill description dictionary for easy step instantiation description = dict() description['problem_class'] = heat2d_dedalus_forced 
description['problem_params'] = problem_params # pass problem parameters description['sweeper_class'] = imex_1st_order description['sweeper_params'] = sweeper_params # pass sweeper parameters description['level_params'] = level_params # pass level parameters description['step_params'] = step_params # pass step parameters description['space_transfer_class'] = dedalus_field_transfer # description['space_transfer_params'] = space_transfer_params # pass paramters for spatial transfer # set time parameters t0 = 0.0 Tend = 1.0 # instantiate controller controller = controller_MPI(controller_params=controller_params, description=description, comm=time_comm) # get initial values on finest level P = controller.S.levels[0].prob uinit = P.u_exact(t0) # call main function to get things done... uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend) # compute exact solution and compare uex = P.u_exact(Tend) err = abs(uex - uend) # filter statistics by type (number of iterations) filtered_stats = filter_stats(stats, type='niter') # convert filtered statistics to list of iterations count, sorted by process iter_counts = sort_stats(filtered_stats, sortby='time') if space_rank == 0: out = 'This is time-rank %i...' % time_rank print(out) # compute and print statistics for item in iter_counts: out = 'Number of iterations for time %4.2f: %2i' % item print(out) niters = np.array([item[1] for item in iter_counts]) out = ' Mean number of iterations: %4.2f' % np.mean(niters) print(out) out = ' Range of values for number of iterations: %2i ' % np.ptp( niters) print(out) out = ' Position of max/min number of iterations: %2i -- %2i' % \ (int(np.argmax(niters)), int(np.argmin(niters))) print(out) out = ' Std and var for number of iterations: %4.2f -- %4.2f' % ( float(np.std(niters)), float(np.var(niters))) print(out) timing = sort_stats(filter_stats(stats, type='timing_run'), sortby='time') out = 'Time to solution: %6.4f sec.' % timing[0][1] print(out) print('Error: %8.4e' % err)
def run_variant(variant=None):
    """
    Routine to run a particular variant

    Args:
        variant (str): string describing the variant
            ('sl_serial', 'sl_parallel', 'ml_serial' or 'ml_parallel')

    Raises:
        NotImplementedError: if an unknown variant is requested
    """
    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-07
    level_params['dt'] = 1E-03 / 2
    level_params['nsweeps'] = 1

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = 3
    sweeper_params['initial_guess'] = 'zero'

    # This comes as read-in for the problem class
    problem_params = dict()
    problem_params['nu'] = 2
    problem_params['eps'] = 0.04
    problem_params['newton_maxiter'] = 100
    problem_params['newton_tol'] = 1E-08
    problem_params['lin_tol'] = 1E-09
    problem_params['lin_maxiter'] = 100
    problem_params['radius'] = 0.25

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 50

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 30
    controller_params['hook_class'] = monitor

    # fill description dictionary for easy step instantiation
    description = dict()
    description['problem_class'] = allencahn_fullyimplicit
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters

    # in the parallel variants only MPI rank 0 prints
    do_print = True

    # add stuff based on variant; maxmeaniters is the empirical iteration bound
    if variant == 'sl_serial':
        maxmeaniters = 5.0
        sweeper_params['QI'] = ['LU']
        problem_params['nvars'] = [(128, 128)]
        description['problem_params'] = problem_params  # pass problem parameters
        description['sweeper_class'] = generic_implicit  # pass sweeper
        description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    elif variant == 'sl_parallel':
        maxmeaniters = 5.12
        # one MPI rank per collocation node is required
        assert MPI.COMM_WORLD.Get_size() == sweeper_params['num_nodes']
        sweeper_params['QI'] = ['MIN3']
        sweeper_params['comm'] = MPI.COMM_WORLD
        problem_params['nvars'] = [(128, 128)]
        description['problem_params'] = problem_params  # pass problem parameters
        description['sweeper_class'] = generic_implicit_MPI  # pass sweeper
        description['sweeper_params'] = sweeper_params  # pass sweeper parameters
        do_print = MPI.COMM_WORLD.Get_rank() == 0
    elif variant == 'ml_serial':
        maxmeaniters = 3.125
        sweeper_params['QI'] = ['LU']
        problem_params['nvars'] = [(128, 128), (64, 64)]
        description['space_transfer_class'] = mesh_to_mesh_fft2d
        description['problem_params'] = problem_params  # pass problem parameters
        description['sweeper_class'] = generic_implicit  # pass sweeper
        description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    elif variant == 'ml_parallel':
        # one MPI rank per collocation node is required
        assert MPI.COMM_WORLD.Get_size() == sweeper_params['num_nodes']
        maxmeaniters = 4.25
        sweeper_params['QI'] = ['MIN3']
        sweeper_params['comm'] = MPI.COMM_WORLD
        problem_params['nvars'] = [(128, 128), (64, 64)]
        description['problem_params'] = problem_params  # pass problem parameters
        description['sweeper_class'] = generic_implicit_MPI  # pass sweeper
        description['sweeper_params'] = sweeper_params  # pass sweeper parameters
        description['space_transfer_class'] = mesh_to_mesh_fft2d
        description['base_transfer_class'] = base_transfer_MPI
        do_print = MPI.COMM_WORLD.Get_rank() == 0
    else:
        # BUGFIX: was `raise NotImplemented(...)` — NotImplemented is a
        # non-callable singleton, so that line raised TypeError instead of the
        # intended exception
        raise NotImplementedError('Wrong variant specified, got %s' % variant)

    if do_print:
        out = 'Working on %s variant...' % variant
        print(out)

    # setup parameters "in time"
    t0 = 0
    Tend = 0.004

    # instantiate controller
    controller = controller_nonMPI(num_procs=1,
                                   controller_params=controller_params,
                                   description=description)

    # get initial values on finest level
    P = controller.MS[0].levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # filter statistics by variant (number of iterations)
    filtered_stats = filter_stats(stats, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts = sort_stats(filtered_stats, sortby='time')

    # compute and print statistics
    niters = np.array([item[1] for item in iter_counts])
    if do_print:
        out = ' Mean number of iterations: %4.2f' % np.mean(niters)
        assert np.mean(niters) <= maxmeaniters, 'ERROR: number of iterations is too high, got %s instead of %s' \
            % (np.mean(niters), maxmeaniters)
        print(out)
        out = ' Range of values for number of iterations: %2i ' % np.ptp(niters)
        print(out)
        out = ' Position of max/min number of iterations: %2i -- %2i' % \
            (int(np.argmax(niters)), int(np.argmin(niters)))
        print(out)
        out = ' Std and var for number of iterations: %4.2f -- %4.2f' % (
            float(np.std(niters)), float(np.var(niters)))
        print(out)

        # solver-internal counters; max(..., 1) avoids division by zero
        print(' Iteration count (nonlinear/linear): %i / %i' %
              (P.newton_itercount, P.lin_itercount))
        print(' Mean Iteration count per call: %4.2f / %4.2f' %
              (P.newton_itercount / max(P.newton_ncalls, 1),
               P.lin_itercount / max(P.lin_ncalls, 1)))

        timing = sort_stats(filter_stats(stats, type='timing_run'), sortby='time')
        print('Time to solution: %6.4f sec.' % timing[0][1])

    return None
def run_simulations(type=None, ndim_list=None, Tend=None, nsteps_list=None, ml=False, nprocs=None):
    """
    A simple test program to do SDC runs for the heat equation in various dimensions

    Args:
        type (str): problem family to run ('diffusion', 'advection' or 'auzinger')
        ndim_list (list of int): spatial dimensions to run
        Tend (float): end time
        nsteps_list (list of int): numbers of time steps (dt = (Tend - t0) / nsteps)
        ml (bool): use multiple levels
        nprocs (int): number of parallel time-steps
    """
    t0 = None
    dt = None
    description = None
    controller_params = None

    f = open('step_8_C_out.txt', 'a')

    for ndim in ndim_list:
        for nsteps in nsteps_list:

            # pick the setup helper matching the requested problem family
            if type == 'diffusion':
                # set time parameters
                t0 = 0.0
                dt = (Tend - t0) / nsteps
                description, controller_params = setup_diffusion(dt, ndim, ml)
            elif type == 'advection':
                # set time parameters
                t0 = 0.0
                dt = (Tend - t0) / nsteps
                description, controller_params = setup_advection(dt, ndim, ml)
            elif type == 'auzinger':
                # the Auzinger test problem is 1D only
                assert ndim == 1
                # set time parameters
                t0 = 0.0
                dt = (Tend - t0) / nsteps
                description, controller_params = setup_auzinger(dt, ml)

            out = f'Running {type} in {ndim} dimensions with time-step size {dt}...\n'
            f.write(out + '\n')
            print(out)

            # Warning: this is black magic used to run an 'exact' collocation solver for each step within the hooks
            # (the description references itself so the hook can rebuild the problem per step)
            description['step_params']['description'] = description
            description['step_params']['controller_params'] = controller_params

            # instantiate controller
            controller = controller_nonMPI(num_procs=nprocs,
                                           controller_params=controller_params,
                                           description=description)

            # get initial values on finest level
            P = controller.MS[0].levels[0].prob
            uinit = P.u_exact(t0)

            # call main function to get things done...
            uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

            # filter statistics by type (number of iterations)
            iter_counts = sort_stats(filter_stats(stats, type='niter'), sortby='time')

            niters = np.array([item[1] for item in iter_counts])
            out = f' Mean number of iterations: {np.mean(niters):4.2f}'
            f.write(out + '\n')
            print(out)

            # filter statistics by type (error after time-step)
            PDE_errors = sort_stats(filter_stats(stats, type='PDE_error_after_step'), sortby='time')
            coll_errors = sort_stats(filter_stats(stats, type='coll_error_after_step'), sortby='time')
            for iters, PDE_err, coll_err in zip(iter_counts, PDE_errors, coll_errors):
                # the collocation error must stay below the tolerance stored in the step params
                assert coll_err[1] < description['step_params']['errtol'], \
                    f'Error too high, got {coll_err[1]:8.4e}'
                out = f' Errors after step {PDE_err[0]:8.4f} with {iters[1]} iterations: ' \
                      f'{PDE_err[1]:8.4e} / {coll_err[1]:8.4e}'
                f.write(out + '\n')
                print(out)
            f.write('\n')
            print()

            # filter statistics by type (error after time-step)
            timing = sort_stats(filter_stats(stats, type='timing_run'), sortby='time')
            out = f'...done, took {timing[0][1]} seconds!'
            f.write(out + '\n')
            print(out)

            print()
        out = '-----------------------------------------------------------------------------'
        f.write(out + '\n')
        print(out)

    f.close()
def run_SDC_variant(variant=None, inexact=False, cwd=''):
    """
    Routine to run particular SDC variant

    Args:
        variant (str): string describing the variant
        inexact (bool): flag to use inexact nonlinear solve (or not)
        cwd (str): current working directory

    Returns:
        timing (float)
        niter (float)
    """

    # load (incomplete) default parameters
    description, controller_params = setup_parameters()

    # add stuff based on variant
    if variant == 'fully-implicit':
        description['problem_class'] = petsc_grayscott_fullyimplicit
        description['dtype_f'] = petsc_data
        description['sweeper_class'] = generic_implicit
    elif variant == 'semi-implicit':
        description['problem_class'] = petsc_grayscott_semiimplicit
        description['dtype_f'] = rhs_imex_petsc_data
        description['sweeper_class'] = imex_1st_order
    elif variant == 'multi-implicit':
        description['problem_class'] = petsc_grayscott_multiimplicit
        description['dtype_f'] = rhs_2comp_petsc_data
        description['sweeper_class'] = multi_implicit
    else:
        # bugfix: NotImplemented is a singleton, not an exception class
        raise NotImplementedError('Wrong variant specified, got %s' % variant)

    if inexact:
        # truncate the inner solves so each sweep is cheaper
        description['problem_params']['lsol_maxiter'] = 2
        description['problem_params']['nlsol_maxiter'] = 1
        out = 'Working on inexact %s variant...' % variant
    else:
        out = 'Working on exact %s variant...' % variant
    print(out)

    # set time parameters
    t0 = 0.0
    Tend = 1.0

    # instantiate controller
    controller = allinclusive_multigrid_nonMPI(
        num_procs=1, controller_params=controller_params, description=description)

    # get initial values on finest level
    P = controller.MS[0].levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # load reference solution to compare with
    fname = cwd + 'data/GS_reference.dat'
    viewer = PETSc.Viewer().createBinary(fname, 'r')
    uex = P.u_exact(t0)
    uex.values = PETSc.Vec().load(viewer)
    err = abs(uex - uend)

    # filter statistics by variant (number of iterations)
    filtered_stats = filter_stats(stats, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts = sort_stats(filtered_stats, sortby='time')

    # compute and print statistics
    niters = np.array([item[1] for item in iter_counts])
    out = ' Mean number of iterations: %4.2f' % np.mean(niters)
    print(out)
    out = ' Range of values for number of iterations: %2i ' % np.ptp(niters)
    print(out)
    out = ' Position of max/min number of iterations: %2i -- %2i' % \
        (int(np.argmax(niters)), int(np.argmin(niters)))
    print(out)
    out = ' Std and var for number of iterations: %4.2f -- %4.2f' % (float(
        np.std(niters)), float(np.var(niters)))
    print(out)

    # report the accumulated inner (SNES/KSP) solver counts from the problem class
    print('Iteration count (nonlinear/linear): %i / %i' %
          (P.snes_itercount, P.ksp_itercount))
    print('Mean Iteration count per call: %4.2f / %4.2f' %
          (P.snes_itercount / max(P.snes_ncalls, 1),
           P.ksp_itercount / max(P.ksp_ncalls, 1)))

    timing = sort_stats(filter_stats(stats, type='timing_run'), sortby='time')

    print('Time to solution: %6.4f sec.' % timing[0][1])
    print('Error vs. reference solution: %6.4e' % err)
    print()

    assert err < 3E-06, 'ERROR: variant %s did not match error tolerance, got %s' % (
        variant, err)
    assert np.mean(
        niters
    ) <= 10, 'ERROR: number of iterations is too high, got %s' % np.mean(
        niters)

    return timing[0][1], np.mean(niters)
def run_simulation(name=''):
    """
    A simple test program to do PFASST runs for the AC equation

    Args:
        name (str): tag passed on to the problem class (used e.g. for output naming)
    """

    # set MPI communicator
    comm = MPI.COMM_WORLD

    world_rank = comm.Get_rank()
    world_size = comm.Get_size()

    # split world communicator to create space-communicators
    # sys.argv[1], if given, is the space-communicator size; otherwise each rank
    # gets its own space-communicator (color = rank)
    if len(sys.argv) >= 2:
        color = int(world_rank / int(sys.argv[1]))
    else:
        color = int(world_rank / 1)
    space_comm = comm.Split(color=color)
    # space_size = space_comm.Get_size()
    space_rank = space_comm.Get_rank()

    # split world communicator to create time-communicators
    # without a command-line argument all ranks end up in one time-communicator
    if len(sys.argv) >= 2:
        color = int(world_rank % int(sys.argv[1]))
    else:
        color = int(world_rank / world_size)
    time_comm = comm.Split(color=color)
    # time_size = time_comm.Get_size()
    time_rank = time_comm.Get_rank()

    # print("IDs (world, space, time): %i / %i -- %i / %i -- %i / %i" % (world_rank, world_size, space_rank,
    #                                                                    space_size, time_rank, time_size))

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-08
    level_params['dt'] = 1E-03
    level_params['nsweeps'] = [3, 1]  # 3 sweeps on the fine level, 1 on the coarse

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = [3]
    sweeper_params['QI'] = [
        'LU'
    ]  # For the IMEX sweeper, the LU-trick can be activated for the implicit part
    sweeper_params['initial_guess'] = 'zero'

    # initialize problem parameters
    problem_params = dict()
    problem_params['L'] = 1.0
    problem_params['nvars'] = [(128, 128), (32, 32)]  # fine and coarse spatial resolution
    problem_params['eps'] = [0.04]
    problem_params['dw'] = [-23.6]
    problem_params['radius'] = 0.25  # initial radius of the circle
    problem_params['comm'] = space_comm
    problem_params['name'] = name
    problem_params['init_type'] = 'circle'
    problem_params['spectral'] = False

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 50

    # initialize controller parameters
    controller_params = dict()
    controller_params[
        'logger_level'] = 20 if space_rank == 0 else 99  # set level depending on rank
    controller_params['hook_class'] = dump

    # fill description dictionary for easy step instantiation
    description = dict()
    # description['problem_class'] = allencahn_imex
    description['problem_class'] = allencahn_imex_timeforcing
    description['problem_params'] = problem_params  # pass problem parameters
    description['sweeper_class'] = imex_1st_order
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters
    description['space_transfer_class'] = fft_to_fft

    # set time parameters
    t0 = 0.0
    Tend = 32 * 0.001  # 32 steps of dt = 1E-03

    # instantiate controller (parallel in time via time_comm)
    controller = controller_MPI(controller_params=controller_params,
                                description=description,
                                comm=time_comm)

    # get initial values on finest level
    P = controller.S.levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # only the space-root of each time rank reports statistics (avoids duplicates)
    if space_rank == 0:

        # filter statistics by type (number of iterations)
        filtered_stats = filter_stats(stats, type='niter')

        # convert filtered statistics to list of iterations count, sorted by process
        iter_counts = sort_stats(filtered_stats, sortby='time')

        print()
        niters = np.array([item[1] for item in iter_counts])
        out = f'Mean number of iterations on rank {time_rank}: {np.mean(niters):.4f}'
        print(out)

        timing = sort_stats(filter_stats(stats, type='timing_setup'),
                            sortby='time')
        out = f'Setup time on rank {time_rank}: {timing[0][1]:.4f} sec.'
        print(out)

        timing = sort_stats(filter_stats(stats, type='timing_run'),
                            sortby='time')
        out = f'Time to solution on rank {time_rank}: {timing[0][1]:.4f} sec.'
        print(out)
        print()

        # convert filtered statistics to list of computed radii, sorted by time
        computed_radii = sort_stats(filter_stats(stats,
                                                 type='computed_radius'),
                                    sortby='time')
        exact_radii = sort_stats(filter_stats(stats, type='exact_radius'),
                                 sortby='time')

        # print radii and error over time
        for cr, er in zip(computed_radii, exact_radii):
            if er[1] > 0:
                err = abs(cr[1] - er[1]) / er[1]  # relative error
            else:
                err = 1.0  # exact radius vanished; report full error
            out = f'Computed/exact/error radius for time {cr[0]:6.4f}: ' \
                  f'{cr[1]:6.4f} / {er[1]:6.4f} / {err:6.4e}'
            print(out)
def run_diffusion(QI):
    """
    A simple test program to test PFASST convergence for the heat equation with random initial data

    Args:
        QI: preconditioner
    """

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-08
    level_params['nsweeps'] = [3, 1]

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = [3]
    sweeper_params['QI'] = [QI, 'LU']  # requested preconditioner on the fine level, LU on the coarse
    sweeper_params['spread'] = False

    # initialize problem parameters
    problem_params = dict()
    problem_params['nu'] = 0.1  # diffusion coefficient
    problem_params['freq'] = -1  # frequency for the test value
    problem_params['nvars'] = [127, 63]  # number of degrees of freedom for each level

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 200

    # initialize space transfer parameters
    space_transfer_params = dict()
    space_transfer_params['rorder'] = 2
    space_transfer_params['iorder'] = 2
    space_transfer_params['periodic'] = False

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 30
    controller_params['predict'] = False

    # fill description dictionary for easy step instantiation
    description = dict()
    description['problem_class'] = heat1d  # pass problem class
    description['problem_params'] = problem_params  # pass problem parameters
    description['dtype_u'] = mesh  # pass data type for u
    description['dtype_f'] = mesh  # pass data type for f
    description['sweeper_class'] = generic_implicit  # pass sweeper (see part B)
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['step_params'] = step_params  # pass step parameters
    description['space_transfer_class'] = mesh_to_mesh  # pass spatial transfer class
    description['space_transfer_params'] = space_transfer_params  # pass paramters for spatial transfer

    # set time parameters
    t0 = 0.0
    Tend = 1.0

    # set up number of parallel time-steps to run PFASST with
    fname = 'data/results_conv_diffusion_Linf_QI' + str(QI) + '.txt'
    # write the CSV header once; context manager guarantees the handle is closed
    with open(fname, 'w') as fh:
        writer = csv.writer(fh)
        writer.writerow(('num_proc', 'niter'))

    for i in range(0, 13):

        num_proc = 2**i
        level_params['dt'] = (Tend - t0) / num_proc
        description['level_params'] = level_params  # pass level parameters

        out = 'Working on num_proc = %5i' % num_proc
        print(out)
        cfl = problem_params['nu'] * level_params['dt'] / (
            1.0 / (problem_params['nvars'][0] + 1))**2
        out = ' CFL number: %4.2e' % cfl
        print(out)

        # instantiate controller
        controller = allinclusive_multigrid_nonMPI(
            num_procs=num_proc,
            controller_params=controller_params,
            description=description)

        # get initial values on finest level
        P = controller.MS[0].levels[0].prob
        uinit = P.u_exact(t0)

        # call main function to get things done...
        uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

        # filter statistics by type (number of iterations)
        filtered_stats = filter_stats(stats, type='niter')

        # convert filtered statistics to list of iterations count, sorted by process
        iter_counts = sort_stats(filtered_stats, sortby='time')

        niters = np.array([item[1] for item in iter_counts])
        out = ' Mean number of iterations: %4.2f' % np.mean(niters)
        print(out)

        # append this run's result; reopening per iteration keeps results on disk
        # even if a later run crashes
        with open(fname, 'a') as fh:
            writer = csv.writer(fh)
            writer.writerow((num_proc, np.mean(niters)))

    # bugfix: the file is written via csv, not pickle -- error message corrected
    assert os.path.isfile(fname), 'ERROR: CSV writer did not create file'
def run_SDC_variant(variant=None):
    """
    Routine to run particular SDC variant

    Args:
        variant (str): string describing the variant

    Returns:
        stats: statistics object gathered during the run
    """

    # load (incomplete) default parameters
    description, controller_params = setup_parameters()

    # add stuff based on variant
    if variant == 'semi-implicit':
        description['problem_class'] = allencahn2d_imex
        description['sweeper_class'] = imex_1st_order
    elif variant == 'semi-implicit-stab':
        description['problem_class'] = allencahn2d_imex_stab
        description['sweeper_class'] = imex_1st_order
    else:
        # bugfix: NotImplemented is a singleton, not an exception class
        raise NotImplementedError('Wrong variant specified, got %s' % variant)

    # setup parameters "in time"
    t0 = 0.0
    Tend = 0.02

    # set MPI communicator
    comm = MPI.COMM_WORLD

    world_rank = comm.Get_rank()
    world_size = comm.Get_size()

    # split world communicator to create space-communicators
    # sys.argv[1], if given, is the space-communicator size
    if len(sys.argv) >= 2:
        color = int(world_rank / int(sys.argv[1]))
    else:
        color = int(world_rank / 1)
    space_comm = comm.Split(color=color)
    space_size = space_comm.Get_size()
    space_rank = space_comm.Get_rank()

    # split world communicator to create time-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank % int(sys.argv[1]))
    else:
        color = int(world_rank / world_size)
    time_comm = comm.Split(color=color)
    time_size = time_comm.Get_size()
    time_rank = time_comm.Get_rank()

    print(
        "IDs (world, space, time): %i / %i -- %i / %i -- %i / %i" %
        (world_rank, world_size, space_rank, space_size, time_rank, time_size))

    # the problem parallelizes in space over the space-communicator
    description['problem_params']['comm'] = space_comm

    # set level depending on rank (only the space-root of each time rank logs)
    controller_params['logger_level'] = controller_params[
        'logger_level'] if space_rank == 0 else 99

    # instantiate controller
    controller = controller_MPI(controller_params=controller_params,
                                description=description,
                                comm=time_comm)

    # get initial values on finest level
    P = controller.S.levels[0].prob
    uinit = P.u_exact(t0)

    # if time_rank == 0:
    #     plt_helper.plt.imshow(uinit.values)
    #     plt_helper.savefig(f'uinit_{space_rank}', save_pdf=False, save_pgf=False, save_png=True)
    # exit()

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # if time_rank == 0:
    #     plt_helper.plt.imshow(uend.values)
    #     plt_helper.savefig(f'uend_{space_rank}', save_pdf=False, save_pgf=False, save_png=True)
    # exit()

    rank = comm.Get_rank()

    # filter statistics by variant (number of iterations)
    filtered_stats = filter_stats(stats, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts = sort_stats(filtered_stats, sortby='time')

    # compute and print statistics
    niters = np.array([item[1] for item in iter_counts])
    print(f'Mean number of iterations on rank {rank}: {np.mean(niters):.4f}')

    if rank == 0:
        timing = sort_stats(filter_stats(stats, type='timing_run'),
                            sortby='time')
        print(f'---> Time to solution: {timing[0][1]:.4f} sec.')
        print()

    return stats