def load_data(resultspath):
    # Sort the data from different simulations
    dirs = FT.get_result_dirs(resultspath)
    resultsdir = FT.sort_by(dirs, "eps")
    number_simulations = FT.get_number_simulations(resultspath)

    ekindata = []
    epotdata = []
    axisdata = []

    iom = IOManager()

    for resultdir in resultsdir:
        resultsfile = FT.get_results_file(resultdir)
        print(" Reading " + resultsfile)

        iom.open_file(filename=resultsfile)
        parameters = iom.load_parameters()
        number_components = parameters["ncomponents"]

        axisdata.append(parameters["eps"])

        ekin, epot = iom.load_energy()
        ekindata.append(ekin)
        epotdata.append(epot)

        iom.finalize()

    return (axisdata, ekindata, epotdata, number_simulations, number_components)
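# A minimal usage sketch (assumption, not part of the original script):
# "path/to/results" is a placeholder for a directory of finished simulations.
# It only demonstrates how the tuple returned by load_data above could be
# consumed; the returned lists are ordered by increasing eps.
if __name__ == "__main__":
    axisdata, ekindata, epotdata, nsims, ncomponents = load_data("path/to/results")
    for eps, ekin, epot in zip(axisdata, ekindata, epotdata):
        print("eps = " + str(eps) + ": loaded " + str(len(ekin)) + " kinetic energy values")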
def load_data(resultspath):
    # Sort the data from different simulations according to the filenames
    dirs = FT.get_result_dirs(resultspath)
    resultsdir = FT.sort_by(dirs, "eps")
    number_simulations = FT.get_number_simulations(resultspath)

    normdata = []
    axisdata = []

    iom = IOManager()

    for resultdir in resultsdir:
        resultsfile = FT.get_results_file(resultdir)
        print(" Reading " + resultsfile)

        iom.open_file(filename=resultsfile)
        parameters = iom.load_parameters()
        number_components = parameters["ncomponents"]

        axisdata.append(parameters["eps"])

        norms = iom.load_norm()
        normdata.append(norms)

        iom.finalize()

    return (axisdata, normdata, number_simulations, number_components)
if __name__ == "__main__": iom_o = IOManager() iom_s = IOManager() # NOTE # # first cmd-line data file is spawning data # second cmd-line data file is reference data # Read file with new simulation data try: iom_s.open_file(filename=sys.argv[1]) except IndexError: iom_s.open_file() # Read file with original reference simulation data try: iom_o.open_file(filename=sys.argv[2]) except IndexError: iom_o.open_file() # The axes rectangle that is plotted view = [-8.5, 8.5] gids = iom_s.get_group_ids(exclude=["global"]) for gid in gids:
def load_data(resultsdir, evaluation_times, which_norm="wf"):
    """This script assumes filename specification: something_eps=..._dt=..._[h|f]_other_things.
    We group the simulations first by eps and then by dt.
    """
    iom_f = IOManager()
    iom_h = IOManager()

    # Group the data from different simulations according to epsilon
    ids = get_result_dirs(resultsdir)
    eps_groups = group_by(ids, "eps")

    # Data structures for results
    epsdata = [ None for i in xrange(len(eps_groups)) ]
    axisdata = [ [] for i in xrange(len(eps_groups)) ]
    normdata = [ [ [] for i in xrange(len(eps_groups)) ] for t in xrange(len(evaluation_times)) ]

    # Loop over all simulations, grouped by same eps value
    for index, eps_group in enumerate(eps_groups):
        # Partition into fourier and hagedorn simulations
        dirs_f = gather_all(eps_group, "algorithm=fourier")
        dirs_h = gather_all(eps_group, "algorithm=hagedorn")

        if len(dirs_f) != len(dirs_h):
            raise ValueError("Found different number of fourier and hagedorn simulations!")

        # And sort by dt value
        dirs_f = sort_by(dirs_f, "dt")
        dirs_h = sort_by(dirs_h, "dt")

        # Loop over all simulations with same eps values sorted by size of dt
        for dir_f, dir_h in zip(dirs_f, dirs_h):
            print("Comparing simulation " + dir_h + " with " + dir_f)

            resultsfile_f = get_results_file(dir_f)
            iom_f.open_file(filename=resultsfile_f)

            resultsfile_h = get_results_file(dir_h)
            iom_h.open_file(filename=resultsfile_h)

            # Read the parameters
            parameters_f = iom_f.load_parameters()
            parameters_h = iom_h.load_parameters()

            # Scalar parameter of the x axis
            axisdata[index].append(parameters_f["dt"])

            # Get the data
            grid = iom_f.load_grid(blockid="global")

            WF = WaveFunction(parameters_f)
            WF.set_grid(grid)

            # Convert times to timesteps using the time manager
            tm = parameters_f.get_timemanager()

            # Loop over all times
            for i, time in enumerate(evaluation_times):
                print(" at time T: " + str(time))

                step = tm.compute_timestep(time)

                data_f = iom_f.load_wavefunction(timestep=step)
                data_h = iom_h.load_wavefunction(timestep=step)

                # Compute the norm || u_f - u_h || for all timesteps
                data_diff = data_f - data_h

                if which_norm == "wf":
                    WF.set_values( [ data_diff[0,...] ] )
                    no = WF.get_norm(summed=True)
                elif which_norm == "2":
                    no = norm( data_diff[0,...] )
                elif which_norm == "max":
                    no = max( data_diff[0,...] )

                # Append norm values to global data structure
                normdata[i][index].append(no)

            # Scalar parameter of the different curves
            # We add this here because the simulation parameters are
            # already loaded but not overwritten yet by the next iteration.
            # Remember: we need only a single epsilon out of each eps_group.
            epsdata[index] = parameters_f["eps"]

            iom_f.finalize()
            iom_h.finalize()

    # Convert lists to arrays
    epsdata = array(epsdata)
    axisdata = [ array(item) for item in axisdata ]

    return (evaluation_times, epsdata, axisdata, normdata)
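# A hedged usage sketch (assumption, not part of the original script): it
# illustrates how load_data above could be driven. The results path and the
# evaluation times are placeholders; the original driver may differ.
if __name__ == "__main__":
    evaluation_times = [1.0, 2.0, 4.0]
    times, epsdata, axisdata, normdata = load_data("path/to/results", evaluation_times, which_norm="wf")
    # normdata[t][e] holds the error norms for evaluation time t and eps group e,
    # one entry per dt value stored in axisdata[e].
    for t, time in enumerate(times):
        print("T = " + str(time) + ": error norms for " + str(len(normdata[t])) + " eps groups")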
        if fill:
            ax.fill(grid, ew, facecolor="blue", alpha=0.25)
        ax.plot(grid, ew, label=r"$\lambda_"+str(index)+r"$")

    ax.ticklabel_format(style="sci", scilimits=(0,0), axis="y")
    ax.grid(True)
    ax.set_xlabel(r"$x$")
    ax.set_ylabel(r"$\lambda_i\left(x\right)$")
    legend(loc="outer right")
    ax.set_title(r"The eigenvalues $\lambda_i$ of the potential $V\left(x\right)$")

    fig.savefig("potential"+GD.output_format)
    close(fig)


if __name__ == "__main__":
    iom = IOManager()

    # Read file with simulation data
    try:
        iom.open_file(filename=sys.argv[1])
    except IndexError:
        iom.open_file()

    parameters = iom.load_parameters()
    potential = PotentialFactory().create_potential(parameters)
    grid = iom.load_grid(blockid="global")

    plot_potential(grid, potential, fill=False)

    iom.finalize()
def load_data(resultspath, which_norm="wf"):
    # Sort the data from different simulations
    ids = FT.get_result_dirs(resultspath)
    dirs_f = FT.gather_all(ids, "fourier")
    dirs_h = FT.gather_all(ids, "hagedorn")

    dirs_f = FT.sort_by(dirs_f, "eps")
    dirs_h = FT.sort_by(dirs_h, "eps")

    if len(dirs_f) != len(dirs_h):
        raise ValueError("Found different number of fourier and hagedorn simulations!")

    number_simulations = len(dirs_f)

    normdata = []
    axisdata = []

    iom_f = IOManager()
    iom_h = IOManager()

    # Loop over all simulations
    for dir_f, dir_h in zip(dirs_f, dirs_h):
        print("Comparing simulation " + dir_h + " with " + dir_f)

        # Load the simulation data files
        resultsfile_f = FT.get_results_file(dir_f)
        iom_f.open_file(filename=resultsfile_f)

        resultsfile_h = FT.get_results_file(dir_h)
        iom_h.open_file(filename=resultsfile_h)

        # Read the parameters
        parameters_f = iom_f.load_parameters()
        parameters_h = iom_h.load_parameters()

        number_components = parameters_f["ncomponents"]

        # Scalar parameter that discriminates the simulations
        axisdata.append(parameters_f["eps"])

        # Get the data
        grid = iom_f.load_grid(blockid="global")
        timesteps = iom_f.load_wavefunction_timegrid()

        data_f = iom_f.load_wavefunction()
        data_h = iom_h.load_wavefunction()

        # Compute the norm || u_f - u_h ||_L2 for all timesteps
        data_diff = data_f - data_h

        WF = WaveFunction(parameters_f)
        WF.set_grid(grid)

        norms = []

        for i, step in enumerate(timesteps):
            if which_norm == "wf":
                WF.set_values([ data_diff[i,0,:] ])
                no = WF.get_norm()
            elif which_norm == "2":
                no = norm(data_diff[i,0,:])
            elif which_norm == "max":
                no = max(data_diff[i,0,:])

            norms.append(no)

        # Append norm values to global data structure
        norms = array(norms)
        normdata.append(norms)

        iom_f.finalize()
        iom_h.finalize()

    return (axisdata, normdata, number_simulations, number_components)
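# A minimal usage sketch (assumption, not part of the original script):
# "path/to/results" is a placeholder. It shows how the per-simulation error
# norms returned by load_data above could be inspected, here reporting the
# value at the final saved timestep for each eps.
if __name__ == "__main__":
    axisdata, normdata, nsims, ncomponents = load_data("path/to/results", which_norm="wf")
    for eps, norms in zip(axisdata, normdata):
        print("eps = " + str(eps) + ": error norm at final timestep " + str(norms[-1]))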
def load_data(resultspath, which_norm="wf"):
    # Group the data from different simulations
    ids = FT.get_result_dirs(resultspath)
    ids = FT.group_by(ids, "eps")
    nsims = FT.get_number_simulations(resultspath)

    groupdata = []
    axisdata = [ [] for i in xrange(nsims) ]
    normdata = [ [] for i in xrange(nsims) ]

    iom_f = IOManager()
    iom_h = IOManager()

    for index, sims in enumerate(ids):
        # Sorting based on file names
        dirs_f = FT.gather_all(sims, "fourier")
        dirs_h = FT.gather_all(sims, "hagedorn")

        if len(dirs_f) != len(dirs_h):
            raise ValueError("Found different number of fourier and hagedorn simulations!")

        dirs_f = FT.sort_by(dirs_f, "eps", as_string=True)
        dirs_h = FT.sort_by(dirs_h, "eps", as_string=True)

        # Loop over all simulations
        for dir_f, dir_h in zip(dirs_f, dirs_h):
            print("Comparing simulation " + dir_h + " with " + dir_f)

            resultsfile_f = FT.get_results_file(dir_f)
            iom_f.open_file(filename=resultsfile_f)

            resultsfile_h = FT.get_results_file(dir_h)
            iom_h.open_file(filename=resultsfile_h)

            # Read the parameters
            parameters_f = iom_f.load_parameters()
            parameters_h = iom_h.load_parameters()

            grid = iom_f.load_grid(blockid="global")

            # Precalculate eigenvectors for efficiency
            Potential = PotentialFactory().create_potential(parameters_f)
            eigenvectors = Potential.evaluate_eigenvectors_at(grid)

            # Get the data
            # Number of time steps we saved
            timesteps = iom_f.load_wavefunction_timegrid()

            # Scalar parameter that discriminates the simulations
            axisdata[index].append((parameters_f, timesteps))

            WF = WaveFunction(parameters_f)
            WF.set_grid(grid)

            norms = []

            for i, step in enumerate(timesteps):
                # Load the data that belong to the current timestep
                data_f = iom_f.load_wavefunction(timestep=step)
                data_h = iom_h.load_wavefunction(timestep=step)

                data_f = Potential.project_to_eigen(grid, data_f, eigenvectors)
                data_f = array(data_f)

                data_diff = data_f - data_h

                # Compute the norm || u_f - u_h ||
                if which_norm == "wf":
                    # Rearrange the data to fit the input of WF and hand it over
                    WF.set_values([ data_diff[n,:] for n in xrange(parameters_f.ncomponents) ])
                    curnorm = WF.get_norm()

                    # More than one component? If yes, compute also the overall norm
                    if parameters_f.ncomponents > 1:
                        nosum = WF.get_norm(summed=True)
                        curnorm = list(curnorm) + [nosum]

                elif which_norm == "max":
                    curnorm = [ max( abs(data_diff[n,:]) ) for n in xrange(parameters_f.ncomponents) ]

                    # More than one component? If yes, compute also the overall norm
                    if parameters_f.ncomponents > 1:
                        nosum = max(curnorm)
                        curnorm = list(curnorm) + [nosum]

                print(" at time " + str(step*parameters_f.dt) + " the error norm is " + str(curnorm))
                norms.append(curnorm)

            # Append norm values to global data structure
            norms = array(norms)
            normdata[index].append(norms)

            # Scalar parameter of the different curves
            # We add this here because the simulation parameters are
            # already loaded but not overwritten yet by the next iteration.
            # Remember: we need only a single dt value out of each group.
            groupdata.append(parameters_f.dt)

            iom_f.finalize()
            iom_h.finalize()

    return (groupdata, axisdata, normdata)
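# A hedged usage sketch (assumption, not part of the original script):
# "path/to/results" is a placeholder. It only shows how the three return
# values of load_data above could be inspected after loading.
if __name__ == "__main__":
    groupdata, axisdata, normdata = load_data("path/to/results", which_norm="wf")
    print("dt values of the curves: " + str(groupdata))
    print("number of groups with norm data: " + str(len(normdata)))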