def load_data(resultspath):
    # Sort the data from different simulations
    dirs = FT.get_result_dirs(resultspath)
    resultsdir = FT.sort_by(dirs, "eps")
    number_simulations = FT.get_number_simulations(resultspath)

    ekindata = []
    epotdata = []
    axisdata = []

    iom = IOManager()

    for resultdir in resultsdir:
        resultsfile = FT.get_results_file(resultdir)
        print(" Reading " + resultsfile)

        iom.open_file(filename=resultsfile)
        parameters = iom.load_parameters()

        number_components = parameters["ncomponents"]
        axisdata.append(parameters["eps"])

        ekin, epot = iom.load_energy()
        ekindata.append(ekin)
        epotdata.append(epot)

        iom.finalize()

    return (axisdata, ekindata, epotdata, number_simulations, number_components)
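# A minimal usage sketch for the loader above, assuming it lives in a plot script
# where matplotlib and the WaveBlocks objects used here (FT, IOManager) are in scope.
# The plotting helper and the assumption that the energy arrays have one row per
# saved timestep are illustrations, not part of the original script.
from numpy import array
from matplotlib import pyplot as plt


def plot_energies_sketch(resultspath):
    axisdata, ekindata, epotdata, nsims, ncomponents = load_data(resultspath)

    fig = plt.figure()
    ax = fig.add_subplot(111)

    for eps, ekin, epot in zip(axisdata, ekindata, epotdata):
        # One curve per simulation, labelled by its eps value
        # (one line per component if the energy arrays are two-dimensional)
        ax.plot(array(ekin) + array(epot), label="eps = " + str(eps))

    ax.set_xlabel("saved timestep index")
    ax.set_ylabel("total energy")
    ax.legend()
    fig.savefig("energies_all_eps.png")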
def load_data(resultspath):
    # Sort the data from different simulations according to the filenames
    dirs = FT.get_result_dirs(resultspath)
    resultsdir = FT.sort_by(dirs, "eps")
    number_simulations = FT.get_number_simulations(resultspath)

    normdata = []
    axisdata = []

    iom = IOManager()

    for resultdir in resultsdir:
        resultsfile = FT.get_results_file(resultdir)
        print(" Reading " + resultsfile)

        iom.open_file(filename=resultsfile)
        parameters = iom.load_parameters()

        number_components = parameters["ncomponents"]
        axisdata.append(parameters["eps"])

        norms = iom.load_norm()
        normdata.append(norms)

        iom.finalize()

    return (axisdata, normdata, number_simulations, number_components)
def load_data(resultspath, which_norm="wf"): # Sort the data from different simulations ids = FT.get_result_dirs(resultspath) dirs_f = FT.gather_all(ids, "fourier") dirs_h = FT.gather_all(ids, "hagedorn") dirs_f = FT.sort_by(dirs_f, "eps") dirs_h = FT.sort_by(dirs_h, "eps") if len(dirs_f) != len(dirs_h): raise ValueError("Found different number of fourier and hagedorn simulations!") number_simulations = len(dirs_f) normdata = [] axisdata = [] iom_f = IOManager() iom_h = IOManager() # Loop over all simulations for dir_f, dir_h in zip(dirs_f, dirs_h): print("Comparing simulation " + dir_h + " with " + dir_f) # Load the simulation data files resultsfile_f = FT.get_results_file(dir_f) iom_f.open_file(filename=resultsfile_f) resultsfile_h = FT.get_results_file(dir_h) iom_h.open_file(filename=resultsfile_h) # Read the parameters parameters_f = iom_f.load_parameters() parameters_h = iom_h.load_parameters() number_components = parameters_f["ncomponents"] # Scalar parameter that discriminates the simulations axisdata.append(parameters_f["eps"]) # Get the data grid = iom_f.load_grid(blockid="global") timesteps = iom_f.load_wavefunction_timegrid() data_f = iom_f.load_wavefunction() data_h = iom_h.load_wavefunction() # Compute the norm || u_f - u_h ||_L2 for all timesteps data_diff = data_f - data_h WF = WaveFunction(parameters_f) WF.set_grid(grid) norms = [] for i, step in enumerate(timesteps): if which_norm == "wf": WF.set_values([ data_diff[i,0,:] ]) no = WF.get_norm() elif which_norm == "2": no = norm(data_diff[i,0,:]) elif which_norm == "max": no = max(data_diff[i,0,:]) norms.append(no) # Append norm values to global data structure norms = array(norms) normdata.append(norms) iom_f.finalize() iom_h.finalize() return (axisdata, normdata, number_simulations, number_components)
def load_data(resultspath, which_norm="wf"): # Group the data from different simulations ids = FT.get_result_dirs(resultspath) ids = FT.group_by(ids, "eps") nsims = FT.get_number_simulations(resultspath) groupdata = [] axisdata = [ [] for i in xrange(nsims) ] normdata = [ [] for i in xrange(nsims) ] iom_f = IOManager() iom_h = IOManager() for index, sims in enumerate(ids): # Sorting based on file names dirs_f = FT.gather_all(sims, "fourier") dirs_h = FT.gather_all(sims, "hagedorn") if len(dirs_f) != len(dirs_h): raise ValueError("Found different number of fourier and hagedorn simulations!") dirs_f = FT.sort_by(dirs_f, "eps", as_string=True) dirs_h = FT.sort_by(dirs_h, "eps", as_string=True) # Loop over all simulations for dir_f, dir_h in zip(dirs_f, dirs_h): print("Comparing simulation " + dir_h + " with " + dir_f) resultsfile_f = FT.get_results_file(dir_f) iom_f.open_file(filename=resultsfile_f) resultsfile_h = FT.get_results_file(dir_h) iom_h.open_file(filename=resultsfile_h) # Read the parameters parameters_f = iom_f.load_parameters() parameters_h = iom_h.load_parameters() grid = iom_f.load_grid(blockid="global") # Precalculate eigenvectors for efficiency Potential = PotentialFactory().create_potential(parameters_f) eigenvectors = Potential.evaluate_eigenvectors_at(grid) # Get the data # Number of time steps we saved timesteps = iom_f.load_wavefunction_timegrid() # Scalar parameter that discriminates the simulations axisdata[index].append((parameters_f, timesteps)) WF = WaveFunction(parameters_f) WF.set_grid(grid) norms = [] for i, step in enumerate(timesteps): # Load the data that belong to the current timestep data_f = iom_f.load_wavefunction(timestep=step) data_h = iom_h.load_wavefunction(timestep=step) data_f = Potential.project_to_eigen(grid, data_f, eigenvectors) data_f = array(data_f) data_diff = data_f - data_h # Compute the norm || u_f - u_h || if which_norm == "wf": # Rearrange data to fit the input of WF and handle over WF.set_values([ data_diff[n,:] for n in xrange(parameters_f.ncomponents) ]) curnorm = WF.get_norm() # More than one component? If yes, compute also the overall norm if parameters_f.ncomponents > 1: nosum = WF.get_norm(summed=True) curnorm = list(curnorm) + [nosum] elif which_norm == "max": curnorm = [ max( abs(data_diff[n,:]) ) for n in xrange(parameters_f.ncomponents) ] # More than one component? If yes, compute also the overall norm if parameters_f.ncomponents > 1: nosum = max(curnorm) curnorm = list(curnorm) + [nosum] print(" at time " + str(step*parameters_f.dt) + " the error norm is " + str(curnorm)) norms.append(curnorm) # Append norm values to global data structure norms = array(norms) normdata[index].append(norms) # Scalar parameter of the different curves # We add this here because the simulation parameters are # already loaded but not overwritten yet be the next iteration # Remember: we need only a single epsilon out of each eps_group. groupdata.append(parameters_f.dt) iom_f.finalize() iom_h.finalize() return (groupdata, axisdata, normdata)