def adaptive_epsopt(dirlist, title, norm, Bs, ax):
    """Plot error vs. effective resolution for adaptive (eps-optimized) runs.

    Parameters
    ----------
    dirlist : list of str
        Run directories to scan for the snapshot ``fullphi_000002500000.h5``.
    title : str
        Legend label prefix; the fitted convergence order is appended.
    norm : norm specifier forwarded to ``wabbit_tools.wabbit_error_vs_flusi``.
    Bs : unused; kept for backward compatibility. The block size is re-read
        from each run directory below and shadows this argument.
    ax : matplotlib axes to draw the log-log curve on.

    Raises
    ------
    ValueError
        If ``dirlist`` is empty.
    """
    e, Nb = [], []
    ref = 'ref_flusi/T5_2048_double/flusiphi_000002500000.h5'
    if len(dirlist) == 0:
        raise ValueError('no data')
    for d in dirlist:
        if os.path.isfile(d + '/fullphi_000002500000.h5'):
            # compute error w.r.t. the FLUSI reference field
            err = wabbit_tools.wabbit_error_vs_flusi(
                d + '/fullphi_000002500000.h5', ref, norm=norm)
            e.append(err)
            # effective number of points per direction: (Bs-1)*sqrt(Nblocks)
            # (note: this Bs, read from the run, shadows the Bs parameter)
            N, Bs = wabbit_tools.fetch_Nblocks_RHS_dir(d, return_Bs=True)
            Nb.append((Bs - 1) * np.sqrt(N))
    # sort data by number of blocks so the line is drawn monotonically
    Nb, e = zip(*sorted(zip(Nb, e)))
    # BUGFIX: draw on the axes handed in by the caller. Previously ax was
    # ignored and plt.loglog drew on whatever the current figure was,
    # inconsistent with the sibling function equidistant().
    ax.loglog(Nb, e,
              label=title + " [%2.2f]" % (wabbit_tools.convergence_order(Nb, e)),
              marker='o')
def equidistant(rootdir, title, norm, ax):
    """Plot convergence (error vs. effective resolution) for equidistant runs.

    Every directory matching ``rootdir*`` that contains the snapshot file
    ``fullphi_000002500000.h5`` contributes one data point: the error against
    the FLUSI reference plotted over the effective number of points,
    (Bs-1)*sqrt(Nblocks). The legend entry is ``title`` plus the fitted
    convergence order.

    Raises
    ------
    ValueError
        If no directory matches ``rootdir*``.
    """
    reference = 'ref_flusi/T5_2048_double/flusiphi_000002500000.h5'
    run_dirs = glob.glob(rootdir + '*')
    if not run_dirs:
        raise ValueError('no data')

    points, errors = [], []
    for run in run_dirs:
        snapshot = run + '/fullphi_000002500000.h5'
        if not os.path.isfile(snapshot):
            # run did not produce the snapshot — skip it
            continue
        this_err = wabbit_tools.wabbit_error_vs_flusi(snapshot, reference,
                                                      norm=norm)
        nblocks, bs = wabbit_tools.fetch_Nblocks_dir(run, return_Bs=True)
        points.append((bs - 1) * np.sqrt(nblocks))
        errors.append(this_err)

    # draw sorted by effective resolution so the curve is monotone in x
    points, errors = zip(*sorted(zip(points, errors)))
    order = wabbit_tools.convergence_order(points, errors)
    ax.loglog(points, errors, label=title + " [%2.2f]" % (order), marker='o')
def do_test(rootdir, J, name, ax1, ax2):
    """Plot error and compression rate vs. threshold eps for one Jmax level.

    Scans directories matching ``rootdir*JmaxJ*``, and for each run that has
    the snapshot file collects (eps, error-vs-FLUSI, compression rate), then
    draws error over eps on ``ax1`` (log-log) and compression over eps on
    ``ax2`` (semilog-x).

    NOTE(review): unlike the sibling functions, ``norm`` is read from the
    enclosing module scope rather than passed as a parameter — confirm it is
    defined before this is called.

    Raises
    ------
    ValueError
        If no directory matches the pattern.
    """
    dirsx = glob.glob(rootdir + '*' + 'Jmax' + J + '*')
    EPS = []
    err = []
    Nblocks = []
    reffile = 'ref_flusi/T5_2048_double/flusiphi_000002500000.h5'
    # Guard against an empty match, consistent with adaptive_epsopt and
    # equidistant. Previously an empty glob fell through to the zip() below
    # and raised a confusing "not enough values to unpack" error.
    if len(dirsx) == 0:
        raise ValueError('no data')
    for d in dirsx:
        if os.path.isfile(d + '/fullphi_000002500000.h5'):
            e = wabbit_tools.wabbit_error_vs_flusi(
                d + '/fullphi_000002500000.h5', reffile, norm=norm)
            err.append(e)
            Nblocks.append(wabbit_tools.fetch_compression_rate_dir(d))
            # Nblocks.append( wabbit_tools.fetch_Nblocks_dir(d) )
            EPS.append(wabbit_tools.fetch_eps_dir(d))
    # sort the lists (by eps)
    EPS, err, Nblocks = zip(*sorted(zip(EPS, err, Nblocks)))
    # name = name +" [%2.2f]" % (wabbit_tools.convergence_order(EPS,err))
    ax1.loglog(EPS, err, label=name, marker='o')
    ax2.semilogx(EPS, Nblocks, label=name, marker='o')
def do_test2(rootdir, tt, name, reffile):
    """Plot total CPU time vs. error for all runs matching ``rootdir``.

    For each matching run directory, the error of the snapshot at time index
    ``tt`` against ``reffile`` is computed, and the total CPU time is
    estimated as the summed per-step wall time multiplied by the number of
    ranks (counted via the ``*times.dat`` files). The result is drawn as a
    log-log curve on figure 1.

    NOTE(review): ``norm`` is read from the enclosing module scope — confirm
    it is defined before this is called.

    Raises
    ------
    ValueError
        If no run contributed a data point.
    """
    dirsx = glob.glob(rootdir)
    tcpu = []
    err = []
    f = '/fullphi_00000' + tt + '00000.h5'
    for d in dirsx:
        if os.path.isfile(d + f):
            e = wabbit_tools.wabbit_error_vs_flusi(d + f, reffile, norm=norm)
            err.append(e)
            # number of CPUs = number of per-rank timing files
            ncpu = len(glob.glob(d + '/*times.dat'))
            # FIX: do not rebind the loop variable `d` (the original assigned
            # the loaded table to `d`, which was confusing to read).
            timesteps = insect_tools.load_t_file(d + '/timesteps_info.t')
            # total CPU time = sum of per-step wall times, times #CPUs
            tcpu.append(np.sum(timesteps[:, 1]) * float(ncpu))
        else:
            warnings.warn("No data in that dir=" + d + f)
    # Guard before unpacking, consistent with the other plot helpers.
    if len(err) == 0:
        raise ValueError('no data')
    # sort the lists by error (the x-axis of the plot) — the old comment
    # claimed "by eps", which was stale
    err, tcpu = zip(*sorted(zip(err, tcpu)))
    plt.figure(1)
    plt.loglog(err, tcpu, label=name, marker='o')
# Pre-allocate one plot-handle slot per Jmax level (filled in the plotting
# section below).
l2plt = [0] * len(Jmax_list)
linfplt = [0] * len(Jmax_list)

# Sweep all (Jmax, eps) run directories and collect error / compression data.
for j, jmax_dir in enumerate(Jmax_dir_list):
    for i, eps_dir in enumerate(eps_dir_list):
        # progress indicator
        print(i)
        # Locate the dense / sparse / raw snapshot files for this run.
        # NOTE(review): the [0] indexing below assumes each glob pattern
        # matches at least one file — an empty match raises IndexError;
        # confirm the data layout before running.
        fname_dense = glob.glob(jmax_dir + "/" + eps_dir + '/' + quantity + '-dense_*' + config_id + '*.h5')
        fname_sparse = glob.glob(jmax_dir + "/" + eps_dir + '/' + quantity + '-sparse_*' + config_id + '*.h5')
        fname_ref = glob.glob(jmax_dir + "/" + eps_dir + '/' + quantity + '_*' + config_id + '*.h5')
        # compute Lp errors of the dense field vs. the FLUSI reference
        # (l2error / linferror are presumably pre-allocated earlier in the
        # file — they are indexed by eps, not by Jmax; TODO confirm)
        l2error[i] = wt.wabbit_error_vs_flusi(fname_dense[0], fname_flusi[0], norm=2, dim=2)
        # l2error[i] = wt.wabbit_error_vs_(fname_ref[0],fname_dense[0], norm=2, dim=2)
        linferror[i] = wt.wabbit_error_vs_flusi(fname_dense[0], fname_flusi[0], norm=np.inf, dim=2)
        # compute compression: total block count of the sparse (adapted) grid
        Nblocks[i] = sum(wt.block_level_distribution_file(fname_sparse[0]))
        # compute number of dense blocks (overwritten each iteration —
        # presumably constant for a given Jmax; TODO confirm)
        Nblocksdense = sum(wt.block_level_distribution_file(fname_dense[0]))

##############################################################################
# Plot