def method(ts, time=None, step=0, show=True, save=False, latex=False, **kwargs):
    """ Plot at given timestep using matplotlib. """
    info_cyan("Plotting at given time/step using Matplotlib.")
    if time is not None:
        step, time = get_step_and_info(ts, time)
    # Plotting happens on the root process only.
    if rank != 0:
        return
    for field in ts.fields:
        save_fig_file = (os.path.join(ts.plots_folder,
                                      "{}_{:06d}.png".format(field, step))
                         if save else None)
        plot_any_field(ts.nodes, ts.elems, ts[field, step],
                       save=save_fig_file, show=show, label=field,
                       latex=latex)
def get_help(methods, methods_folder, caller=__file__, skip=0):
    """ Print usage plus each method's optional arguments, then exit.

    Each method module is imported and its ``method`` function is
    introspected for argument names and default values.
    """
    info("Usage:\n python " + os.path.basename(caller) +
         " method=... [optional arguments]\n")
    width = max(len(name) for name in methods)
    row_fmt = "{method:<" + str(width) + "} {opt_args_str}"
    head_fmt = "{:<" + str(width) + "} {}"
    info_cyan(head_fmt.format(
        "Method", "Optional arguments (=default value)"))
    for name in sorted(methods):
        module = __import__("{}.{}".format(methods_folder, name))
        func = module.__dict__[name].method
        described_args = ""
        if func.__code__.co_argcount > 1:
            # Pair argument names (offset by `skip`) with their defaults.
            pairs = zip(func.__code__.co_varnames[skip:],
                        func.__defaults__)
            described_args = ", ".join(
                "=".join(str(item) for item in pair) for pair in pairs)
        info(row_fmt.format(method=name, opt_args_str=described_args))
    exit()
def method(ts, show=False, save=True, dt=None, fps=25, skip=0,
           delete_after=True, plot_u=False, inverse_phase=False, **kwargs):
    """ Make fancy gif animation. """
    info_cyan("Making a fancy gif animation.")
    anim_name = "animation"
    ts.compute_charge()
    steps = get_steps(ts, dt)[::(skip + 1)]
    # Frames are distributed round-robin over the MPI ranks.
    for step in steps[rank::size]:
        info("Step " + str(step) + " of " + str(len(ts)))
        # When no phase field is stored, use a uniform phase of -1.
        phi = (ts["phi", step][:, 0] if "phi" in ts
               else -np.ones(len(ts.nodes)))
        if inverse_phase:
            phi = -phi
        charge = ts["charge", step][:, 0]
        charge_max = max(ts.max("charge"), -ts.min("charge"))
        charge_max = max(charge_max, 1e-8)  # Remove numerical noise
        u = ts["u", step] if (plot_u and "u" in ts) else None
        frame_path = None
        if save:
            frame_path = os.path.join(
                ts.tmp_folder, anim_name + "_{:06d}.png".format(step))
        plot_fancy(ts.nodes, ts.elems, phi, charge,
                   charge_max=charge_max, show=show, u=u, save=frame_path)
    comm.Barrier()
    if save and rank == 0:
        tmp_files = os.path.join(ts.tmp_folder, anim_name + "_*.png")
        anim_file = os.path.join(ts.plots_folder, anim_name + ".gif")
        # Stitch the frames together with ImageMagick.
        os.system(("convert -delay {delay} {tmp_files} -trim +repage"
                   " -loop 0 {anim_file}").format(
                       tmp_files=tmp_files, anim_file=anim_file,
                       delay=int(100. / fps)))
        if delete_after:
            os.system("rm {tmp_files}".format(tmp_files=tmp_files))
def method(ts, dt=0, extra_boundaries="", **kwargs):
    """ Plot value in time. """
    info_cyan("Plot value at boundary in time.")
    params = ts.get_parameters()
    steps = get_steps(ts, dt)
    problem = params["problem"]
    info("Problem: {}".format(problem))
    boundary_to_mark, ds = fetch_boundaries(ts, problem, params,
                                            extra_boundaries)
    x_ = ts.functions()
    # Expand the vector field "u" into scalar components.
    fields = dict()
    for name, func in x_.items():
        if name == "u":
            fields["u_x"], fields["u_y"] = func[0], func[1]
        else:
            fields[name] = func
    t = np.zeros(len(steps))
    data = {bnd: {fld: np.zeros(len(steps)) for fld in fields}
            for bnd in boundary_to_mark}
    for i, step in enumerate(steps):
        info("Step {} of {}".format(step, len(ts)))
        for name in x_:
            ts.update(x_[name], name, step)
        # Integrate each field over each marked boundary.
        for bnd, (mark, k) in boundary_to_mark.items():
            for fld, func in fields.items():
                data[bnd][fld][i] = df.assemble(func * ds[k](mark))
        t[i] = ts.times[step]
    field_keys = sorted(fields.keys())
    savedata = {
        bnd: np.array(list(zip(steps, t,
                               *[data[bnd][fld] for fld in field_keys])))
        for bnd in boundary_to_mark}
    if rank == 0:
        header = "Step\tTime\t" + "\t".join(field_keys)
        for bnd in boundary_to_mark:
            filename = os.path.join(
                ts.analysis_folder,
                "value_in_time_{}.dat".format(bnd))
            with open(filename, "w") as outfile:
                np.savetxt(outfile, savedata[bnd], header=header)
def method(ts, time=None, step=0, **kwargs):
    """ Plot at given time/step using dolfin. """
    info_cyan("Plotting at given timestep using Dolfin.")
    step, time = get_step_and_info(ts, time)
    funcs = ts.functions()
    for field in ts.fields:
        ts.update(funcs[field], field, step)
        df.plot(funcs[field])
    # Block until the dolfin plot windows are closed.
    df.interactive()
def method(ts, dt=0, **kwargs):
    """ Analyze geometry in time.

    Tracks interface length, phase area, centre of mass and mean velocity
    of the phi < 0 phase, and saves contours plus a time series table.
    """
    info_cyan("Analyzing the evolution of the geometry through time.")
    if not ts.get_parameter("enable_PF"):
        # FIX: was a Python 2 print statement, a syntax error on Python 3.
        print("Phase field not enabled.")
        return False
    f_mask = df.Function(ts.function_space)
    f_mask_x = []
    f_mask_u = []
    for d in range(ts.dim):
        f_mask_x.append(df.Function(ts.function_space))
        f_mask_u.append(df.Function(ts.function_space))
    length = np.zeros(len(ts))
    area = np.zeros(len(ts))
    com = np.zeros((len(ts), ts.dim))
    u = np.zeros((len(ts), ts.dim))
    makedirs_safe(os.path.join(ts.analysis_folder, "contour"))
    steps = get_steps(ts, dt)
    for step in steps:
        info("Step " + str(step) + " of " + str(len(ts)))
        phi = ts["phi", step][:, 0]
        # Indicator (mask) of the phi < 0 phase.
        mask = 0.5 * (1. - phi)  # 0.5*(1.-np.sign(phi))
        ts.set_val(f_mask, mask)
        for d in range(ts.dim):
            ts.set_val(f_mask_x[d], mask * ts.nodes[:, d])
            ts.set_val(f_mask_u[d], mask * ts["u", step][:, d])
        contour_file = os.path.join(ts.analysis_folder, "contour",
                                    "contour_{:06d}.dat".format(step))
        paths = zero_level_set(ts.nodes, ts.elems, phi,
                               save_file=contour_file)
        length[step] = path_length(paths)
        area[step] = df.assemble(f_mask * df.dx)
        for d in range(ts.dim):
            com[step, d] = df.assemble(f_mask_x[d] * df.dx)
            u[step, d] = df.assemble(f_mask_u[d] * df.dx)
    # NOTE(review): steps skipped when dt > 0 leave area == 0 and divide
    # by zero here (yielding NaN rows) — presumably acceptable; confirm.
    for d in range(ts.dim):
        com[:, d] /= area
        u[:, d] /= area
    if rank == 0:
        # FIX: np.array(zip(...)) produces a useless 0-d object array on
        # Python 3; materialize the zip as a list first (as done in the
        # other analysis methods of this file).
        np.savetxt(os.path.join(ts.analysis_folder, "time_data.dat"),
                   np.array(list(zip(np.arange(len(ts)), ts.times,
                                     length, area, com[:, 0], com[:, 1],
                                     u[:, 0], u[:, 1]))),
                   header=("Timestep\tTime\tLength\tArea\t"
                           "CoM_x\tCoM_y\tU_x\tU_y"))
def method(ts, time=None, step=0, **kwargs):
    """ Plot at given time/step using dolfin. """
    info_cyan("Plotting at given timestep using Dolfin.")
    step, time = get_step_and_info(ts, time)
    funcs = ts.functions()
    # One matplotlib figure per field.
    for fig_no, field in enumerate(ts.fields):
        ts.update(funcs[field], field, step)
        plt.figure(fig_no)
        df.plot(funcs[field], title=field)
    plt.show()
def method(ts, dt=0, **kwargs):
    """ Plot mean field values in time. """
    info_cyan("Plot mean field values in time.")
    params = ts.get_parameters()
    steps = get_steps(ts, dt)
    problem = params["problem"]
    info("Problem: {}".format(problem))
    t = np.zeros(len(steps))
    x_ = ts.functions()
    # Expand the vector field "u" into scalar components.
    fields = dict()
    for name, func in x_.items():
        if name == "u":
            fields["u_x"], fields["u_y"] = func[0], func[1]
        else:
            fields[name] = func
    data = {name: np.zeros(len(steps)) for name in fields}
    for i, step in enumerate(steps):
        info("Step {} of {}".format(step, len(ts)))
        for name in x_.keys():
            ts.update(x_[name], name, step)
        # Integrate each field over the whole domain.
        for name, func in fields.items():
            data[name][i] = df.assemble(func * df.dx)
        t[i] = ts.times[step]
    field_keys = sorted(fields.keys())
    savedata = np.array(
        list(zip(steps, t, *[data[name] for name in field_keys])))
    if rank == 0:
        header = "Step\tTime\t" + "\t".join(field_keys)
        with open(os.path.join(ts.analysis_folder,
                               "value_in_time.dat"), "w") as outfile:
            np.savetxt(outfile, savedata, header=header)
def method(ts, show=True, save_fig=False, latex=False, **kwargs):
    """ Mesh info and plot. """
    info_cyan("Mesh info and plot.")
    # Integrating the constant function 1 gives the total mesh area.
    unity = df.Function(ts.function_space)
    unity.vector()[:] = 1.
    area = df.assemble(unity * df.dx)
    info("Number of nodes: {}".format(len(ts.nodes)))
    info("Number of elements: {}".format(len(ts.elems)))
    info("Total mesh area: {}".format(area))
    info("Mean element area: {}".format(area / len(ts.elems)))
    if rank != 0:
        return
    save_fig_file = (os.path.join(ts.plots_folder, "mesh.png")
                     if save_fig else None)
    plot_faces(ts.nodes, ts.elems, title="Mesh", save=save_fig_file,
               show=show, latex=latex)
def method(ts, dt=0, **kwargs):
    """ Plot energy in time.

    Imports the solver's discrete_energy function, assembles each energy
    contribution at every step, and saves a time series table.
    """
    import importlib
    info_cyan("Plot energy in time.")
    params = ts.get_parameters()
    steps = get_steps(ts, dt)
    problem = params["problem"]
    info("Problem: {}".format(problem))
    solver = params["solver"]
    info("Solver: {}".format(solver))
    # FIX: exec("from solvers.{} import discrete_energy") does not bind
    # the name in function scope on Python 3 (NameError on first use);
    # import the solver module explicitly instead.
    discrete_energy = importlib.import_module(
        "solvers.{}".format(solver)).discrete_energy
    t = np.zeros(len(steps))
    x_ = ts.functions()
    # Calling with None returns the labels of the energy contributions.
    F_keys = discrete_energy(None, **params)
    F = [np.zeros(len(steps)) for _ in F_keys]
    for i, step in enumerate(steps):
        info("Step {} of {}".format(step, len(ts)))
        for field in x_:
            ts.update(x_[field], field, step)
        fs = discrete_energy(x_, **params)
        for j in range(len(F_keys)):
            F[j][i] = df.assemble(fs[j] * df.dx)
        t[i] = ts.times[step]
    # FIX: np.array(zip(...)) produces a 0-d object array on Python 3;
    # materialize the zip as a list first.
    data = np.array(list(zip(steps, t, *F)))
    if rank == 0:
        filename = os.path.join(ts.analysis_folder, "energy_in_time.dat")
        np.savetxt(filename, data,
                   header="Step\tTime\t" + "\t".join(F_keys))
def method(ts, time=None, step=0, show=False, save_fig=False, **kwargs):
    """ Compare to analytic reference expression at given timestep.

    This is done by importing the function "reference" in the problem
    module. The numerical solution and the analytic expression are both
    interpolated to a higher-order (degree + 3) CG space before the
    pointwise difference is taken, and error norms are saved to file.
    """
    info_cyan("Comparing to analytic reference at given time or step.")
    step, time = get_step_and_info(ts, time, step)
    parameters = ts.get_parameters(time=time)
    problem = parameters.get("problem", "intrusion_bulk")
    # NOTE(review): bare except also swallows unrelated failures (e.g. a
    # syntax error in the problem module); consider narrowing to
    # (ImportError, AttributeError).
    try:
        module = importlib.import_module("problems.{}".format(problem))
        reference = module.reference
    except:
        info_error("No analytic reference available.")
    ref_exprs = reference(t=time, **parameters)
    info("Comparing to analytic solution.")
    info_split("Problem:", "{}".format(problem))
    info_split("Time:", "{}".format(time))
    f = ts.functions(ref_exprs.keys())
    err = dict()
    f_int = dict()
    f_ref = dict()
    # Build the higher-order spaces: vector-valued fields get a
    # VectorFunctionSpace, scalar fields a FunctionSpace.
    for field in ref_exprs.keys():
        el = f[field].function_space().ufl_element()
        degree = el.degree()
        if bool(el.value_size() != 1):
            W = df.VectorFunctionSpace(ts.mesh, "CG", degree + 3)
        else:
            W = df.FunctionSpace(ts.mesh, "CG", degree + 3)
        err[field] = df.Function(W)
        f_int[field] = df.Function(W)
        f_ref[field] = df.Function(W)
    for field, ref_expr in ref_exprs.items():
        # Evaluate the reference expression at the requested time.
        ref_expr.t = time
        # Update numerical solution f
        ts.update(f[field], field, step)
        # Interpolate f to higher space
        f_int[field].assign(
            df.interpolate(f[field], f_int[field].function_space()))
        # Interpolate f_ref to higher space
        f_ref[field].assign(
            df.interpolate(ref_expr, f_ref[field].function_space()))
        err[field].vector()[:] = (f_int[field].vector().get_local() -
                                  f_ref[field].vector().get_local())
        if show or save_fig:
            # Interpolate the error to low order space for visualisation.
            err_int = df.interpolate(err[field],
                                     f[field].function_space())
            err_arr = ts.nodal_values(err_int)
            label = "Error in " + field
            # Plotting/saving happens on the root process only.
            if rank == 0:
                save_fig_file = None
                if save_fig:
                    save_fig_file = os.path.join(
                        ts.plots_folder,
                        "error_{}_time{}_analytic.png".format(field, time))
                plot_any_field(ts.nodes, ts.elems, err_arr,
                               save=save_fig_file, show=show, label=label)
    # Save norms of the per-field error functions.
    save_file = os.path.join(ts.analysis_folder,
                             "errornorms_time{}_analytic.dat".format(time))
    compute_norms(err, save=save_file)
def method(ts, dx=0.1, line="[0.,0.]--[1.,1.]", time=None, dt=None,
           skip=0, **kwargs):
    """ Probe along a line.

    Samples every field at equispaced points between the two endpoints
    given in `line`, for every selected step, and saves one table per step.
    """
    try:
        # SECURITY NOTE: eval on a user-supplied string — only run this
        # tool on trusted command-line input.
        x_a, x_b = [tuple(eval(pt)) for pt in line.split("--")]
        assert (len(x_a) == ts.dim)
        assert (len(x_b) == ts.dim)
        assert (all([
            bool(isinstance(xd, float) or isinstance(xd, int))
            for xd in list(x_a) + list(x_b)
        ]))
    except:
        info_on_red("Faulty line format. Use 'line=[x1,y1]--[x2,y2]'.")
        exit()
    info_cyan("Probe along a line.")
    x = np.array(line_points(x_a, x_b, dx))
    info("Probes {num} points from {a} to {b}".format(
        num=len(x), a=x_a, b=x_b))
    if rank == 0:
        plot_probes(ts.nodes, ts.elems, x, colorbar=False, title="Probes")
    f = ts.functions()
    probes = dict()
    from fenicstools import Probes
    # FIX: dict.iteritems() is Python 2 only; items() works on both
    # (three occurrences fixed in this function).
    for field, func in f.items():
        probes[field] = Probes(x.flatten(), func.function_space())
    steps = get_steps(ts, dt, time)
    for step in steps:
        info("Step " + str(step) + " of " + str(len(ts)))
        ts.update_all(f, step)
        for field, probe in probes.items():
            probe(f[field])
    probe_arr = dict()
    for field, probe in probes.items():
        probe_arr[field] = probe.array()
    if rank == 0:
        for i, step in enumerate(steps):
            chunks = [x]
            header_list = [index2letter(d) for d in range(ts.dim)]
            for field, chunk in probe_arr.items():
                # Normalize probe output to a 2-d (points x columns)
                # chunk; ndim > 2 means a vector field with one column
                # per spatial component.
                if chunk.ndim == 1:
                    header_list.append(field)
                    chunk = chunk[:].reshape(-1, 1)
                elif chunk.ndim == 2:
                    header_list.append(field)
                    chunk = chunk[:, i].reshape(-1, 1)
                elif chunk.ndim > 2:
                    header_list.extend(
                        [field + "_" + index2letter(d)
                         for d in range(ts.dim)])
                    chunk = chunk[:, :, i]
                chunks.append(chunk)
            data = np.hstack(chunks)
            header = "\t".join(header_list)
            makedirs_safe(os.path.join(ts.analysis_folder, "probes"))
            np.savetxt(os.path.join(ts.analysis_folder, "probes",
                                    "probes_{:06d}.dat".format(step)),
                       data, header=header)
def method(ts, ref=None, time=1., show=False, save_fig=False, **kwargs):
    """Compare to numerical reference at given timestep.

    The reference solution is assumed to be on a finer mesh, so the
    reference solution is interpolated to the coarser mesh, where the
    comparison is made.
    """
    info_cyan("Comparing to numerical reference.")
    if not isinstance(ref, str):
        info_on_red("No reference specified. Use ref=(path).")
        exit()
    ts_ref = TimeSeries(ref, sought_fields=ts.fields)
    info_split("Reference fields:", ", ".join(ts_ref.fields))
    # Compute a 'reference ID' for storage purposes
    ref_id = os.path.relpath(ts_ref.folder,
                             os.path.join(ts.folder, "../")).replace(
                                 "../", "-").replace("/", "+")
    step, time_0 = ts.get_nearest_step_and_time(time)
    step_ref, time_ref = ts_ref.get_nearest_step_and_time(
        time, dataset_str="reference")
    info("Dataset: Time = {}, timestep = {}.".format(time_0, step))
    info("Reference: Time = {}, timestep = {}.".format(time_ref, step_ref))
    f = ts.functions()
    f_ref = ts_ref.functions()
    err = ts_ref.functions()
    ts.update_all(f, step=step)
    ts_ref.update_all(f_ref, step=step_ref)
    for field in ts_ref.fields:
        # Interpolate solution to the reference mesh.
        f_int = df.interpolate(f[field], err[field].function_space())
        # FIX: Vector.array() is deprecated and removed in newer dolfin;
        # use get_local(), consistent with the analytic-reference method
        # in this file.
        err[field].vector()[:] = (f_int.vector().get_local() -
                                  f_ref[field].vector().get_local())
        if show or save_fig:
            err_arr = ts_ref.nodal_values(err[field])
            label = "Error in " + field
            if rank == 0:
                save_fig_file = None
                if save_fig:
                    save_fig_file = os.path.join(
                        ts.plots_folder, "error_{}_time{}_ref{}.png".format(
                            field, time, ref_id))
                plot_any_field(ts_ref.nodes, ts_ref.elems, err_arr,
                               save=save_fig_file, show=show, label=label)
    save_file = os.path.join(
        ts.analysis_folder,
        "errornorms_time{}_ref{}.dat".format(time, ref_id))
    compute_norms(err, save=save_file)
def method(ts, dt=0, extra_boundaries="", **kwargs):
    """ Plot flux in time.

    Assembles the normal flux of velocity, phase, mass, solutes and the
    electric field through each marked boundary at every selected step,
    and saves one table per boundary.
    """
    import importlib
    info_cyan("Plot flux in time.")
    params = ts.get_parameters()
    steps = get_steps(ts, dt)
    problem = params["problem"]
    info("Problem: {}".format(problem))
    boundary_to_mark, ds = fetch_boundaries(ts, problem, params,
                                            extra_boundaries)
    x_ = ts.functions()
    if params["enable_NS"]:
        u = x_["u"]
    else:
        u = df.Constant(0.)
    if params["enable_PF"]:
        phi = x_["phi"]
        g = x_["g"]
        # FIX: exec("from problems.{} import pf_mobility") does not bind
        # the name in function scope on Python 3 (NameError on first
        # use); import the problem module explicitly instead.
        pf_mobility = importlib.import_module(
            "problems.{}".format(problem)).pf_mobility
        M = pf_mobility(phi, params["pf_mobility_coeff"])
    else:
        phi = 1.
        g = df.Constant(0.)
        M = df.Constant(0.)
    solutes = params["solutes"]
    c = []
    c_grad_g_c = []
    if params["enable_EC"]:
        V = x_["V"]
    else:
        V = df.Constant(0.)
    dbeta = []  # Diff. in beta
    z = []  # Charge z[species]
    K = []  # Diffusivity K[species]
    beta = []  # Conc. jump func. beta[species]
    for solute in solutes:
        ci = x_[solute[0]]
        dbetai = dramp([solute[4], solute[5]])
        c.append(ci)
        z.append(solute[1])
        K.append(ramp(phi, [solute[2], solute[3]]))
        beta.append(ramp(phi, [solute[4], solute[5]]))
        dbeta.append(dbetai)
        # THIS HAS NOT BEEN GENERALIZED!
        # Electrochemical gradient term for this solute.
        c_grad_g_ci = df.grad(ci) + solute[1] * ci * df.grad(V)
        if params["enable_PF"]:
            c_grad_g_ci += dbetai * df.grad(phi)
        c_grad_g_c.append(c_grad_g_ci)
    nu = ramp(phi, params["viscosity"])
    veps = ramp(phi, params["permittivity"])
    rho = ramp(phi, params["density"])
    dveps = dramp(params["permittivity"])
    drho = dramp(params["density"])
    t = np.zeros(len(steps))
    # Define the fluxes
    fluxes = dict()
    fluxes["Velocity"] = u
    fluxes["Phase"] = phi * u
    # NOTE(review): indexes x_["u"] directly, which raises KeyError when
    # enable_NS is False — presumably should be rho * u; confirm before
    # changing.
    fluxes["Mass"] = rho * x_["u"]
    if params["enable_PF"]:
        fluxes["Phase"] += -M * df.grad(g)
        fluxes["Mass"] += -drho * M * df.grad(g)
    if params["enable_EC"]:
        for i, solute in enumerate(solutes):
            fluxes["Solute {}".format(solute[0])] = K[i] * c_grad_g_c[i]
        fluxes["E-field"] = -df.grad(V)
    data = dict()
    for boundary_name in boundary_to_mark:
        data[boundary_name] = dict()
        for flux_name in fluxes:
            data[boundary_name][flux_name] = np.zeros(len(steps))
    n = df.FacetNormal(ts.mesh)
    for i, step in enumerate(steps):
        info("Step {} of {}".format(step, len(ts)))
        for field in x_:
            ts.update(x_[field], field, step)
        # Integrate flux . n over each marked boundary.
        for boundary_name, (mark, k) in boundary_to_mark.items():
            for flux_name, flux in fluxes.items():
                data[boundary_name][flux_name][i] = df.assemble(
                    df.dot(flux, n) * ds[k](mark))
        t[i] = ts.times[step]
    savedata = dict()
    flux_keys = sorted(fluxes.keys())
    for boundary_name in boundary_to_mark:
        savedata[boundary_name] = np.array(
            list(zip(steps, t,
                     *[data[boundary_name][flux_name]
                       for flux_name in flux_keys])))
    if rank == 0:
        header = "Step\tTime\t" + "\t".join(flux_keys)
        for boundary_name in boundary_to_mark:
            with open(
                    os.path.join(
                        ts.analysis_folder,
                        "flux_in_time_{}.dat".format(boundary_name)),
                    "w") as outfile:
                np.savetxt(outfile, savedata[boundary_name],
                           header=header)