def get_basis_functions(self, prob=None):
    """
    Build and stash the array of Okada basis functions, one per active
    control per subfault.

    The basis function for a (control, subfault) pair is obtained by setting
    that single active control to one, zeroing the same control on all other
    subfaults, and computing the corresponding initial condition. Passive
    controls keep whatever values they held before assembly. The original
    control parameters are restored once assembly is complete.

    :kwarg prob: :class:`AdaptiveProblem` to solve on; a fresh one is
        constructed from ``self`` if not supplied.
    """
    from adapt_utils.unsteady.solver import AdaptiveProblem

    prob = prob or AdaptiveProblem(self)
    self._basis_functions = {}

    # Remember the current controls, then zero out every active one
    stashed = self.control_parameters.copy()
    for ctrl in self.active_controls:
        self.control_parameters[ctrl] = np.zeros(self.num_subfaults)

    banner = "INIT: Assembling Okada basis function array with active controls {:}..."
    print_output(banner.format(self.active_controls))
    progress = "INIT: Assembling '{:s}' basis function on subfault {:d}/{:d}..."

    # One unit-impulse initial condition per (active control, subfault) pair
    for ctrl in self.active_controls:
        basis = self._basis_functions[ctrl] = []
        for idx, subfault in enumerate(self.subfaults):
            self.print_debug(progress.format(ctrl, idx, self.num_subfaults), mode='full')
            self.control_parameters[ctrl][idx] = 1
            self.set_initial_condition(
                prob, annotate_source=False, subtract_from_bathymetry=False)
            basis.append(prob.fwd_solutions[0].copy(deepcopy=True))
            self.control_parameters[ctrl][idx] = 0

    # Put the original control parameters back
    self.control_parameters = stashed
# Tail of the keyword-argument dict for BeachOptions (opening brace is above this chunk)
    'ny': fac_y,
    'plot_pvd': True,
    'input_dir': inputdir,
    'output_dir': outputdir,
    'nonlinear_method': 'relaxation',  # r-adaptation solved by relaxation iteration
    'r_adapt_rtol': tol_value,
    # Spatial discretisation
    'family': 'dg-dg',
    'stabilisation': None,
    'stabilisation_sediment': None,
    'friction': 'manning'
}
op = BeachOptions(**kwargs)
assert op.num_meshes == 1  # mesh movement below assumes a single mesh
swp = AdaptiveProblem(op)


# NOTE: alpha/beta/gamma/kappa are bound as defaults at definition time, so later
# rebinding of the module-level names does not affect this function.
def gradient_interface_monitor(mesh, alpha=alpha, beta=beta, gamma=gamma, K=kappa):
    """
    Monitor function focused around the steep_gradient (budd acta numerica)

    NOTE: Defined on the *computational* mesh.
    """
    P1 = FunctionSpace(mesh, "CG", 1)
# Tail of the keyword-argument dict for BeachOptions (opening brace is above this chunk)
    'ny': fac_y,
    'plot_pvd': True,
    'input_dir': inputdir,
    'output_dir': outputdir,
    # Spatial discretisation
    'family': 'dg-dg',
    'stabilisation': None,
    'stabilisation_sediment': None,
    'friction': 'manning'
}
op = BeachOptions(**kwargs)

# Regression testing: truncate the run to a single export window
if os.getenv('REGRESSION_TEST') is not None:
    op.dt_per_export = 18
    op.end_time = op.dt * op.dt_per_export

swp = AdaptiveProblem(op)

# Time the forward solve
t1 = time.time()
swp.solve_forward()
t2 = time.time()

# Regression tests only check that the solve completes; skip post-processing
if os.getenv('REGRESSION_TEST') is not None:
    sys.exit(0)

print(t2 - t1)  # wall-clock time of the forward solve

# Project the final bathymetry onto a fixed fine mesh for comparison across runs
new_mesh = RectangleMesh(880, 20, 220, 10)
bath = Function(FunctionSpace(new_mesh, "CG", 1)).project(swp.fwd_solutions_bathymetry[0])
fpath = "hydrodynamics_beach_bath_fixed_{:d}_{:d}".format(
op = TohokuBoxBasisOptions(**kwargs)
op.di = di
op.plot_pvd = plot_pvd

# Bookkeeping for storing total variation
num_cells.append(op.default_mesh.num_cells())
gauges = list(op.gauges.keys())
if errors['timeseries'] == {}:
    # First pass: create one (empty) error series per gauge
    errors['timeseries'] = {gauge: [] for gauge in gauges}
    errors['timeseries_smooth'] = {gauge: [] for gauge in gauges}
N = int(np.ceil(np.sqrt(len(gauges))))  # side length of a square subplot grid

# Interpolate initial condition from [Saito et al. 2011] into a P1 space on the current mesh
op_saito = TohokuOptions(mesh=op.default_mesh, **kwargs)
swp_saito = AdaptiveProblem(op_saito, nonlinear=nonlinear, print_progress=op.debug)
ic_saito = op_saito.set_initial_condition(swp_saito)

# Project Saito's initial condition into the box basis
swp_box = AdaptiveProblem(op, nonlinear=nonlinear, print_progress=op.debug)
op.project(swp_box, ic_saito)
op.set_initial_condition(swp_box)
# Elevation component (index 1 of the mixed solution), projected into P1
ic_box = project(swp_box.fwd_solutions[0].split()[1], swp_box.P1[0])

# Load or save timeseries, as appropriate
if plot_only:
    for gauge in gauges:
        for options, name in zip((op_saito, op), ('original', 'projected')):
            for tt in errors:
# Tail of the keyword-argument dict for BeachOptions (opening brace is above this chunk)
    'ny': ny,
    'plot_pvd': True,
    'input_dir': inputdir,
    'output_dir': outputdir,
    'nonlinear_method': 'relaxation',
    'r_adapt_rtol': 1.0e-3,
    # Spatial discretisation
    'family': 'dg-dg',
    'stabilisation': None,
    'stabilisation_sediment': None,
    'friction': 'manning'
}
op = BeachOptions(**kwargs)
assert op.num_meshes == 1  # mesh movement below assumes a single mesh
swp = AdaptiveProblem(op)


def velocity_monitor(mesh, alpha=alpha, beta=beta, gamma=gamma, K=kappa):
    """
    Monitor function based on an indicator of positive bathymetry.

    Returns ``1 + alpha*indicator`` projected into P1 on the given mesh.
    NOTE: beta, gamma and K are accepted for interface consistency but are
    not used in this monitor.
    """
    P1 = FunctionSpace(mesh, "CG", 1)
    # Prefer the evolving bathymetry if present; otherwise fall back to the
    # static bathymetry field held by the solver object.
    b = swp.fwd_solutions_bathymetry[0]
    if b is not None:
        abs_hor_vel_norm = Function(b.function_space()).project(conditional(b > 0.0, Constant(1.0), Constant(0.0)))
    else:
        abs_hor_vel_norm = Function(swp.bathymetry[0].function_space()).project(conditional(swp.bathymetry[0] > 0.0, Constant(1.0), Constant(0.0)))
    comp_new = project(abs_hor_vel_norm, P1)
    mon_init = project(1.0 + alpha * comp_new, P1)
    return mon_init
}
# Regression testing: truncate the run to a single export window
if os.getenv('REGRESSION_TEST') is not None:
    kwargs['end_time'] = kwargs['dt'] * kwargs['dt_per_export']

# Output path encodes resolution and the monitor choices
fpath = 'resolution_{:d}'.format(n_coarse)
if initial_monitor_type is not None:
    fpath = os.path.join(fpath, initial_monitor_type)
fpath = os.path.join(fpath, monitor_type)
op = BoydOptions(approach='monge_ampere', n=n_coarse, fpath=fpath, order=kwargs['order'])
op.update(kwargs)

# --- Initialise mesh

swp = AdaptiveProblem(op)

# Refine around equator and/or soliton
if initial_monitor is not None:
    mesh_mover = MeshMover(swp.meshes[0], initial_monitor, method='monge_ampere', op=op)
    mesh_mover.adapt()
    mesh = Mesh(mesh_mover.x)
    # Re-initialise options and solver in place on the moved mesh
    op.__init__(mesh=mesh, **kwargs)
    swp.__init__(op, meshes=[mesh])

# --- Monitor function definitions
# Optional command-line overrides for timestep and simulation length
if args.dt is not None:
    op.dt = float(args.dt)
if args.end_time is not None:
    op.end_time = float(args.end_time)
op.di = create_directory(os.path.join(op.di, op.hessian_time_combination))

# --- Solve the tracer transport problem

assert op.approach != 'fixed_mesh'
for n in range(int(args.min_level or 0), int(args.max_level or 5)):
    # Double the metric target and halve the timestep with each level
    op.target = 1000*2**n
    op.dt = 0.01*0.5**n
    op.dt_per_export = 2**n

    # Run simulation
    tp = AdaptiveProblem(op)
    cpu_timestamp = perf_counter()
    tp.run()
    times = [perf_counter() - cpu_timestamp]
    dofs = [Q.dof_count for Q in tp.Q]
    num_cells = [mesh.num_cells() for mesh in tp.meshes]

    # Assess error: compare the final solution against the initial condition
    # re-set on the final mesh (i=-1)
    final_sol = tp.fwd_solutions_tracer[-1].copy(deepcopy=True)
    final_l1_norm = norm(final_sol, norm_type='L1')
    final_l2_norm = norm(final_sol, norm_type='L2')
    tp.set_initial_condition(i=-1)
    init_sol = tp.fwd_solutions_tracer[-1].copy(deepcopy=True)
    init_l1_norm = norm(init_sol, norm_type='L1')
    init_l2_norm = norm(init_sol, norm_type='L2')
    abs_l2_error = errornorm(init_sol, final_sol, norm_type='L2')
# Misc 'debug': bool(args.debug or False), } if os.getenv('REGRESSION_TEST') is not None: kwargs['end_time'] = 1.5 op = BubbleOptions(approach='lagrangian', n=int(args.n or 1)) op.update(kwargs) if args.dt is not None: op.dt = float(args.dt) if args.end_time is not None: op.end_time = float(args.end_time) # --- Initialise the mesh tp = AdaptiveProblem(op) # NOTE: We use Monge-Ampere with a monitor function indicating the initial condition alpha = 10.0 # Parameter controlling prominance of refined region eps = 1.0e-03 # Parameter controlling width of refined region def monitor(mesh): x, y = SpatialCoordinate(mesh) x0, y0, r = op.source_loc[0] return conditional(le(abs((x - x0)**2 + (y - y0)**2 - r**2), eps), alpha, 1.0) mesh_mover = MeshMover(tp.meshes[0], monitor, method='monge_ampere', op=op)
# Tail of the keyword-argument dict (opening brace is above this chunk)
    'debug': bool(args.debug or False),
}
# Regression testing: shorten the run
if os.getenv('REGRESSION_TEST') is not None:
    kwargs['end_time'] = 30.0
# Output path encodes resolution and (optionally) the monitor choice
fpath = 'resolution_{:d}'.format(n_coarse)
if monitor is not None:
    fpath = os.path.join(fpath, monitor_type)
op = BoydOptions(approach='ale' if ale else 'fixed_mesh', fpath=fpath, n=n_coarse, order=kwargs['order'])
op.update(kwargs)

# --- Initialise mesh

swp = AdaptiveProblem(op)

# Refine around equator and/or soliton
if monitor is not None:
    mesh_mover = MeshMover(swp.meshes[0], monitor, method='monge_ampere', op=op)
    mesh_mover.adapt()
    mesh = Mesh(mesh_mover.x)
    # Re-initialise options and solver in place on the moved mesh
    op.__init__(mesh=mesh, **kwargs)
    swp.__init__(op, meshes=[mesh])

# Apply constant mesh velocity
if ale:
    raise NotImplementedError  # FIXME
# Tail of the keyword-argument dict (opening brace is above this chunk)
    'approach': 'fixed_mesh',
    # Discretisation
    'tracer_family': args.family or 'dg',
    'stabilisation_tracer': args.stabilisation,
    # NOTE(review): bool(args.limiters or True) is always True, so the
    # --limiters flag cannot disable limiting — confirm whether intended.
    'use_limiter_for_tracers': bool(args.limiters or True),
    'use_tracer_conservative_form': bool(args.conservative or False),
    # Misc
    'debug': bool(args.debug or False),
}

# --- Create solver and copy initial solution

ep = AdaptiveProblem(CosinePrescribedVelocityOptions(**kwargs))
ep.set_initial_condition()
init_sol = ep.fwd_solutions_tracer[0].copy(deepcopy=True)
init_norm = norm(init_sol)

# --- Eulerian interpretation

ep.solve_forward()
final_sol_eulerian = ep.fwd_solutions_tracer[-1]
# Relative L2 error of the final state against the initial condition
relative_error_eulerian = abs(errornorm(init_sol, final_sol_eulerian)/init_norm)
print_output("Relative error in Eulerian case: {:.2f}%".format(100*relative_error_eulerian))

# --- Lagrangian interpretation
# Continuation of a basis-dispatch if/elif chain (opening branch above this chunk)
    constructor = TohokuRadialBasisOptions
elif basis == 'okada':
    from adapt_utils.case_studies.tohoku.options.okada_options import TohokuOkadaBasisOptions
    constructor = TohokuOkadaBasisOptions
else:
    raise ValueError("Basis type '{:s}' not recognised.".format(basis))
fontsize = 22
fontsize_tick = 20

# Load control parameters
# NOTE(review): '_{:d}.npy'.format(level) fills the level here, so fname still
# contains only the '{:s}' placeholder; the later fname.format('ctrl', level)
# works only because str.format ignores the surplus positional arg — confirm.
fname = os.path.join(data_dir, 'discrete', 'optimisation_progress_{:s}' + '_{:d}.npy'.format(level))
kwargs['control_parameters'] = np.load(fname.format('ctrl', level))[-1]  # last optimisation iterate
op = constructor(**kwargs)

# Plot source over whole domain
swp = AdaptiveProblem(op)
swp.set_initial_condition()
fig, axes = plt.subplots(figsize=(8, 7))
cbar = fig.colorbar(
    tricontourf(swp.fwd_solutions[0].split()[1], levels=50, cmap='coolwarm', axes=axes), ax=axes)
cbar.set_label(r'Elevation [$\mathrm m$]', size=fontsize)
axes.axis(False)
cbar.ax.tick_params(labelsize=fontsize_tick)
plt.tight_layout()
savefig('optimised_source_{:d}'.format(level), fpath=plot_dir, extensions=plot.extensions)

# Zoom: convert lon/lat corners to UTM zone 54 to set axis limits
lonlat_corners = [(138, 32), (148, 42), (138, 42)]
utm_corners = [lonlat_to_utm(*corner, 54) for corner in lonlat_corners]
xlim = [utm_corners[0][0], utm_corners[1][0]]
# Tail of the keyword-argument dict for BeachOptions (opening brace is above this chunk)
    'nx': nx,
    'ny': ny,
    'plot_pvd': True,
    'output_dir': outputdir,
    'nonlinear_method': 'relaxation',
    'r_adapt_rtol': r_tol,
    # Spatial discretisation
    'family': 'dg-dg',
    'stabilisation': None,
    'use_automatic_sipg_parameter': True,
    'friction': 'quadratic'
}
op = BeachOptions(**kwargs)
assert op.num_meshes == 1  # mesh movement below assumes a single mesh
swp = AdaptiveProblem(op)
# Disable any prescribed mesh velocity for the shallow water solve
swp.shallow_water_options[0]['mesh_velocity'] = None


# NOTE: the mod/beta_mod/alpha/beta/gamma defaults are bound at definition time
def gradient_interface_monitor(mesh, mod=mod, beta_mod=beta_mod, alpha=alpha, beta=beta, gamma=gamma, x=None):
    """
    Monitor function focused around the steep_gradient (budd acta numerica)

    NOTE: Defined on the *computational* mesh.
# Tail of the keyword-argument dict for TrenchSedimentOptions (opening brace above)
    'ny': 1 if res < 4 else 2,
    'plot_pvd': True,
    'input_dir': inputdir,
    'output_dir': outputdir,
    'nonlinear_method': 'relaxation',
    'r_adapt_rtol': rtol,
    # Spatial discretisation
    'family': 'dg-dg',
    'stabilisation': 'lax_friedrichs',
    'stabilisation_sediment': 'lax_friedrichs',
}
op = TrenchSedimentOptions(**kwargs)
op.dt_per_mesh_movement = freq  # timesteps between mesh movements
assert op.num_meshes == 1  # mesh movement below assumes a single mesh
swp = AdaptiveProblem(op)


# NOTE: intentionally shadowed by the smoother variant defined below (see NOQA)
def frobenius_monitor(mesh):
    """
    Frobenius norm taken component-wise.
    """
    P1 = FunctionSpace(mesh, "CG", 1)
    b = project(swp.fwd_solutions_bathymetry[0], P1)
    H = recovery.recover_hessian(b, op=op)
    frob = sqrt(H[0, 0]**2 + H[0, 1]**2 + H[1, 0]**2 + H[1, 1]**2)
    # Normalise by the global maximum so the monitor is O(1 + alpha_const)
    return 1 + alpha_const*frob/interpolate(frob, P1).vector().gather().max()


def frobenius_monitor(mesh, x=None):  # NOQA: Version above not smooth enough
    """
# Tail of the keyword-argument dict (opening brace is above this chunk)
    'stabilisation_tracer': args.stabilisation or 'supg',
    'use_limiter_for_tracers': bool(args.limiters or False),
    'debug': bool(args.debug or False),
}
# Accumulators for the convergence study over refinement levels
l2_error = []
cons_error = []
times = []
num_cells = []
dofs = []
for level in range(4):
    # Setup
    op = BubbleOptions(approach='fixed_mesh', n=level)
    op.update(kwargs)
    op.dt_per_export = 2**level
    tp = AdaptiveProblem(op)
    dofs.append(tp.Q[0].dof_count)
    num_cells.append(tp.mesh.num_cells())
    tp.set_initial_condition()
    init_l1_norm = norm(tp.fwd_solutions_tracer[0], norm_type='L1')
    init_l2_norm = norm(tp.fwd_solutions_tracer[0], norm_type='L2')
    init_sol = tp.fwd_solutions_tracer[0].copy(deepcopy=True)

    # Solve forward problem
    cpu_timestamp = perf_counter()
    tp.solve_forward()
    times.append(perf_counter() - cpu_timestamp)

    # Compare initial and final tracer concentrations
    final_l1_norm = norm(tp.fwd_solutions_tracer[0], norm_type='L1')
    final_l2_norm = norm(tp.fwd_solutions_tracer[0], norm_type='L2')
# Split the simulation interval across num_meshes meshes
num_meshes = int(args.num_meshes or 50)
op.end_time /= num_meshes
dt_per_mesh = int(op.end_time / op.dt)
end_time = op.end_time
dtc = Constant(op.dt)
if metric_advection:
    op.di = os.path.join(op.di, 'metric_advection')
else:
    op.di = os.path.join(op.di, 'on_the_fly')
if plot_pvd:
    tracer_file = File(os.path.join(op.di, 'tracer.pvd'))
theta = Constant(0.5)  # Crank-Nicolson-style averaging parameter

# Generate initial mesh: repeatedly set the IC, build a steady metric from it
# and adapt, so the starting mesh resolves the initial condition
tic = perf_counter()
tp = AdaptiveProblem(op)
for i in range(op.num_adapt):
    print("INITIAL MESH STEP {:d}".format(i))
    tp.set_initial_condition()
    # NOTE(review): fwd_solution_tracer (singular) — presumably the current
    # tracer state on the active mesh; confirm against AdaptiveProblem.
    c = tp.fwd_solution_tracer
    M = steady_metric(c, V=tp.P1_ten[0], normalise=True, enforce_constraints=True, op=op)
    tp = AdaptiveProblem(op, meshes=adapt(tp.mesh, M))
tp.set_initial_condition()

# Time loop
dofs = []
num_cells = []