Example #1
    def __init__(self, mesh, conditions, timestepping, params, output, solver_params):

        self.timestepping = timestepping
        self.timestep = timestepping.timestep
        self.timescale = timestepping.timescale
        self.params = params
        if output is None:
            raise RuntimeError("You must provide a directory name for dumping results")
        else:
            self.output = output
        self.outfile = File(output.dirname)
        self.dump_count = 0
        self.dump_freq = output.dumpfreq
        self.solver_params = solver_params
        self.mesh = mesh
        self.conditions = conditions

        self.ind = 1
        
        family = conditions.family
        self.x, self.y = SpatialCoordinate(mesh)
        self.n = FacetNormal(mesh)
        self.V = VectorFunctionSpace(mesh, family, conditions.order + 1)
        self.U = FunctionSpace(mesh, family, conditions.order + 1)
        self.U1 = FunctionSpace(mesh, 'DG', conditions.order)
        self.S = TensorFunctionSpace(mesh, 'DG', conditions.order)
        self.D = FunctionSpace(mesh, 'DG', 0)
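        # Mixed spaces: velocity-stress (W1), velocity with two scalar
        # fields (W2), and the combined system (W3).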
        self.W1 = MixedFunctionSpace([self.V, self.S])
        self.W2 = MixedFunctionSpace([self.V, self.U1, self.U1])
        self.W3 = MixedFunctionSpace([self.V, self.S, self.U1, self.U1])
Example #2
def run(dirname, vector, DG):

    state, f = setup_IPdiffusion(vector, DG)

    if DG:
        direction = [1, 2]
    else:
        direction = [1]

    kappa = 0.05
    if vector:
        kappa = as_tensor([[kappa, 0.],[0., kappa]])
    mu = 5.
    dt = state.timestepping.dt
    tmax = 2.5
    t = 0.
    f_diffusion = InteriorPenalty(state, f.function_space(), direction=direction,
                                  params={"kappa": kappa, "mu": Constant(mu)})
    outfile = File(path.join(dirname, "IPdiffusion/field_output.pvd"))

    dumpcount = itertools.count()

    outfile.write(f)

    fp1 = Function(f.function_space())

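    # Step until tmax; the extra 0.5*dt tolerance avoids missing the final
    # step through floating-point accumulation in t.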
    while t < tmax + 0.5*dt:
        t += dt
        f_diffusion.apply(f, fp1)
        f.assign(fp1)

        if (next(dumpcount) % 25) == 0:
            outfile.write(f)
    return f
Example #3
def run(dirname, element, continuity=False, vector=False):

    state, dgfunctionspace, u0, f, f_end = setup_DGadvection(element, vector)

    dt = state.timestepping.dt
    tmax = pi / 4.0
    t = 0.0
    f_advection = EmbeddedDGAdvection(state, f.function_space(), Vdg=dgfunctionspace, continuity=continuity)

    fp1 = Function(f.function_space())
    f_advection.ubar.assign(u0)

    dumpcount = itertools.count()
    outfile = File(path.join(dirname, "field_output.pvd"))
    outfile.write(f)

    while t < tmax + 0.5 * dt:
        t += dt
        for i in range(2):
            f_advection.apply(f, fp1)
            f.assign(fp1)

        if (next(dumpcount) % 15) == 0:
            outfile.write(f)

    f_err = Function(f.function_space()).assign(f_end - f)
    return f_err
Example #4
def export_fun(fname, *funs):
    """
    Export a list of functions to a VTK file (.pvd).

    Arguments
    ---------
    fname : str
        Filename to export to
    *funs : firedrake.Function
        Any number of functions to export
    """
    outfile = File(fname)
    outfile.write(*funs)
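

# A minimal usage sketch for export_fun. Illustrative only: the mesh, function
# space and field names below are assumptions, not part of the example above.
from firedrake import (UnitSquareMesh, FunctionSpace, Function,
                       SpatialCoordinate, sin, pi)

mesh = UnitSquareMesh(8, 8)
V = FunctionSpace(mesh, "CG", 1)
x, y = SpatialCoordinate(mesh)
f = Function(V, name="f").interpolate(sin(pi * x))
g = Function(V, name="g").interpolate(sin(pi * y))
export_fun("output/fields.pvd", f, g)  # both fields go into one .pvd series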
Example #5
def run(dirname, direction):

    state, u0, f, f_end = setup_SUPGadvection(direction)

    dt = state.timestepping.dt
    tmax = 2.5
    t = 0.
    f_advection = SUPGAdvection(state, f.function_space(), direction=direction)

    fp1 = Function(f.function_space())
    f_advection.ubar.assign(u0)

    dumpcount = itertools.count()
    outfile = File(path.join(dirname, "field_output.pvd"))
    outfile.write(f, f_end)

    while t < tmax + 0.5*dt:
        t += dt
        f_advection.apply(f, fp1)
        f.assign(fp1)

        if (next(dumpcount) % 10) == 0:
            outfile.write(f, f_end)

    f_err = Function(f.function_space()).assign(f_end - f)
    errfile = File(path.join(dirname, "ferr.pvd"))
    errfile.write(f, f_end, f_err)
    return f_err
Example #6
def create_output_file(name, comm, source_num):
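    # Only the rank that owns this shot creates a file; for all other ranks
    # the function implicitly returns None.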
    if io.is_owner(comm, source_num):
        outfile = File(
            os.path.join(
                os.getcwd(), "results",
                f"shots_{source_num}_ensemble_{comm.ensemble_comm.rank}{name}"),
            comm=comm.comm,
        )
        return outfile
Example #7
    def solve(self, file_path='ttip_result/solution.pvd'):
        """
        Set up and solve the nonlinear problem, saving the solution to the
        given file.
        Any additional keyword arguments are passed to the iteration method.

        Args:
            file_path (string, optional):
                The path to save the pvd file to.
                vtk files will be generated in the same directory as the pvd.
                It is recommended that this is a separate directory per run.
                Defaults to 'ttip_result/solution.pvd'.
        """
        F = self.problem.a - self.problem.L
        steady_state = self.is_steady_state()

        if isinstance(self.problem, BoundaryMixin):
            var_prob = NonlinearVariationalProblem(F,
                                                   self.u,
                                                   bcs=self.problem.bcs)
        else:
            var_prob = NonlinearVariationalProblem(F, self.u)
        solver = NonlinearVariationalSolver(problem=var_prob,
                                            solver_parameters=self.params)

        outfile = File(file_path)
        outfile.write(self.u, target_degree=1, target_continuity=H1)

        if steady_state:
            solver.solve()
            outfile.write(self.u, target_degree=1, target_continuity=H1)
        else:
            self.problem.T_.assign(self.u)
            last_perc = 0
            for i in range(self.problem.steps):
                solver.solve()

                perc = int(100 * (i + 1) / self.problem.steps)
                if perc > last_perc:
                    print(f'{perc}%')
                    last_perc = perc

                self.problem.T_.assign(self.u)
                outfile.write(self.u, target_degree=1, target_continuity=H1)
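
# Typical call (hypothetical sketch: the solver object and output path are
# illustrative; the class owning solve() is not shown in this excerpt):
#
#     solver.solve(file_path='ttip_result/run_01/solution.pvd')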
Example #8
def regularization_form(r):
    mesh = UnitSquareMesh(2 ** r, 2 ** r)
    x = SpatialCoordinate(mesh)

    S = VectorFunctionSpace(mesh, "CG", 1)
    beta = 4.0
    reg_solver = RegularizationSolver(S, mesh, beta=beta, gamma=0.0, dx=dx)

    # Exact solution with free Neumann boundary conditions for this domain
    u_exact = Function(S)
    u_exact_component = cos(x[0] * pi * 2) * cos(x[1] * pi * 2)
    u_exact.interpolate(as_vector((u_exact_component, u_exact_component)))
    f = Function(S)
    theta = TestFunction(S)
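    # Manufactured solution check: for u = cos(2*pi*x) * cos(2*pi*y) we have
    # laplacian(u) = -8*pi^2 * u, so u - beta*laplacian(u) = (1 + 8*beta*pi^2) * u.
    # The forcing below therefore makes u_exact the exact solution, assuming the
    # solver applies u - beta*laplacian(u) (inferred from the coefficients, not
    # from the solver's documentation).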
    f_component = (1 + beta * 8 * pi * pi) * u_exact_component
    f.interpolate(as_vector((f_component, f_component)))
    rhs_form = inner(f, theta) * dx

    velocity = Function(S)
    rhs = assemble(rhs_form)
    reg_solver.solve(velocity, rhs)
    File("solution_vel_unitsquare.pvd").write(velocity)
    return norm(project(u_exact - velocity, S))
Example #9
    def setup_dump(self, t, tmax, pickup=False):
        """
        Set up dump files.
        Checks for the existence of the output directory so as not to
        overwrite existing output files, and sets up the checkpoint file.

        :arg t: current model time
        :arg tmax: model stop time
        :arg pickup: recover state from the checkpointing file if True,
        otherwise dump and checkpoint to disk (default is False).
        """

        if any([
                self.output.dump_vtus, self.output.dumplist_latlon,
                self.output.dump_diagnostics, self.output.point_data,
                self.output.checkpoint and not pickup
        ]):
            # setup output directory and check that it does not already exist
            self.dumpdir = path.join("results", self.output.dirname)
            running_tests = '--running-tests' in sys.argv or "pytest" in self.output.dirname
            if self.mesh.comm.rank == 0:
                if not running_tests and path.exists(
                        self.dumpdir) and not pickup:
                    raise IOError("results directory '%s' already exists" %
                                  self.dumpdir)
                else:
                    if not running_tests:
                        makedirs(self.dumpdir)

        if self.output.dump_vtus:

            # setup pvd output file
            outfile = path.join(self.dumpdir, "field_output.pvd")
            self.dumpfile = File(outfile,
                                 project_output=self.output.project_fields,
                                 comm=self.mesh.comm)

            # make list of fields to dump
            self.to_dump = [
                f for f in self.fields if f.name() in self.fields.to_dump
            ]

            # make dump counter
            self.dumpcount = itertools.count()

        # if there are fields to be dumped in latlon coordinates,
        # setup the latlon coordinate mesh and make output file
        if len(self.output.dumplist_latlon) > 0:
            mesh_ll = get_latlon_mesh(self.mesh)
            outfile_ll = path.join(self.dumpdir, "field_output_latlon.pvd")
            self.dumpfile_ll = File(outfile_ll,
                                    project_output=self.output.project_fields,
                                    comm=self.mesh.comm)

            # make functions on latlon mesh, as specified by dumplist_latlon
            self.to_dump_latlon = []
            for name in self.output.dumplist_latlon:
                f = self.fields(name)
                field = Function(functionspaceimpl.WithGeometry.create(
                    f.function_space(), mesh_ll),
                                 val=f.topological,
                                 name=name + '_ll')
                self.to_dump_latlon.append(field)

        # we create new netcdf files to write to, unless pickup=True, in
        # which case we just need the filenames
        if self.output.dump_diagnostics:
            diagnostics_filename = self.dumpdir + "/diagnostics.nc"
            self.diagnostic_output = DiagnosticsOutput(diagnostics_filename,
                                                       self.diagnostics,
                                                       self.output.dirname,
                                                       self.mesh.comm,
                                                       create=not pickup)

        if len(self.output.point_data) > 0:
            # set up point data output
            pointdata_filename = self.dumpdir + "/point_data.nc"
            ndt = int(tmax / float(self.dt))
            self.pointdata_output = PointDataOutput(pointdata_filename,
                                                    ndt,
                                                    self.output.point_data,
                                                    self.output.dirname,
                                                    self.fields,
                                                    self.mesh.comm,
                                                    self.output.tolerance,
                                                    create=not pickup)

            # make point data dump counter
            self.pddumpcount = itertools.count()

            # set frequency of point data output - defaults to
            # dumpfreq if not set by user
            if self.output.pddumpfreq is None:
                self.output.pddumpfreq = self.output.dumpfreq

        # if we want to checkpoint and are not picking up from a previous
        # checkpoint file, setup the checkpointing
        if self.output.checkpoint:
            if not pickup:
                self.chkpt = DumbCheckpoint(path.join(self.dumpdir, "chkpt"),
                                            mode=FILE_CREATE)
            # make list of fields to pickup (this doesn't include
            # diagnostic fields)
            self.to_pickup = [
                f for f in self.fields if f.name() in self.fields.to_pickup
            ]

        # if we want to checkpoint then make a checkpoint counter
        if self.output.checkpoint:
            self.chkptcount = itertools.count()

        # dump initial fields
        self.dump(t)
Example #10
class State(object):
    """
    Build a model state to keep the variables in, and specify parameters.

    :arg mesh: The :class:`Mesh` to use.
    :arg dt: The time step as a :class:`Constant`. If a float or int is passed,
             it will be cast to a :class:`Constant`.
    :arg output: class containing output parameters
    :arg parameters: class containing physical parameters
    :arg diagnostics: class containing diagnostic methods
    :arg diagnostic_fields: list of diagnostic field classes
    """
    def __init__(self,
                 mesh,
                 dt,
                 output=None,
                 parameters=None,
                 diagnostics=None,
                 diagnostic_fields=None):

        if output is None:
            raise RuntimeError(
                "You must provide a directory name for dumping results")
        else:
            self.output = output
        self.parameters = parameters

        if diagnostics is not None:
            self.diagnostics = diagnostics
        else:
            self.diagnostics = Diagnostics()
        if diagnostic_fields is not None:
            self.diagnostic_fields = diagnostic_fields
        else:
            self.diagnostic_fields = []

        # The mesh
        self.mesh = mesh

        self.spaces = SpaceCreator(mesh)

        if self.output.dumplist is None:

            self.output.dumplist = []

        self.fields = StateFields(*self.output.dumplist)

        self.dumpdir = None
        self.dumpfile = None
        self.to_pickup = None

        # figure out if we're on a sphere
        try:
            self.on_sphere = (mesh._base_mesh.geometric_dimension() == 3
                              and mesh._base_mesh.topological_dimension() == 2)
        except AttributeError:
            self.on_sphere = (mesh.geometric_dimension() == 3
                              and mesh.topological_dimension() == 2)

        #  build the vertical normal and define perp for 2d geometries
        dim = mesh.topological_dimension()
        if self.on_sphere:
            x = SpatialCoordinate(mesh)
            R = sqrt(inner(x, x))
            self.k = interpolate(x / R, mesh.coordinates.function_space())
            if dim == 2:
                outward_normals = CellNormal(mesh)
                self.perp = lambda u: cross(outward_normals, u)
        else:
            kvec = [0.0] * dim
            kvec[dim - 1] = 1.0
            self.k = Constant(kvec)
            if dim == 2:
                self.perp = lambda u: as_vector([-u[1], u[0]])

        # setup logger
        logger.setLevel(output.log_level)
        set_log_handler(mesh.comm)
        if parameters is not None:
            logger.info("Physical parameters that take non-default values:")
            logger.info(", ".join("%s: %s" % (k, float(v))
                                  for (k, v) in vars(parameters).items()))

        #  Constant to hold current time
        self.t = Constant(0.0)
        if type(dt) is Constant:
            self.dt = dt
        elif type(dt) in (float, int):
            self.dt = Constant(dt)
        else:
            raise TypeError(
                f'dt must be a Constant, float or int, not {type(dt)}')

    def setup_diagnostics(self):
        """
        Add special case diagnostic fields
        """
        for name in self.output.perturbation_fields:
            f = Perturbation(name)
            self.diagnostic_fields.append(f)

        for name in self.output.steady_state_error_fields:
            f = SteadyStateError(self, name)
            self.diagnostic_fields.append(f)

        fields = set([f.name() for f in self.fields])
        field_deps = [(d, sorted(set(d.required_fields).difference(fields), ))
                      for d in self.diagnostic_fields]
        schedule = topo_sort(field_deps)
        self.diagnostic_fields = schedule
        for diagnostic in self.diagnostic_fields:
            diagnostic.setup(self)
            self.diagnostics.register(diagnostic.name)

    def setup_dump(self, t, tmax, pickup=False):
        """
        Set up dump files.
        Checks for the existence of the output directory so as not to
        overwrite existing output files, and sets up the checkpoint file.

        :arg t: current model time
        :arg tmax: model stop time
        :arg pickup: recover state from the checkpointing file if True,
        otherwise dump and checkpoint to disk (default is False).
        """

        if any([
                self.output.dump_vtus, self.output.dumplist_latlon,
                self.output.dump_diagnostics, self.output.point_data,
                self.output.checkpoint and not pickup
        ]):
            # setup output directory and check that it does not already exist
            self.dumpdir = path.join("results", self.output.dirname)
            running_tests = '--running-tests' in sys.argv or "pytest" in self.output.dirname
            if self.mesh.comm.rank == 0:
                if not running_tests and path.exists(
                        self.dumpdir) and not pickup:
                    raise IOError("results directory '%s' already exists" %
                                  self.dumpdir)
                else:
                    if not running_tests:
                        makedirs(self.dumpdir)

        if self.output.dump_vtus:

            # setup pvd output file
            outfile = path.join(self.dumpdir, "field_output.pvd")
            self.dumpfile = File(outfile,
                                 project_output=self.output.project_fields,
                                 comm=self.mesh.comm)

            # make list of fields to dump
            self.to_dump = [
                f for f in self.fields if f.name() in self.fields.to_dump
            ]

            # make dump counter
            self.dumpcount = itertools.count()

        # if there are fields to be dumped in latlon coordinates,
        # setup the latlon coordinate mesh and make output file
        if len(self.output.dumplist_latlon) > 0:
            mesh_ll = get_latlon_mesh(self.mesh)
            outfile_ll = path.join(self.dumpdir, "field_output_latlon.pvd")
            self.dumpfile_ll = File(outfile_ll,
                                    project_output=self.output.project_fields,
                                    comm=self.mesh.comm)

            # make functions on latlon mesh, as specified by dumplist_latlon
            self.to_dump_latlon = []
            for name in self.output.dumplist_latlon:
                f = self.fields(name)
                field = Function(functionspaceimpl.WithGeometry.create(
                    f.function_space(), mesh_ll),
                                 val=f.topological,
                                 name=name + '_ll')
                self.to_dump_latlon.append(field)

        # we create new netcdf files to write to, unless pickup=True, in
        # which case we just need the filenames
        if self.output.dump_diagnostics:
            diagnostics_filename = self.dumpdir + "/diagnostics.nc"
            self.diagnostic_output = DiagnosticsOutput(diagnostics_filename,
                                                       self.diagnostics,
                                                       self.output.dirname,
                                                       self.mesh.comm,
                                                       create=not pickup)

        if len(self.output.point_data) > 0:
            # set up point data output
            pointdata_filename = self.dumpdir + "/point_data.nc"
            ndt = int(tmax / float(self.dt))
            self.pointdata_output = PointDataOutput(pointdata_filename,
                                                    ndt,
                                                    self.output.point_data,
                                                    self.output.dirname,
                                                    self.fields,
                                                    self.mesh.comm,
                                                    self.output.tolerance,
                                                    create=not pickup)

            # make point data dump counter
            self.pddumpcount = itertools.count()

            # set frequency of point data output - defaults to
            # dumpfreq if not set by user
            if self.output.pddumpfreq is None:
                self.output.pddumpfreq = self.output.dumpfreq

        # if we want to checkpoint and are not picking up from a previous
        # checkpoint file, setup the checkpointing
        if self.output.checkpoint:
            if not pickup:
                self.chkpt = DumbCheckpoint(path.join(self.dumpdir, "chkpt"),
                                            mode=FILE_CREATE)
            # make list of fields to pickup (this doesn't include
            # diagnostic fields)
            self.to_pickup = [
                f for f in self.fields if f.name() in self.fields.to_pickup
            ]

        # if we want to checkpoint then make a checkpoint counter
        if self.output.checkpoint:
            self.chkptcount = itertools.count()

        # dump initial fields
        self.dump(t)

    def pickup_from_checkpoint(self):
        """
        Pick up the model state from the checkpoint file.

        :returns: the model time read from the checkpoint.
        """
        # TODO: this duplicates some code from setup_dump. Can this be avoided?
        # It is because we don't know if we are picking up or setting dump first
        if self.to_pickup is None:
            self.to_pickup = [
                f for f in self.fields if f.name() in self.fields.to_pickup
            ]
        # Set dumpdir if has not been done already
        if self.dumpdir is None:
            self.dumpdir = path.join("results", self.output.dirname)

        if self.output.checkpoint:
            # Open the checkpointing file for writing
            if self.output.checkpoint_pickup_filename is not None:
                chkfile = self.output.checkpoint_pickup_filename
            else:
                chkfile = path.join(self.dumpdir, "chkpt")
            with DumbCheckpoint(chkfile, mode=FILE_READ) as chk:
                # Recover all the fields from the checkpoint
                for field in self.to_pickup:
                    chk.load(field)
                t = chk.read_attribute("/", "time")
            # Setup new checkpoint
            self.chkpt = DumbCheckpoint(path.join(self.dumpdir, "chkpt"),
                                        mode=FILE_CREATE)
        else:
            raise ValueError("Must set checkpoint True if pickup")

        return t

    def dump(self, t):
        """
        Dump output
        """
        output = self.output

        # Diagnostics:
        # Compute diagnostic fields
        for field in self.diagnostic_fields:
            field(self)

        if output.dump_diagnostics:
            # Output diagnostic data
            self.diagnostic_output.dump(self, t)

        if len(output.point_data) > 0 and (next(self.pddumpcount) %
                                           output.pddumpfreq) == 0:
            # Output pointwise data
            self.pointdata_output.dump(self.fields, t)

        # Dump all the fields to the checkpointing file (backup version)
        if output.checkpoint and (next(self.chkptcount) %
                                  output.chkptfreq) == 0:
            for field in self.to_pickup:
                self.chkpt.store(field)
            self.chkpt.write_attribute("/", "time", t)

        if output.dump_vtus and (next(self.dumpcount) % output.dumpfreq) == 0:
            # dump fields
            self.dumpfile.write(*self.to_dump)

            # dump fields on latlon mesh
            if len(output.dumplist_latlon) > 0:
                self.dumpfile_ll.write(*self.to_dump_latlon)

    def initialise(self, initial_conditions):
        """
        Initialise state variables

        :arg initial_conditions: An iterable of pairs (field_name, pointwise_value)
        """
        for name, ic in initial_conditions:
            f_init = getattr(self.fields, name)
            f_init.assign(ic)
            f_init.rename(name)

    def set_reference_profiles(self, reference_profiles):
        """
        Initialise reference profiles

        :arg reference_profiles: An iterable of pairs (field_name, interpolatory_value)
        """
        for name, profile in reference_profiles:
            if name + 'bar' in self.fields:
                # For reference profiles already added to state, allow
                # interpolation from expressions
                ref = self.fields(name + 'bar')
            elif isinstance(profile, Function):
                # Need to add reference profile to state so profile must be
                # a Function
                ref = self.fields(name + 'bar',
                                  space=profile.function_space(),
                                  dump=False)
            else:
                raise ValueError(
                    f'When initialising reference profile {name}' +
                    ' the passed profile must be a Function')
            ref.interpolate(profile)
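

# Construction sketch (illustrative: the OutputParameters name and the argument
# values are assumptions about the surrounding package, not shown above):
#
#     output = OutputParameters(dirname='run1', dumpfreq=10)
#     state = State(mesh, dt=0.5, output=output)
#     state.setup_diagnostics()
#     state.setup_dump(t=0.0, tmax=100.0)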
Example #11
    'pc_python_type': 'firedrake.HybridizationPC',
    'hybridization': {
        'ksp_type': 'preonly',
        'pc_type': 'lu',
        'pc_factor_mat_solver_type': 'mumps'
    }
}

uD_problem = LinearVariationalProblem(lhs(eqn), rhs(eqn), xd)
uD_solver = LinearVariationalSolver(uD_problem, solver_parameters=params)

if COMM_WORLD.Get_rank() == 0:
    print("Finished setting up solvers at  ", ctime())

# Setup output
outfile = File('{0}.pvd'.format(fname))
field_output = [un, eta_out, En, vortn, qn, q2Dn]


# Create latlon version of output
def get_latlon_mesh(mesh):
    """Build a 2D projected (lat-lon) mesh from a spherical mesh"""
    crds_orig = mesh.coordinates
    mesh_dg_fs = VectorFunctionSpace(mesh, "DG", 1)
    crds_dg = Function(mesh_dg_fs)
    crds_latlon = Function(mesh_dg_fs)
    par_loop(
        """
for (int i=0; i<3; i++) {
    for (int j=0; j<3; j++) {
        dg[i][j] = cg[i][j];
Example #12
def test_forward_5shots():
    model = {}

    model["opts"] = {
        "method": "KMV",  # either CG or KMV
        "quadrature": "KMV",  # Equi or KMV
        "degree": 4,  # p order
        "dimension": 2,  # dimension
    }
    model["parallelism"] = {
        "type": "automatic",
    }
    model["mesh"] = {
        "Lz": 3.5,  # depth in km - always positive
        "Lx": 17.0,  # width in km - always positive
        "Ly": 0.0,  # thickness in km - always positive
        "meshfile": "meshes/marmousi_5Hz.msh",
        "initmodel": None,
        "truemodel": "velocity_models/vp_marmousi-ii.hdf5",
    }
    model["BCs"] = {
        "status": True,  # True or false
        "outer_bc": "non-reflective",  #  None or non-reflective (outer boundary condition)
        "damping_type": "polynomial",  # polynomial, hyperbolic, shifted_hyperbolic
        "exponent": 2,  # damping layer has a exponent variation
        "cmax": 4.5,  # maximum acoustic wave velocity in PML - km/s
        "R": 1e-6,  # theoretical reflection coefficient
        "lz": 0.9,  # thickness of the PML in the z-direction (km) - always positive
        "lx": 0.9,  # thickness of the PML in the x-direction (km) - always positive
        "ly": 0.0,  # thickness of the PML in the y-direction (km) - always positive
    }
    model["acquisition"] = {
        "source_type": "Ricker",
        "source_pos": spyro.create_transect((-0.1, 1.0), (-0.1, 15.0), 5),
        "frequency": 5.0,
        "delay": 1.0,
        "receiver_locations": spyro.create_transect((-0.1, 1.0), (-0.1, 15.0), 13),
    }
    model["timeaxis"] = {
        "t0": 0.0,  #  Initial time for event
        "tf": 3.00,  # Final time for event
        "dt": 0.001,
        "amplitude": 1,  # the Ricker has an amplitude of 1.
        "nspool": 100,  # how frequently to output solution to pvds
        "fspool": 99999,  # how frequently to save solution to RAM
    }

    dt = model["timeaxis"]["dt"]
    final_time = model["timeaxis"]["tf"]

    comm = spyro.utils.mpi_init(model)

    mesh, V = spyro.io.read_mesh(model, comm)
    vp = spyro.io.interpolate(model, mesh, V, guess=False)

    if comm.ensemble_comm.rank == 0:
        File("true_velocity.pvd", comm=comm.comm).write(vp)
    sources = spyro.Sources(model, mesh, V, comm)
    receivers = spyro.Receivers(model, mesh, V, comm)
    wavelet = spyro.full_ricker_wavelet(
        dt=model["timeaxis"]["dt"],
        tf=model["timeaxis"]["tf"],
        freq=model["acquisition"]["frequency"],
    )
    p, p_r = spyro.solvers.forward(model, mesh, comm, vp, sources, wavelet, receivers)

    pass_error_test = False
    for source_id in range(len(model["acquisition"]["source_pos"])):
        if comm.ensemble_comm.rank == (source_id % comm.ensemble_comm.size):
            receiver_in_source_index = get_receiver_in_source_location(source_id, model)
            if source_id != len(model["acquisition"]["source_pos"])-1 or source_id == 0:
                receiver_comparison_index = receiver_in_source_index + 1
            else:
                receiver_comparison_index = receiver_in_source_index - 1
            error_percent = compare_velocity(p_r, receiver_in_source_index, receiver_comparison_index, model, dt)
            if error_percent < 5:
                pass_error_test = True
            print(f"For source = {source_id}: test = {pass_error_test}", flush=True)

    spyro.plots.plot_shots(model, comm, p_r, vmin=-1e-3, vmax=1e-3)
    spyro.io.save_shots(model, comm, p_r)
    assert pass_error_test
Example #13
def nlspace_solve(problem: InfDimProblem,
                  params=None,
                  results=None,
                  descent_output_dir=None):
    """
    Solve the optimization problem
        min      J(phi)
        phi in V
        under the constraints
        g_i(phi)=0  for all i=0..p-1
        h_i(phi)<=0 for all i=0..q-1

    Usage
    -----
    results=nlspace_solve(problem: InfDimProblem, params: dict, results:dict)

    Inputs
    ------
    problem : an `~InfDimProblem` object corresponding to the optimization
                  problem above.

    params  : (optional) a dictionary containing algorithm parameters
              (see below).

    results : (optional) a previous output of the `nlspace_solve` function.
              The optimization will keep going from the last input of
              the dictionary `results['phi'][-1]`.
              Useful to restart an optimization after an interruption.
    descent_output_dir : Plot the descent direction in the given directory

    Output
    ------
    results : dictionary containing
        results['J']       : values of the objective function along the path
                             (J(phi_0),...,J(phi_n))
        results['G']       : equality constraint values
                             (G(phi_0),...,G(phi_n))
        results['H']       : inequality constraints values
                             (H(phi_0),...,H(phi_n))
        results['muls']    : lagrange multiplier values
                             (mu(phi_0),...,mu(phi_n))


    Optional algorithm parameters
    -----------------------------

    params['alphaJ']   : (default 1) scaling coefficient for the null space
        step xiJ decreasing the objective function

    params['alphaC']   : (default 1) scaling coefficient for the Gauss Newton
        step xiC decreasing the violation of the constraints

    params['alphas']   : (optional) vector of dimension
        problem.nconstraints + problem.nineqconstraints containing
        proportionality coefficients scaling the Gauss Newton direction xiC for
        each of the constraints

    params['debug'] : Tune the verbosity of the output (default 0)
                      Set params['debug']=-1 to display only the final result
                      Set params['debug']=-2 to remove any output

    params['dt'] : (default : `1.0`). Pseudo time-step expressed in a time unit.
        Used to modulate the optimization convergence/oscillatory behavior.

    params['hmin'] : (default : `1.0`). Mesh minimum length. TODO Replace this
        with dt in the calculation of the tolerances `eps`

    params['K']: tunes the distance at which inactive inequality constraints
        are felt. Constraints are felt from a distance K*params['dt']

    params['maxit']    : Maximal number of iterations (default : 4000)

    params['maxtrials']: (default 3) number of trials in between time steps
        until the merit function decreases

    params['tol']      : (default 1e-7) Algorithm stops when
            ||phi_{n+1}-phi_n||<params['tol']
        or after params['maxit'] iterations.

    params['tol_merit'] : (default 0) a new iterate phi_{n+1} is accepted if
        merit(phi_{n+1})<(1+sign(merit(phi_n)*params['tol_merit']))*merit(phi_n)

    params['tol_qp'] : (default 1e-20) the tolerance for the qp solver cvxopt

    params['show_progress_qp'] : (default False) If true, then the output of
        cvxopt will be displayed between iterations.

    params['monitor_time'] : (default False) If true, the time taken between
        optimization iterations is reported.

    """

    params = set_parameters(params)

    alphas = np.asarray(
        params.get(
            "alphas",
            [1] * (len(problem.eqconstraints) + len(problem.ineqconstraints)),
        ))

    if descent_output_dir:
        descent_pvd = File(f"{descent_output_dir}/descent_direction.pvd")

    results = {
        "phi": [],
        "J": [],
        "G": [],
        "H": [],
        "muls": [],
        "merit": [],
    }

    phi = problem.x0()

    (J, G, H) = problem.eval(phi)

    normdx = 1  # current value for x_{n+1}-x_n

    new_phi = Function(phi.function_space())
    orig_phi = Function(phi.function_space())
    while normdx > params["tol"] and len(results["J"]) < params["maxit"]:
        with MPITimer(phi.comm) as timings:

            results["J"].append(J)
            results["G"].append(G)
            results["H"].append(H)

            if problem.accept():
                break

            it = len(results["J"]) - 1
            display("\n", params["debug"], 1)
            display(
                f"{it}. J=" + format(J, ".4g") + " " + "G=[" +
                ",".join(format(g, ".4g") for g in G[:10]) + "] " + "H=[" +
                ",".join(format(h, ".4g") for h in H[:10]) + "] " +
                " ||dx||_V=" + format(normdx, ".4g"),
                params["debug"],
                0,
            )

            # Returns the gradients (in the primal space). They are
            # firedrake.Function's
            (dJ, dG, dH) = problem.eval_gradients(phi)
            dC = dG + dH

            H = np.asarray(H)
            G = np.asarray(G)
            C = np.concatenate((G, H))

            # Obtain the tolerances for the inequality constraints and the indices
            # for the violated constraints
            eps = getEps(
                dC,
                problem.n_eqconstraints,
                params["dt"],
                params["K"],
                norm_type=params["normalisation_norm"],
            )
            tildeEps = getTilde(C, problem.n_eqconstraints, eps=eps)
            print(f"eps: {eps}")
            # Obtain the violated constraints
            tilde = getTilde(C, problem.n_eqconstraints)

            p_matrix = p_matrix_eval(dC, tildeEps)
            q_vector = q_vector_eval(dJ, dC, tildeEps)
            qp_results = solve_dual_problem(
                p_matrix,
                q_vector,
                tildeEps,
                problem.n_eqconstraints,
                show_progress_qp=params["show_progress_qp"],
                tol_qp=params["tol_qp"],
            )
            muls = np.zeros(len(C))
            oldmuls = np.zeros(len(C))
            hat = np.asarray([False] * len(C))

            if qp_results:
                muls[tildeEps] = np.asarray(qp_results["x"]).flatten()
                oldmuls = muls.copy()
                hat = np.asarray([True] * len(C))
                hat[problem.n_eqconstraints:] = (muls[problem.n_eqconstraints:]
                                                 > 30 * params["tol_qp"])
                if params.get("disable_dual", False):
                    hat = tildeEps

                dCdCT = dCdCT_eval(dC, hat)
                dCdCTinv = invert_dCdCT(dCdCT, params["debug"])
                muls = np.zeros(len(C))

                dCdJ = dCdJ_eval(dJ, dC, hat)
                muls[hat] = -dCdCTinv.dot(dCdJ[hat])

                if not np.all(muls[problem.n_eqconstraints:] >= 0):
                    display(
                        "Warning, the active set has not been predicted " +
                        "correctly Using old lagrange multipliers",
                        params["debug"],
                        level=1,
                        color="orange_4a",
                    )
                    hat = np.asarray([True] * len(C))
                    muls = oldmuls.copy()

            results["muls"].append(muls)
            display(f"Lagrange multipliers: {muls[:10]}",
                    params["debug"],
                    level=5)
            xiJ = xiJ_eval(dJ, dC, muls, hat)

            # Set of constraints union of active and new violated constraints.
            indicesEps = np.logical_or(tilde, hat)
            dCdCT = dCdCT_eval(dC, indicesEps)
            dCtdCtTinv = invert_dCdCT(dCdCT, params["debug"])

            xiC = xiC_eval(C, dC, dCtdCtTinv, alphas, indicesEps)

            # TODO Consider this? AC = min(0.9, alphaC * dt / max(compute_norm(xiC), 1e-9))
            AJ = params["alphaJ"]
            AC = params["alphaC"]

            # Make updates with merit function
            if xiC:
                problem.delta_x.assign(
                    Constant(-AJ) * xiJ - Constant(AC) * xiC)
            else:
                problem.delta_x.assign(Constant(-AJ) * xiJ)
            normdx = fd.norm(problem.delta_x)

            merit_eval_new = partial(merit_eval, muls, indicesEps, dCtdCtTinv)
            merit = merit_eval_new(AJ, J, AC, C)
            results["merit"].append(merit)
            if len(results["merit"]) > 3:
                print(
                    f"Merit oscillation: {(results['merit'][-1] - results['merit'][-2]) * (results['merit'][-2] - results['merit'][-3])}"
                )

            if descent_output_dir:
                descent_pvd.write(problem.delta_x)

            orig_phi.assign(phi)
            new_phi, newJ, newG, newH = line_search(
                problem,
                orig_phi,
                new_phi,
                merit_eval_new,
                merit,
                AJ,
                AC,
                dt=params["dt"],
                maxtrials=params["maxtrials"],
                tol_merit=params["tol_merit"],
                debug=params["debug"],
            )
            phi.assign(new_phi)
            (J, G, H) = (newJ, newG, newH)

        if params["monitor_time"]:
            print(
                f"Max time per iteration: {timings.max_time}, min time per iteration: {timings.min_time}"
            )

    results["J"].append(J)
    results["G"].append(G)
    results["H"].append(H)

    display("\n", params["debug"], -1)
    display("Optimization completed.", params["debug"], -1)
    return results
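
# Example parameter dictionary covering the commonly tuned knobs documented in
# the docstring above (values are illustrative, not recommendations):
#
#     params = {"alphaJ": 1.0, "alphaC": 1.0, "dt": 0.1,
#               "maxit": 2000, "tol": 1e-7, "monitor_time": True}
#     results = nlspace_solve(problem, params=params)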
Example #14
    def setup_dump(self, tmax, pickup=False):
        """
        Set up dump files.
        Checks for the existence of the output directory so as not to
        overwrite existing output files, and sets up the checkpoint file.

        :arg tmax: model stop time
        :arg pickup: recover state from the checkpointing file if True,
        otherwise dump and checkpoint to disk (default is False).
        """
        self.dumpdir = path.join("results", self.output.dirname)
        outfile = path.join(self.dumpdir, "field_output.pvd")
        if self.mesh.comm.rank == 0 and "pytest" not in self.output.dirname \
           and path.exists(self.dumpdir) and not pickup:
            raise IOError("results directory '%s' already exists" %
                          self.dumpdir)
        self.dumpcount = itertools.count()
        self.dumpfile = File(outfile,
                             project_output=self.output.project_fields,
                             comm=self.mesh.comm)
        if self.output.checkpoint and not pickup:
            self.chkpt = DumbCheckpoint(path.join(self.dumpdir, "chkpt"),
                                        mode=FILE_CREATE)

        # make list of fields to dump
        self.to_dump = [field for field in self.fields if field.dump]

        # if there are fields to be dumped in latlon coordinates,
        # setup the latlon coordinate mesh and make output file
        if len(self.output.dumplist_latlon) > 0:
            mesh_ll = get_latlon_mesh(self.mesh)
            outfile_ll = path.join(self.dumpdir, "field_output_latlon.pvd")
            self.dumpfile_ll = File(outfile_ll,
                                    project_output=self.output.project_fields,
                                    comm=self.mesh.comm)

        # make list of fields to pickup (this doesn't include diagnostic fields)
        self.to_pickup = [field for field in self.fields if field.pickup]

        # make functions on latlon mesh, as specified by dumplist_latlon
        self.to_dump_latlon = []
        for name in self.output.dumplist_latlon:
            f = self.fields(name)
            field = Function(functionspaceimpl.WithGeometry(
                f.function_space(), mesh_ll),
                             val=f.topological,
                             name=name + '_ll')
            self.to_dump_latlon.append(field)

        # we create new netcdf files to write to, unless pickup=True, in
        # which case we just need the filenames
        if self.output.dump_diagnostics:
            diagnostics_filename = self.dumpdir + "/diagnostics.nc"
            self.diagnostic_output = DiagnosticsOutput(diagnostics_filename,
                                                       self.diagnostics,
                                                       self.output.dirname,
                                                       create=not pickup)

        if len(self.output.point_data) > 0:
            pointdata_filename = self.dumpdir + "/point_data.nc"

            ndt = int(tmax / self.timestepping.dt)
            self.pointdata_output = PointDataOutput(pointdata_filename,
                                                    ndt,
                                                    self.output.point_data,
                                                    self.output.dirname,
                                                    self.fields,
                                                    create=not pickup)
Example #15
    def __init__(self,
                 prognostic_variables,
                 diagnostic_variables,
                 simulation_parameters,
                 peakon_equations=None,
                 diagnostic_values=None):

        self.ndump = simulation_parameters['ndump'][-1]
        self.field_ndump = simulation_parameters['field_ndump'][-1]
        self.prognostic_variables = prognostic_variables
        self.diagnostic_variables = diagnostic_variables
        self.simulation_parameters = simulation_parameters
        self.peakon_equations = peakon_equations
        file_name = simulation_parameters['file_name'][-1]
        dirname = simulation_parameters['dirname'][-1]
        self.data_file = Dataset(file_name, 'a')

        # set up things for dumping diagnostics
        if diagnostic_values is not None:
            if isinstance(diagnostic_values, str):
                dimensions = self.data_file[diagnostic_values].dimensions
            else:
                if diagnostic_values[0] != 'mu':
                    dimensions = self.data_file[
                        diagnostic_values[0]].dimensions
                else:
                    dimensions = self.data_file[diagnostic_values[0] + '_' +
                                                str(0)].dimensions

        index_list = []
        variable_list = []
        for dimension in dimensions:
            if dimension not in ('time', 'x'):
                variable_list.append(dimension)
                index_list.append(simulation_parameters[dimension][0])

        self.index_slices = [slice(index, index + 1) for index in index_list]
        self.t_idx = 0
        self.diagnostic_values = diagnostic_values
        self.list_of_peakon_diagnostics = [
            'peakon_loc', 'peakon_min_du', 'peakon_max_du',
            'peakon_min_du_loc', 'peakon_max_du_loc', 'peakon_max_u',
            'peakon_mu', 'peakon_nu'
        ]

        # set up things for dumping fields
        if self.field_ndump > 0:
            field_file_name = dirname + '/fields'
            for dimension, index in zip(variable_list, index_list):
                field_file_name += '_' + str(dimension) + str(index)
            field_file_name += '_output.pvd'
            self.field_file = File(field_file_name)

            prognostic_variables = [
                value for value in self.prognostic_variables.fields.values()
            ]
            diagnostic_variables = [
                value
                for value in self.diagnostic_variables.dumpfields.values()
            ]
            self.dumpfields = prognostic_variables + diagnostic_variables

        self.out_string = ''
        for key, value in simulation_parameters.items():
            if len(value) > 1:
                self.out_string += str(key) + ' = %s, ' % str(value[0])

        # write initial wallclock time
        self.data_file['wallclock_time'][
            [slice(0, 1)] + self.index_slices] = datetime.now().timestamp()

        # set up coordinate field
        if self.simulation_parameters['store_coordinates'][-1]:
            self.data_file[
                'x'][:] = self.diagnostic_variables.coords.dat.data[:]
Example #16
            str(LC1), "-setnumber", "lc2",
            str(LC2), "mesh/hexa.geo"
        ])

        permittivity_dict = {1: 1, 2: 11.8, 3: 1}
        s = np.array([1, 2])
        p = np.array([-2, 1])
        k0L = np.pi

        print("Isotropic Scattering with permittivity {} and n {}".format(
            permittivity_dict, i))
        problem = IsotropicScattering(mesh_file, permittivity_dict, k0L)
        pw = PlaneWave(s, p)

        E_isotropic = problem.solve(pw)
        File(f"results/isotropic_{i}.pvd").write(E_isotropic)

        phi, FF_isotropic = problem.get_far_field(E_isotropic,
                                                  FAR_FIELD_POINTS)
        np.save(f"results/ff_isotropic-{i}.npy", FF_isotropic)

        epsilon = [[5.46549124, 0], [0, 5.7717177]]

        permittivity_dict = {1: epsilon, 2: epsilon, 3: np.identity(2)}
        print("Anisotropic Scattering with permittivity {} and n {}".format(
            permittivity_dict, i))
        problem = AnisotropicScattering(problem.mesh, permittivity_dict, k0L)
        E_anisotropic = problem.solve(pw)
        File(f"results/anisotropic_{i}.pvd").write(E_anisotropic)

        _, FF_anisotropic = problem.get_far_field(E_anisotropic,
Example #17
def do_simulation_loop(N,
                       variable_parameters,
                       simulation_parameters,
                       diagnostics=None,
                       fields_to_output=None,
                       expected_u=False):
    """
    A recursive strategy for setting up a variable number of for loops
    for the experiment.

    :arg N: the level of the loop.
    :arg variable_parameters: an OrderedDict of the parameters to be varied.
    :arg simulation_parameters: a dict storing the parameters for that simulation.
    :arg diagnostics: a list of diagnostic values to be output.
    :arg fields_to_output: a list of fields to be output.
    :arg expected_u: if True, accumulate and write out the mean of u over
        the ensemble of simulations.
    """

    # we do the loop in reverse order to get the resolution loop on the outside
    M = len(variable_parameters)
    key = list(variable_parameters.items())[M - N][0]
    have_setup = False

    # we must turn the ordered dict into a list to iterate through it
    for index, value in enumerate(
            list(variable_parameters.items())[M - N][1][1]):
        simulation_parameters[key] = (index, value)

        # make mesh if loop is a resolution loop
        if key == 'resolution':
            mesh = PeriodicIntervalMesh(value, simulation_parameters['Ld'][-1])
            simulation_parameters['mesh'] = (mesh, )

        # do recursion if we aren't finished yet
        if N > 1:
            do_simulation_loop(N - 1,
                               variable_parameters,
                               simulation_parameters,
                               diagnostics=diagnostics,
                               fields_to_output=fields_to_output,
                               expected_u=expected_u)

        # finally do simulation
        elif N == 1:

            if expected_u:
                this_u = simulation(simulation_parameters,
                                    diagnostic_values=diagnostics,
                                    fields_to_output=fields_to_output,
                                    expected_u=True)
                if have_setup:
                    Eu.assign(counter * Eu + this_u)
                    counter.assign(counter + 1)
                    Eu.assign(Eu / counter)
                else:
                    scheme = simulation_parameters['scheme'][-1]
                    mesh = simulation_parameters['mesh'][-1]
                    prognostic_variables = PrognosticVariables(scheme, mesh)
                    Eu = Function(prognostic_variables.Vu,
                                  name='expected u').assign(this_u)
                    counter = Constant(1.0)
                    have_setup = True
            else:
                simulation(simulation_parameters,
                           diagnostic_values=diagnostics,
                           fields_to_output=fields_to_output)

    if expected_u:
        expected_u_file = File(simulation_parameters['dirname'][-1] +
                               '/expected_u.pvd')
        expected_u_file.write(Eu)
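
# Example invocation (illustrative: assumes each variable_parameters entry is a
# (index, values) tuple, matching how it is unpacked above):
#
#     variable_parameters = OrderedDict([
#         ('resolution', (0, [100, 200, 400])),
#         ('dt', (0, [0.01, 0.001])),
#     ])
#     do_simulation_loop(len(variable_parameters), variable_parameters,
#                        simulation_parameters, diagnostics=['l2_u'])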
Example #18
class Outputting(object):
    """
    An object for outputting diagnostics and fields.

    :arg prognostic_variables: a PrognosticVariables object.
    :arg diagnostic_variables: a DiagnosticVariables object.
    :arg simulation_parameters: a dictionary containing the simulation parameters.
    :arg diagnostic_values: a list of diagnostics to use.
    """
    def __init__(self,
                 prognostic_variables,
                 diagnostic_variables,
                 simulation_parameters,
                 peakon_equations=None,
                 diagnostic_values=None):

        self.ndump = simulation_parameters['ndump'][-1]
        self.field_ndump = simulation_parameters['field_ndump'][-1]
        self.prognostic_variables = prognostic_variables
        self.diagnostic_variables = diagnostic_variables
        self.simulation_parameters = simulation_parameters
        self.peakon_equations = peakon_equations
        file_name = simulation_parameters['file_name'][-1]
        dirname = simulation_parameters['dirname'][-1]
        self.data_file = Dataset(file_name, 'a')

        # set up things for dumping diagnostics
        if diagnostic_values is not None:
            if isinstance(diagnostic_values, str):
                dimensions = self.data_file[diagnostic_values].dimensions
            else:
                if diagnostic_values[0] != 'mu':
                    dimensions = self.data_file[
                        diagnostic_values[0]].dimensions
                else:
                    dimensions = self.data_file[diagnostic_values[0] + '_' +
                                                str(0)].dimensions

        index_list = []
        variable_list = []
        for dimension in dimensions:
            if dimension not in ('time', 'x'):
                variable_list.append(dimension)
                index_list.append(simulation_parameters[dimension][0])

        self.index_slices = [slice(index, index + 1) for index in index_list]
        self.t_idx = 0
        self.diagnostic_values = diagnostic_values
        self.list_of_peakon_diagnostics = [
            'peakon_loc', 'peakon_min_du', 'peakon_max_du',
            'peakon_min_du_loc', 'peakon_max_du_loc', 'peakon_max_u',
            'peakon_mu', 'peakon_nu'
        ]

        # set up things for dumping fields
        if self.field_ndump > 0:
            field_file_name = dirname + '/fields'
            for dimension, index in zip(variable_list, index_list):
                field_file_name += '_' + str(dimension) + str(index)
            field_file_name += '_output.pvd'
            self.field_file = File(field_file_name)

            prognostic_variables = [
                value for value in self.prognostic_variables.fields.values()
            ]
            diagnostic_variables = [
                value
                for value in self.diagnostic_variables.dumpfields.values()
            ]
            self.dumpfields = prognostic_variables + diagnostic_variables

        self.out_string = ''
        for key, value in simulation_parameters.items():
            if len(value) > 1:
                self.out_string += str(key) + ' = %s, ' % str(value[0])

        # write initial wallclock time
        self.data_file['wallclock_time'][
            [slice(0, 1)] + self.index_slices] = datetime.now().timestamp()

        # set up coordinate field
        if self.simulation_parameters['store_coordinates'][-1]:
            self.data_file[
                'x'][:] = self.diagnostic_variables.coords.dat.data[:]

    def dump_diagnostics(self, t, failed=False):
        """
        Dump the diagnostic values.

        :arg t: time.
        :arg failed: if True, record NaN for each diagnostic instead of a
            computed value.
        """

        print(self.out_string, 't = %.3f' % t)
        self.data_file.variables['time'][self.t_idx:self.t_idx + 1] = t

        u = self.prognostic_variables.u
        if 'm' in self.prognostic_variables.fields.keys():
            m = self.prognostic_variables.m
        elif 'm' in self.diagnostic_variables.fields.keys():
            m = self.diagnostic_variables.fields['m']

        alphasq = self.simulation_parameters['alphasq'][-1]
        Ld = self.simulation_parameters['Ld'][-1]
        periodic = self.simulation_parameters['periodic'][-1]
        # Evaluate the periodic factor: a plain number is needed here rather
        # than a Firedrake expression
        periodic_factor = (1 - exp(-Ld / sqrt(alphasq)))
        periodic_factor = periodic_factor.evaluate(0, 0, 0, 0)

        for diagnostic in self.diagnostic_values:
            if failed:
                output = [[np.nan], [np.nan]] if diagnostic == 'mu' else np.nan
            elif diagnostic == 'energy':
                output = assemble(
                    (dot(u, u) + alphasq * dot(u.dx(0), u.dx(0))) * dx)
            elif diagnostic == 'l2_m':
                output = norm(m, norm_type='L2')
            elif diagnostic == 'l2_u':
                output = norm(u, norm_type='L2')
            elif diagnostic == 'h1_u':
                output = norm(u, norm_type='H1')
            elif diagnostic == 'h1_m':
                output = norm(m, norm_type='H1')
            elif diagnostic == 'mass_m':
                output = assemble(m * dx)
            elif diagnostic == 'mass_m2':
                output = assemble(m * m * dx)
            elif diagnostic == 'mass_u':
                output = assemble(u * dx)
            elif diagnostic == 'min_u':
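                # NOTE: u_min is assumed to be defined in the enclosing scope;
                # it does not appear in this excerpt.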
                output = 1 if norm(u_min, norm_type='L2') > 1e-10 else 0
            elif diagnostic == 'max_jump_local':
                output = find_max(self.diagnostic_variables.fields['jump_du'],
                                  self.diagnostic_variables.coords)[0]
            elif diagnostic == 'max_jump_global':
                output = find_max(
                    self.diagnostic_variables.fields['du'],
                    self.diagnostic_variables.coords)[0] - find_min(
                        self.diagnostic_variables.fields['du'],
                        self.diagnostic_variables.coords)[0]
            elif diagnostic == 'max_du_loc':
                output = find_max(self.diagnostic_variables.fields['du'],
                                  self.diagnostic_variables.coords)[1]
            elif diagnostic == 'min_du_loc':
                output = find_min(self.diagnostic_variables.fields['du'],
                                  self.diagnostic_variables.coords)[1]
            elif diagnostic == 'max_du_smooth_loc':
                output = find_max(
                    self.diagnostic_variables.fields['du_smooth'],
                    self.diagnostic_variables.smooth_coords)[1]
            elif diagnostic == 'min_du_smooth_loc':
                output = find_min(
                    self.diagnostic_variables.fields['du_smooth'],
                    self.diagnostic_variables.smooth_coords)[1]
            elif diagnostic == 'max_du':
                output = np.max(
                    self.diagnostic_variables.fields['du'].dat.data[:])
            elif diagnostic == 'min_du':
                output = np.min(
                    self.diagnostic_variables.fields['du'].dat.data[:])
            elif diagnostic == 'mu':
                old_mu, alt_mu = find_mus(
                    u, self.diagnostic_variables.fields['du'],
                    self.diagnostic_variables.coords)
                output = [old_mu, alt_mu]
            elif diagnostic == 'a':
                output = self.diagnostic_variables.fields['a'].at(
                    self.data_file['x'][:], tolerance=1e-6)
            elif diagnostic == 'b':
                output = self.diagnostic_variables.fields['b'].at(
                    self.data_file['x'][:], tolerance=1e-6)
            elif diagnostic == 'l2_kdv_1':
                output = norm(self.diagnostic_variables.fields['kdv_1'],
                              norm_type='L2')
            elif diagnostic == 'l2_kdv_2':
                output = norm(self.diagnostic_variables.fields['kdv_2'],
                              norm_type='L2')
            elif diagnostic == 'l2_kdv_3':
                output = norm(self.diagnostic_variables.fields['kdv_3'],
                              norm_type='L2')
            elif diagnostic == 'p_pde':
                if periodic:
                    output = np.max(u.dat.data[:]) * periodic_factor
                else:
                    output = np.max(u.dat.data[:])
            elif diagnostic == 'q_pde':
                output = self.diagnostic_variables.coords.dat.data[np.argmax(
                    u.dat.data[:])]
            elif diagnostic == 'm_max':
                output = np.max(m.dat.data[:])
            elif diagnostic == 'E_0':
                # use the calculated du
                du = self.diagnostic_variables.fields['du']
                output = assemble(0.5 * (u**2 + alphasq * du**2) * dx)
            elif diagnostic == 'E_1':
                # just straightforwardly use u
                output = assemble(0.5 * (u**2 + alphasq * u.dx(0)**2) * dx)
            elif diagnostic == 'E_2':
                # use m
                output = assemble(0.5 * u * m * dx)
            elif diagnostic == 'E_3':
                # solve for uxx
                u_xx = self.diagnostic_variables.fields['u_xx']
                output = assemble(0.5 * u * (u + alphasq * u_xx) * dx)
            elif diagnostic == 'u_error_with_sde':
                u_hat = self.diagnostic_variables.fields['u_sde']
                output = errornorm(u, u_hat)
            elif diagnostic == 'u_error_weak':
                u_hat_weak = self.diagnostic_variables.fields['u_sde_weak']
                output = norm(u_hat_weak)
            elif diagnostic == 'u_error_with_sde_mean':
                u_hat = self.diagnostic_variables.fields['u_sde_mean']
                output = errornorm(u, u_hat)
            elif diagnostic == 'u_error_weak_mean':
                u_hat_weak = self.diagnostic_variables.fields[
                    'u_sde_weak_mean']
                output = norm(u_hat_weak)
            elif diagnostic == 'u_field':
                output = u.dat.data[:]
            elif diagnostic == 'peakon_suite':
                output = peakon_diagnostics(
                    u, self.diagnostic_variables.fields['du'],
                    self.diagnostic_variables.coords)
            else:
                raise ValueError('Diagnostic %s not recognised.' % diagnostic)

            if diagnostic in ('a', 'b'):
                self.data_file[diagnostic][[slice(self.t_idx, self.t_idx + 1)]
                                           + self.index_slices] = output
            elif diagnostic == 'mu':
                # we cannot store arrays of mus, so have to do them each separately
                for i in range(4):
                    if i < len(output[0]):
                        # output[0] is the old mu, output[1] is the new mu
                        self.data_file[diagnostic + '_' + str(i)][
                            [slice(self.t_idx, self.t_idx + 1)] +
                            self.index_slices] = output[0][i]
                        self.data_file['alt_' + diagnostic + '_' + str(i)][
                            [slice(self.t_idx, self.t_idx + 1)] +
                            self.index_slices] = output[1][i]
                    else:
                        self.data_file[diagnostic + '_' + str(i)][
                            [slice(self.t_idx, self.t_idx + 1)] +
                            self.index_slices] = np.nan
                        self.data_file['alt_' + diagnostic + '_' + str(i)][
                            [slice(self.t_idx, self.t_idx + 1)] +
                            self.index_slices] = np.nan
            elif diagnostic == 'peakon_suite':
                for peakon_diag in self.list_of_peakon_diagnostics:
                    self.data_file[peakon_diag][
                        [slice(self.t_idx, self.t_idx + 1)] +
                        self.index_slices] = output[peakon_diag]
            else:
                self.data_file[diagnostic][[slice(self.t_idx, self.t_idx + 1)]
                                           + self.index_slices] = output

        if self.peakon_equations is not None:
            self.data_file['p'][[slice(self.t_idx, self.t_idx + 1)] +
                                self.index_slices] = self.peakon_equations.p
            self.data_file['q'][[slice(self.t_idx, self.t_idx + 1)] +
                                self.index_slices] = self.peakon_equations.q

        self.t_idx += 1

    def dump_fields(self, t):
        """
        Dump the diagnostic fields.

        :arg t: time.
        """

        if self.field_ndump > 0:
            self.field_file.write(*self.dumpfields, time=t)

    def store_times(self, failed_time):
        """
        Store the final wallclock time and the time at which the run failed.
        """

        self.data_file['wallclock_time'][
            [slice(1, 2)] + self.index_slices] = datetime.now().timestamp()
        if len(self.index_slices) == 0:
            self.data_file['failed_time'][0] = failed_time
        else:
            self.data_file['failed_time'][self.index_slices] = failed_time
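
A minimal sketch of how the outputting object above is typically driven from a timestepping loop. Every name here (Outputting, timestepper, dt, tmax) is an assumption for illustration, not something defined in the snippet itself.

import numpy as np

outputting = Outputting(...)  # hypothetical constructor for the class above
t, dt, tmax = 0.0, 0.001, 1.0
try:
    while t < tmax:
        t += dt
        timestepper.advance(t)          # hypothetical model step
        outputting.dump_diagnostics(t)  # scalar diagnostics -> netCDF
        outputting.dump_fields(t)       # field output -> .pvd
    outputting.store_times(failed_time=np.nan)
except RuntimeError:
    # a failed solve: record nan diagnostics and the failure time
    outputting.dump_diagnostics(t, failed=True)
    outputting.store_times(failed_time=t)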
Example #19
class SeaIceModel(object):
    def __init__(self, mesh, conditions, timestepping, params, output, solver_params):

        self.timestepping = timestepping
        self.timestep = timestepping.timestep
        self.timescale = timestepping.timescale
        self.params = params
        if output is None:
            raise RuntimeError("You must provide a directory name for dumping results")
        else:
            self.output = output
        self.outfile = File(output.dirname)
        self.dump_count = 0
        self.dump_freq = output.dumpfreq
        self.solver_params = solver_params
        self.mesh = mesh
        self.conditions = conditions

        # self.ind is 1 in both the steady-state and time-dependent cases
        self.ind = 1
        
        family = conditions.family
        self.x, self.y = SpatialCoordinate(mesh)
        self.n = FacetNormal(mesh)
        self.V = VectorFunctionSpace(mesh, family, conditions.order + 1)
        self.U = FunctionSpace(mesh, family, conditions.order + 1)
        self.U1 = FunctionSpace(mesh, 'DG', conditions.order)
        self.S = TensorFunctionSpace(mesh, 'DG', conditions.order)
        self.D = FunctionSpace(mesh, 'DG', 0)
        self.W1 = MixedFunctionSpace([self.V, self.S])
        self.W2 = MixedFunctionSpace([self.V, self.U1, self.U1])
        self.W3 = MixedFunctionSpace([self.V, self.S, self.U1, self.U1])


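    # Hibler (1979)-type ice strength: P = P_star * h * exp(-C * (1 - a))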
    def Ice_Strength(self, h, a):
        return self.params.P_star * h * exp(-self.params.C * (1 - a))

    def zeta(self, h, a, delta):
        return 0.5 * self.Ice_Strength(h, a) / delta

    def strain(self, omega):
        return 0.5 * (omega + transpose(omega))

    def delta(self, u):
        return sqrt(self.params.Delta_min ** 2 + 2 * self.params.e ** (-2) * inner(dev(self.strain(grad(u))),
                                                                                   dev(self.strain(grad(u)))) + tr(
            self.strain(grad(u))) ** 2)

    def bcs(self, space, location="on_boundary"):
        return [DirichletBC(space, values, location) for values in self.conditions.bc]

    def solve(self, *args):
        for solvers in args:
            solvers.solve()

    def update(self, old_var, new_var):
        old_var.assign(new_var)

    def dump(self, *args, t):
        self.dump_count += 1
        if self.dump_count == self.dump_freq:
            self.dump_count -= self.dump_freq
            self.outfile.write(*args, time=t)

    def initial_condition(self, *args):
        '''
        Arguments should be paired as (variable1, ic1), (variable2, ic2), etc.
        '''
        for var, ic in args:
            # numbers and vector expressions (ufl ListTensor, e.g. from
            # as_vector) can be assigned directly; anything else is
            # interpolated (assumes `import ufl` is available)
            if isinstance(ic, (int, float, ufl.tensors.ListTensor)):
                var.assign(ic)
            else:
                var.interpolate(ic)


    def assemble(self, eqn, func, bcs, params):
        uprob = NonlinearVariationalProblem(eqn, func, bcs)
        self.usolver = NonlinearVariationalSolver(uprob, solver_parameters=params)

    def progress(self, t):
        print("Time:", t, "[s]")
        print(int(min(t / self.timescale * 100, 100)), "% complete")

    def momentum_equation(self, hh, u1, u0, p, sigma, rho, uh, ocean_curr, rho_a, C_a, rho_w, C_w, geo_wind, cor, timestep, ind=1):
        def momentum_term():
            return inner(rho * hh * (u1 - u0), p) * dx

        def forcing_term():
            return inner(rho * hh * cor * perp(ocean_curr - uh), p) * dx

        def stress_term(density, drag, func):
            return inner(density * drag * sqrt(dot(func, func)) * func, p) * dx(degree=3)
    
        def rheology_term():
            return inner(sigma, grad(p)) * dx

        return ind * momentum_term() + timestep * (rheology_term() - forcing_term()
                                                   - stress_term(rho_w, C_w, ocean_curr - uh)
                                                   - stress_term(rho_a, C_a, geo_wind))

    def transport_equation(self, uh, hh, ah, h1, h0, a1, a0, q, r, n, timestep):
   
        def in_term(var1, var2, test):
            trial = var2 - var1
            return test * trial * dx

        def upwind_term(var1, test):
            un = 0.5 * (dot(uh, n) + abs(dot(uh, n)))
            return timestep * (var1 * div(test * uh) * dx
                               - (test('+') - test('-')) * (un('+') * var1('+') - un('-') * var1('-')) * dS)
    
        return in_term(h0, h1, q) - upwind_term(hh, q) + in_term(a0, a1, r) - upwind_term(ah, r)

    def stabilisation_term(self, alpha, zeta, mesh, v, test):
        e = avg(CellVolume(mesh)) / FacetArea(mesh)
        return 2 * alpha * zeta / e * (dot(jump(v), jump(test))) * dS
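
A rough sketch of the driver loop SeaIceModel is designed for. The velocity pair (u0, u1) and the use of usolver are assumptions for illustration; the real driver depends on which equations were assembled.

model = SeaIceModel(mesh, conditions, timestepping, params, output, solver_params)
t = 0.0
while t < model.timescale:
    model.solve(model.usolver)  # solve the problem set up by assemble()
    model.update(u0, u1)        # hypothetical old/new state pair
    model.dump(u1, t=t)         # writes on every dump_freq-th call
    t += model.timestep
    model.progress(t)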
Example #20
def test_forward_3d(tf=0.6):
    model = {}

    model["opts"] = {
        "method": "KMV",  # either CG or KMV
        "quadrature": "KMV",  # Equi or KMV
        "degree": 3,  # p order
        "dimension": 3,  # dimension
    }
    model["parallelism"] = {"type": "automatic"}  # automatic",
    model["mesh"] = {
        "Lz": 5.175,  # depth in km - always positive
        "Lx": 7.50,  # width in km - always positive
        "Ly": 7.50,  # thickness in km - always positive
        "meshfile": "meshes/overthrust_3D_true_model.msh",
        "initmodel": "velocity_models/overthrust_3D_guess_model.hdf5",
        "truemodel": "velocity_models/overthrust_3D_true_model.hdf5",
    }
    model["BCs"] = {
        "status": True,  # True or false
        "outer_bc":
        "non-reflective",  #  None or non-reflective (outer boundary condition)
        "damping_type":
        "polynomial",  # polynomial, hyperbolic, shifted_hyperbolic
        "exponent": 2,  # damping layer has a exponent variation
        "cmax": 6.0,  # maximum acoustic wave velocity in PML - km/s
        "R": 1e-6,  # theoretical reflection coefficient
        "lz":
        0.75,  # thickness of the PML in the z-direction (km) - always positive
        "lx":
        0.75,  # thickness of the PML in the x-direction (km) - always positive
        "ly":
        0.75,  # thickness of the PML in the y-direction (km) - always positive
    }
    model["acquisition"] = {
        "source_type":
        "Ricker",
        "source_pos": [(-0.15, 0.25, 0.25)],
        "frequency":
        5.0,
        "delay":
        1.0,
        "receiver_locations": [(-0.15, 0.25, 0.25), (-0.15, 0.3, 0.25),
                               (-0.15, 0.35, 0.25), (-0.15, 0.4, 0.25),
                               (-0.15, 0.45, 0.25), (-0.15, 0.5, 0.25),
                               (-0.15, 0.55, 0.25), (-0.15, 0.6, 0.25)],
    }
    model["aut_dif"] = {"status": False}
    model["timeaxis"] = {
        "t0": 0.0,  #  Initial time for event
        "tf": tf,  # Final time for event
        "dt": 0.00075,
        "amplitude": 1,  # the Ricker has an amplitude of 1.
        "nspool": 100,  # how frequently to output solution to pvds
        "fspool": 99999,  # how frequently to save solution to RAM
    }

    comm = spyro.utils.mpi_init(model)
    mesh, V = spyro.io.read_mesh(model, comm)
    vp = spyro.io.interpolate(model, mesh, V, guess=False)

    if comm.ensemble_comm.rank == 0:
        File("true_velocity.pvd", comm=comm.comm).write(vp)

    sources = spyro.Sources(model, mesh, V, comm)
    receivers = spyro.Receivers(model, mesh, V, comm)
    wavelet = spyro.full_ricker_wavelet(
        dt=model["timeaxis"]["dt"],
        tf=model["timeaxis"]["tf"],
        freq=model["acquisition"]["frequency"],
    )

    p, p_r = spyro.solvers.forward(model,
                                   mesh,
                                   comm,
                                   vp,
                                   sources,
                                   wavelet,
                                   receivers,
                                   output=False)

    dt = model["timeaxis"]["dt"]
    final_time = model["timeaxis"]["tf"]

    pass_error_test = True

    if comm.comm.rank == 0:
        error_percent = compare_velocity(p_r, 0, 7, model, dt)
        pass_error_test = error_percent < 5

    assert pass_error_test
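
For reference, the Ricker wavelet requested via "source_type": "Ricker" has a standard closed form. A minimal NumPy sketch follows; the way the delay is applied is an assumption (conventions differ between codes), so treat it as illustrative rather than as spyro's exact implementation.

import numpy as np

def ricker(t, freq, delay=1.0):
    # r(t) = (1 - 2*pi^2*f^2*ts^2) * exp(-pi^2*f^2*ts^2), with ts = t - delay/f
    ts = t - delay / freq
    a = (np.pi * freq * ts) ** 2
    return (1.0 - 2.0 * a) * np.exp(-a)

times = np.arange(0.0, 0.6, 0.00075)  # matches t0, tf and dt above
amplitudes = ricker(times, freq=5.0, delay=1.0)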
Example #22
class State(metaclass=ABCMeta):
    """
    Build a model state to keep the variables in, and specify parameters.

    :arg mesh: The :class:`Mesh` to use.
    :arg vertical_degree: integer, the degree for spaces in the vertical
    (specifies the degree for the pressure space, other spaces are inferred)
    defaults to 1.
    :arg horizontal_degree: integer, the degree for spaces in the horizontal
    (specifies the degree for the pressure space, other spaces are inferred)
    defaults to 1.
    :arg family: string, specifies the velocity space family to use.
    Options:
    "RT": The Raviart-Thomas family (default, recommended for quads)
    "BDM": The BDM family
    "BDFM": The BDFM family
    :arg timestepping: class containing timestepping parameters
    :arg output: class containing output parameters
    :arg parameters: class containing physical parameters
    :arg diagnostics: class containing diagnostic methods
    :arg fieldlist: list of prognostic field names

    """

    def __init__(self, mesh, vertical_degree=1, horizontal_degree=1,
                 family="RT", z=None, k=None, Omega=None, mu=None,
                 timestepping=None,
                 output=None,
                 parameters=None,
                 diagnostics=None,
                 fieldlist=None,
                 diagnostic_fields=[]):

        self.z = z
        self.k = k
        self.Omega = Omega
        self.mu = mu
        self.timestepping = timestepping
        self.output = output
        self.parameters = parameters
        if fieldlist is None:
            raise RuntimeError("You must provide a fieldlist containing the names of the prognostic fields")
        else:
            self.fieldlist = fieldlist
        if diagnostics is not None:
            self.diagnostics = diagnostics
        else:
            self.diagnostics = Diagnostics(*fieldlist)
        self.diagnostic_fields = diagnostic_fields

        # The mesh
        self.mesh = mesh

        # Build the spaces
        self._build_spaces(mesh, vertical_degree,
                           horizontal_degree, family)

        # Allocate state
        self._allocate_state()
        self.field_dict = {name: func for (name, func) in
                           zip(self.fieldlist, self.xn.split())}

        self.dumpfile = None

    def dump(self, t=0, pickup=False):
        """
        Dump output
        :arg t: the current model time (default is zero).
        :arg pickup: recover state from the checkpointing file if true,
        otherwise dump and checkpoint to disk. (default is False).
        """

        # default behaviour is to dump all prognostic fields
        if self.output.dumplist is None:
            self.output.dumplist = self.fieldlist

        # if there are fields to be dumped in latlon coordinates,
        # setup the latlon coordinate mesh
        if len(self.output.dumplist_latlon) > 0:
            field_dict_ll = {}
            mesh_ll = get_latlon_mesh(self.mesh)

        funcs = self.xn.split()
        field_dict = {name: func for (name, func) in zip(self.fieldlist, funcs)}
        to_dump = []  # fields to output to dump and checkpoint
        to_pickup = []  # fields to pick up from checkpoint
        for name, f in field_dict.items():
            if name in self.output.dumplist:
                to_dump.append(f)
                to_pickup.append(f)
            f.rename(name=name)

        # append diagnostic fields for to_dump
        for diagnostic in self.diagnostic_fields:
            to_dump.append(diagnostic(self))

        # check if we are running a steady state simulation and if so
        # set up the error fields and save the
        # initial fields so that we can compute the error fields
        steady_state_dump_err = defaultdict(bool)
        steady_state_dump_err.update(self.output.steady_state_dump_err)
        for name, f, f_init in zip(self.fieldlist, funcs, self.x_init.split()):
            if steady_state_dump_err[name]:
                err = Function(f.function_space(), name=name+'err').assign(f-f_init)
                field_dict[name+"err"] = err
                self.diagnostics.register(name+"err")
                to_dump.append(err)
                f_init.rename(f.name()+"_init")
                to_dump.append(f_init)
                to_pickup.append(f_init)

        # check if we are dumping perturbation fields. If we are, the
        # meanfields are provided in a dictionary. Here we set up the
        # perturbation fields.
        meanfields = defaultdict(lambda: None)
        meanfields.update(self.output.meanfields)
        for name, meanfield in meanfields.items():
            if meanfield is not None:
                field = field_dict[name]
                diff = Function(
                    field.function_space(),
                    name=field.name()+"_perturbation").assign(field - meanfield)
                self.diagnostics.register(name+"perturbation")
                field_dict[name+"perturbation"] = diff
                to_dump.append(diff)
                mean_name = field.name() + "_bar"
                meanfield.rename(name=mean_name)
                to_dump.append(meanfield)
                to_pickup.append(meanfield)

        # make functions on latlon mesh, as specified by dumplist_latlon
        to_dump_latlon = []
        for name in self.output.dumplist_latlon:
            f = field_dict[name]
            f_ll = Function(functionspaceimpl.WithGeometry(f.function_space(), mesh_ll), val=f.topological, name=name+'_ll')
            field_dict_ll[name] = f_ll
            to_dump_latlon.append(f_ll)

        self.dumpdir = path.join("results", self.output.dirname)
        outfile = path.join(self.dumpdir, "field_output.pvd")
        if self.dumpfile is None:
            if self.mesh.comm.rank == 0 and path.exists(self.dumpdir) and not pickup:
                exit("results directory '%s' already exists" % self.dumpdir)
            self.dumpcount = itertools.count()
            self.dumpfile = File(outfile, project_output=self.output.project_fields, comm=self.mesh.comm)
            self.diagnostic_data = defaultdict(partial(defaultdict, list))

            # make output file for fields on latlon mesh if required
            if len(self.output.dumplist_latlon) > 0:
                outfile_latlon = path.join(self.dumpdir, "field_output_latlon.pvd")
                self.dumpfile_latlon = File(outfile_latlon, project_output=self.output.project_fields,
                                            comm=self.mesh.comm)

        if pickup:
            # Open the checkpointing file for writing
            chkfile = path.join(self.dumpdir, "chkpt")
            with DumbCheckpoint(chkfile, mode=FILE_READ) as chk:
                # Recover all the fields from the checkpoint
                for field in to_pickup:
                    chk.load(field)
                t = chk.read_attribute("/","time")
                next(self.dumpcount)

        elif (next(self.dumpcount) % self.output.dumpfreq) == 0:

            print "DBG dumping", t

            # dump fields
            self.dumpfile.write(*to_dump)

            # dump fields on latlon mesh
            if len(self.output.dumplist_latlon) > 0:
                self.dumpfile_latlon.write(*to_dump_latlon)

            # compute diagnostics
            for name in self.diagnostics.fields:
                data = self.diagnostics.l2(field_dict[name])
                self.diagnostic_data[name]["l2"].append(data)

            # Open the checkpointing file (backup version)
            files = ["chkptbk", "chkpt"]
            for file in files:
                chkfile = path.join(self.dumpdir, file)
                with DumbCheckpoint(chkfile, mode=FILE_CREATE) as chk:
                    # Dump all the fields to a checkpoint
                    for field in to_dump:
                        chk.store(field)
                    chk.write_attribute("/","time",t)

        return t

    def diagnostic_dump(self):
        """
        Dump diagnostics dictionary
        """

        with open(path.join(self.dumpdir, "diagnostics.json"), "w") as f:
            f.write(json.dumps(self.diagnostic_data, indent=4))

    def initialise(self, initial_conditions):
        """
        Initialise state variables
        """

        for x, ic in zip(self.x_init.split(), initial_conditions):
            x.assign(ic)

    @abstractmethod
    def _build_spaces(self, mesh, vertical_degree, horizontal_degree, family):

        """
        Build function spaces:
        """
        pass

    def _allocate_state(self):
        """
        Construct Functions to store the state variables.
        """

        W = self.W
        self.xn = Function(W)
        self.x_init = Function(W)
        self.xstar = Function(W)
        self.xp = Function(W)
        self.xnp1 = Function(W)
        self.xrhs = Function(W)
        self.dy = Function(W)
Example #23
# Pressure update
a2 = inner(grad(p), grad(q)) * dx
L2 = -(1 / k) * div(u1) * q * dx

# Velocity update
a3 = inner(u, v) * dx
L3 = inner(u1, v) * dx - k * inner(grad(p1), v) * dx

# Assemble matrices
A1 = assemble(a1)
A2 = assemble(a2)
A3 = assemble(a3)

# Create files for storing solution
ufile = File("results/velocity.pvd")
pfile = File("results/pressure.pvd")

# Time-stepping
t = dt

numSteps = int(T / dt)

for i in range(numSteps):

    # Update pressure boundary condition

    # Compute tentative velocity step
    # begin("Computing tentative velocity")
    b1 = assemble(L1)
    [bc.apply(A1, b1) for bc in bcu]
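    # The snippet breaks off here. Under the usual Chorin/IPCS pattern that the
    # forms a2/L2 and a3/L3 above suggest, the rest of each iteration would look
    # roughly like this sketch (DOLFIN-style API as above; `bcp` and the solver
    # choices are assumptions):
    solve(A1, u1.vector(), b1, "gmres", "default")  # tentative velocity

    # Pressure correction
    b2 = assemble(L2)
    [bc.apply(A2, b2) for bc in bcp]  # assumes pressure BCs in `bcp`
    solve(A2, p1.vector(), b2, "cg", "default")

    # Velocity correction
    b3 = assemble(L3)
    [bc.apply(A3, b3) for bc in bcu]
    solve(A3, u1.vector(), b3, "gmres", "default")

    # Save to file and advance
    ufile << u1
    pfile << p1
    t += dt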
Example #24
class State(object):
    """
    Build a model state to keep the variables in, and specify parameters.

    :arg mesh: The :class:`Mesh` to use.
    :arg vertical_degree: integer, required for vertically extruded meshes.
    Specifies the degree for the pressure space in the vertical
    (the degrees for other spaces are inferred). Defaults to None.
    :arg horizontal_degree: integer, the degree for spaces in the horizontal
    (specifies the degree for the pressure space, other spaces are inferred)
    defaults to 1.
    :arg family: string, specifies the velocity space family to use.
    Options:
    "RT": The Raviart-Thomas family (default, recommended for quads)
    "BDM": The BDM family
    "BDFM": The BDFM family
    :arg Coriolis: (optional) Coriolis function.
    :arg sponge_function: (optional) Function specifying a sponge layer.
    :arg timestepping: class containing timestepping parameters
    :arg output: class containing output parameters
    :arg parameters: class containing physical parameters
    :arg diagnostics: class containing diagnostic methods
    :arg fieldlist: list of prognostic field names
    :arg diagnostic_fields: list of diagnostic field classes
    :arg u_bc_ids: a list containing the ids of boundaries with no normal
                   component of velocity. These ids are passed to `DirichletBC`s. For
                   extruded meshes, top and bottom are added automatically.
    """
    def __init__(self,
                 mesh,
                 vertical_degree=None,
                 horizontal_degree=1,
                 family="RT",
                 Coriolis=None,
                 sponge_function=None,
                 hydrostatic=None,
                 timestepping=None,
                 output=None,
                 parameters=None,
                 diagnostics=None,
                 fieldlist=None,
                 diagnostic_fields=None,
                 u_bc_ids=None):

        self.family = family
        self.vertical_degree = vertical_degree
        self.horizontal_degree = horizontal_degree
        self.Omega = Coriolis
        self.mu = sponge_function
        self.hydrostatic = hydrostatic
        self.timestepping = timestepping
        if output is None:
            raise RuntimeError(
                "You must provide a directory name for dumping results")
        else:
            self.output = output
        self.parameters = parameters
        if fieldlist is None:
            raise RuntimeError(
                "You must provide a fieldlist containing the names of the prognostic fields"
            )
        else:
            self.fieldlist = fieldlist
        if diagnostics is not None:
            self.diagnostics = diagnostics
        else:
            self.diagnostics = Diagnostics(*fieldlist)
        if diagnostic_fields is not None:
            self.diagnostic_fields = diagnostic_fields
        else:
            self.diagnostic_fields = []
        if u_bc_ids is not None:
            self.u_bc_ids = u_bc_ids
        else:
            self.u_bc_ids = []

        # The mesh
        self.mesh = mesh

        # Build the spaces
        self._build_spaces(mesh, vertical_degree, horizontal_degree, family)

        # Allocate state
        self._allocate_state()
        if self.output.dumplist is None:
            self.output.dumplist = fieldlist
        self.fields = FieldCreator(fieldlist, self.xn, self.output.dumplist)

        # set up bcs
        V = self.fields('u').function_space()
        self.bcs = []
        if V.extruded:
            self.bcs.append(DirichletBC(V, 0.0, "bottom"))
            self.bcs.append(DirichletBC(V, 0.0, "top"))
        for id in self.u_bc_ids:
            self.bcs.append(DirichletBC(V, 0.0, id))

        self.dumpfile = None

        # figure out if we're on a sphere
        try:
            self.on_sphere = (mesh._base_mesh.geometric_dimension() == 3
                              and mesh._base_mesh.topological_dimension() == 2)
        except AttributeError:
            self.on_sphere = (mesh.geometric_dimension() == 3
                              and mesh.topological_dimension() == 2)

        #  build the vertical normal and define perp for 2d geometries
        dim = mesh.topological_dimension()
        if self.on_sphere:
            x = SpatialCoordinate(mesh)
            R = sqrt(inner(x, x))
            self.k = interpolate(x / R, mesh.coordinates.function_space())
            if dim == 2:
                outward_normals = CellNormal(mesh)
                self.perp = lambda u: cross(outward_normals, u)
        else:
            kvec = [0.0] * dim
            kvec[dim - 1] = 1.0
            self.k = Constant(kvec)
            if dim == 2:
                self.perp = lambda u: as_vector([-u[1], u[0]])

        # project test function for hydrostatic case
        if self.hydrostatic:
            self.h_project = lambda u: u - self.k * inner(u, self.k)
        else:
            self.h_project = lambda u: u

        #  Constant to hold current time
        self.t = Constant(0.0)

        # setup logger
        logger.setLevel(output.log_level)
        set_log_handler(mesh.comm)
        logger.info("Timestepping parameters that take non-default values:")
        logger.info(", ".join("%s: %s" % item
                              for item in vars(timestepping).items()))
        if parameters is not None:
            logger.info("Physical parameters that take non-default values:")
            logger.info(", ".join("%s: %s" % item
                                  for item in vars(parameters).items()))

    def setup_diagnostics(self):
        """
        Add special case diagnostic fields
        """
        for name in self.output.perturbation_fields:
            f = Perturbation(name)
            self.diagnostic_fields.append(f)

        for name in self.output.steady_state_error_fields:
            f = SteadyStateError(self, name)
            self.diagnostic_fields.append(f)

        fields = set([f.name() for f in self.fields])
        field_deps = [(d, sorted(set(d.required_fields).difference(fields), ))
                      for d in self.diagnostic_fields]
        schedule = topo_sort(field_deps)
        self.diagnostic_fields = schedule
        for diagnostic in self.diagnostic_fields:
            diagnostic.setup(self)
            self.diagnostics.register(diagnostic.name)

    def setup_dump(self, t, tmax, pickup=False):
        """
        Setup dump files
        Check for existence of directory so as not to overwrite
        output files
        Setup checkpoint file

        :arg tmax: model stop time
        :arg pickup: recover state from the checkpointing file if true,
        otherwise dump and checkpoint to disk. (default is False).
        """

        if any([
                self.output.dump_vtus, self.output.dumplist_latlon,
                self.output.dump_diagnostics, self.output.point_data,
                self.output.checkpoint and not pickup
        ]):
            # setup output directory and check that it does not already exist
            self.dumpdir = path.join("results", self.output.dirname)
            running_tests = '--running-tests' in sys.argv or "pytest" in self.output.dirname
            if self.mesh.comm.rank == 0:
                if not running_tests and path.exists(
                        self.dumpdir) and not pickup:
                    raise IOError("results directory '%s' already exists" %
                                  self.dumpdir)
                else:
                    if not running_tests:
                        makedirs(self.dumpdir)

        if self.output.dump_vtus:

            # setup pvd output file
            outfile = path.join(self.dumpdir, "field_output.pvd")
            self.dumpfile = File(outfile,
                                 project_output=self.output.project_fields,
                                 comm=self.mesh.comm)

            # make list of fields to dump
            self.to_dump = [field for field in self.fields if field.dump]

            # make dump counter
            self.dumpcount = itertools.count()

        # if there are fields to be dumped in latlon coordinates,
        # setup the latlon coordinate mesh and make output file
        if len(self.output.dumplist_latlon) > 0:
            mesh_ll = get_latlon_mesh(self.mesh)
            outfile_ll = path.join(self.dumpdir, "field_output_latlon.pvd")
            self.dumpfile_ll = File(outfile_ll,
                                    project_output=self.output.project_fields,
                                    comm=self.mesh.comm)

            # make functions on latlon mesh, as specified by dumplist_latlon
            self.to_dump_latlon = []
            for name in self.output.dumplist_latlon:
                f = self.fields(name)
                field = Function(functionspaceimpl.WithGeometry(
                    f.function_space(), mesh_ll),
                                 val=f.topological,
                                 name=name + '_ll')
                self.to_dump_latlon.append(field)

        # we create new netcdf files to write to, unless pickup=True, in
        # which case we just need the filenames
        if self.output.dump_diagnostics:
            diagnostics_filename = path.join(self.dumpdir, "diagnostics.nc")
            self.diagnostic_output = DiagnosticsOutput(diagnostics_filename,
                                                       self.diagnostics,
                                                       self.output.dirname,
                                                       self.mesh.comm,
                                                       create=not pickup)

        if len(self.output.point_data) > 0:
            pointdata_filename = path.join(self.dumpdir, "point_data.nc")
            ndt = int(tmax / self.timestepping.dt)
            self.pointdata_output = PointDataOutput(pointdata_filename,
                                                    ndt,
                                                    self.output.point_data,
                                                    self.output.dirname,
                                                    self.fields,
                                                    self.mesh.comm,
                                                    create=not pickup)

        # if we want to checkpoint and are not picking up from a previous
        # checkpoint file, setup the dumb checkpointing
        if self.output.checkpoint and not pickup:
            self.chkpt = DumbCheckpoint(path.join(self.dumpdir, "chkpt"),
                                        mode=FILE_CREATE)
            # make list of fields to pickup (this doesn't include
            # diagnostic fields)
            self.to_pickup = [field for field in self.fields if field.pickup]

        # if we want to checkpoint then make a checkpoint counter
        if self.output.checkpoint:
            self.chkptcount = itertools.count()

        # dump initial fields
        self.dump(t)

    def pickup_from_checkpoint(self):
        """
        :arg t: the current model time (default is zero).
        """
        if self.output.checkpoint:
            # Open the checkpointing file for writing
            chkfile = path.join(self.dumpdir, "chkpt")
            with DumbCheckpoint(chkfile, mode=FILE_READ) as chk:
                # Recover all the fields from the checkpoint
                for field in self.to_pickup:
                    chk.load(field)
                t = chk.read_attribute("/", "time")
                next(self.dumpcount)
            # Setup new checkpoint
            self.chkpt = DumbCheckpoint(path.join(self.dumpdir, "chkpt"),
                                        mode=FILE_CREATE)
        else:
            raise ValueError("Must set checkpoint True if pickup")

        return t

    def dump(self, t):
        """
        Dump output
        """
        output = self.output

        # Diagnostics:
        # Compute diagnostic fields
        for field in self.diagnostic_fields:
            field(self)

        if output.dump_diagnostics:
            # Output diagnostic data
            self.diagnostic_output.dump(self, t)

        if len(output.point_data) > 0:
            # Output pointwise data
            self.pointdata_output.dump(self.fields, t)

        # Dump all the fields to the checkpointing file (backup version)
        if output.checkpoint and (next(self.chkptcount) %
                                  output.chkptfreq) == 0:
            for field in self.to_pickup:
                self.chkpt.store(field)
            self.chkpt.write_attribute("/", "time", t)

        if output.dump_vtus and (next(self.dumpcount) % output.dumpfreq) == 0:
            # dump fields
            self.dumpfile.write(*self.to_dump)

            # dump fields on latlon mesh
            if len(output.dumplist_latlon) > 0:
                self.dumpfile_ll.write(*self.to_dump_latlon)

    def initialise(self, initial_conditions):
        """
        Initialise state variables

        :arg initial_conditions: An iterable of pairs (field_name, pointwise_value)
        """
        for name, ic in initial_conditions:
            f_init = getattr(self.fields, name)
            f_init.assign(ic)
            f_init.rename(name)

    def set_reference_profiles(self, reference_profiles):
        """
        Initialise reference profiles

        :arg reference_profiles: An iterable of pairs (field_name, interpolatory_value)
        """
        for name, profile in reference_profiles:
            field = getattr(self.fields, name)
            ref = self.fields(name + 'bar', field.function_space(), False)
            ref.interpolate(profile)

    def _build_spaces(self, mesh, vertical_degree, horizontal_degree, family):
        """
        Build:
        velocity space self.V2,
        pressure space self.V3,
        temperature space self.Vt,
        mixed function space self.W = (V2,V3,Vt)
        """

        self.spaces = SpaceCreator()
        if vertical_degree is not None:
            # horizontal base spaces
            cell = mesh._base_mesh.ufl_cell().cellname()
            S1 = FiniteElement(family,
                               cell,
                               horizontal_degree + 1,
                               variant="equispaced")
            S2 = FiniteElement("DG",
                               cell,
                               horizontal_degree,
                               variant="equispaced")

            # vertical base spaces
            T0 = FiniteElement("CG",
                               interval,
                               vertical_degree + 1,
                               variant="equispaced")
            T1 = FiniteElement("DG",
                               interval,
                               vertical_degree,
                               variant="equispaced")

            # build spaces V2, V3, Vt
            V2h_elt = HDiv(TensorProductElement(S1, T1))
            V2t_elt = TensorProductElement(S2, T0)
            V3_elt = TensorProductElement(S2, T1)
            V2v_elt = HDiv(V2t_elt)
            V2_elt = V2h_elt + V2v_elt

            V0 = self.spaces("HDiv", mesh, V2_elt)
            V1 = self.spaces("DG", mesh, V3_elt)
            V2 = self.spaces("HDiv_v", mesh, V2t_elt)

            self.Vv = self.spaces("Vv", mesh, V2v_elt)

            DG1_hori_elt = FiniteElement("DG", cell, 1, variant="equispaced")
            DG1_vert_elt = FiniteElement("DG",
                                         interval,
                                         1,
                                         variant="equispaced")
            DG1_elt = TensorProductElement(DG1_hori_elt, DG1_vert_elt)
            self.DG1_space = self.spaces("DG1", mesh, DG1_elt)

            self.W = MixedFunctionSpace((V0, V1, V2))

        else:
            cell = mesh.ufl_cell().cellname()
            V1_elt = FiniteElement(family,
                                   cell,
                                   horizontal_degree + 1,
                                   variant="equispaced")
            DG_elt = FiniteElement("DG",
                                   cell,
                                   horizontal_degree,
                                   variant="equispaced")
            DG1_elt = FiniteElement("DG", cell, 1, variant="equispaced")

            V0 = self.spaces("HDiv", mesh, V1_elt)
            V1 = self.spaces("DG", mesh, DG_elt)
            self.DG1_space = self.spaces("DG1", mesh, DG1_elt)

            self.W = MixedFunctionSpace((V0, V1))

    def _allocate_state(self):
        """
        Construct Functions to store the state variables.
        """

        W = self.W
        self.xn = Function(W)
        self.xstar = Function(W)
        self.xp = Function(W)
        self.xnp1 = Function(W)
        self.xrhs = Function(W)
        self.xb = Function(W)  # store the old state for diagnostics
        self.dy = Function(W)
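
As an illustration of what the non-extruded branch of _build_spaces produces, here is a minimal standalone sketch for the default family="RT" with horizontal_degree=1; the mesh is an arbitrary choice.

from firedrake import *

mesh = UnitSquareMesh(8, 8)
cell = mesh.ufl_cell().cellname()

V1_elt = FiniteElement("RT", cell, 2, variant="equispaced")  # velocity: degree + 1
DG_elt = FiniteElement("DG", cell, 1, variant="equispaced")  # pressure/depth

V0 = FunctionSpace(mesh, V1_elt)
V1 = FunctionSpace(mesh, DG_elt)
W = MixedFunctionSpace((V0, V1))  # the state vector lives in W, cf. _allocate_state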
Example #25
    "num_receivers": len(receivers),
    "receiver_locations": receivers,
}
model["timeaxis"] = {
    "t0": 0.0,  #  Initial time for event
    "tf": 4.00,  # Final time for event
    "dt": 0.00075,
    "amplitude": 1,  # the Ricker has an amplitude of 1.
    "nspool": 100,  # how frequently to output solution to pvds
    "fspool": 99999,  # how frequently to save solution to RAM
}
comm = spyro.utils.mpi_init(model)
mesh, V = spyro.io.read_mesh(model, comm)
vp = spyro.io.interpolate(model, mesh, V, guess=False)
if comm.ensemble_comm.rank == 0:
    File("true_velocity.pvd", comm=comm.comm).write(vp)
sources = spyro.Sources(model, mesh, V, comm)
receivers = spyro.Receivers(model, mesh, V, comm)
wavelet = spyro.full_ricker_wavelet(
    dt=model["timeaxis"]["dt"],
    tf=model["timeaxis"]["tf"],
    freq=model["acquisition"]["frequency"],
)
t1 = time.time()
p, p_r = spyro.solvers.forward(model,
                               mesh,
                               comm,
                               vp,
                               sources,
                               wavelet,
                               receivers,
                               output=False)  # output=False assumed, as in test_forward_3d above
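# hypothetical continuation: report the wall time measured from t1 above
print(time.time() - t1, "s for forward solve")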
Example #26
def run(steady=False):
    """
    solve CdT/dt = S + div(k*grad(T))
    => C*v*(dT/dt)/k*dx - S*v/k*dx + grad(v)*grad(T)*dx = v*dot(grad(T), n)*ds
    """
    steps = 250
    dt = 1e-10
    timescale = (0, steps * dt)
    if steady:
        print('Running steady state.')
    else:
        print(f'Running with time step {dt:.2g}s on time interval: '
              f'{timescale[0]:.2g}s - {timescale[1]:.2g}s')
    dt_invc = Constant(1 / dt)
    extent = [40e-6, 40e-6, 40e-6]
    mesh = BoxMesh(20, 20, 20, *extent)

    V = FunctionSpace(mesh, 'CG', 1)
    print(V.dim())

    T = Function(V)  # temperature at time i+1 (electron for now)
    T_ = Function(V)  # temperature at time i
    v = TestFunction(V)  # test function

    S = create_S(mesh, V, extent)
    C = create_heat_capacity(mesh, V, extent)
    k = create_conductivity(mesh, V, T)

    set_initial_value(mesh, T_, extent)

    # Mass matrix section
    M = C * T * dt_invc * v * dx
    M_ = C * T_ * dt_invc * v * dx
    # Stiffness matrix section
    A = k * dot(grad(T), grad(v)) * dx
    # function section
    f = S * v * dx
    # boundaries
    bcs, R, b = create_dirichlet_bounds(mesh,
                                        V,
                                        T,
                                        v,
                                        k,
                                        g=100,
                                        boundary=[1, 2, 3, 4, 5, 6])
    # bcs += create_dirichlet_bounds(mesh, V, T, v, k, 500, [6])[0]
    # bcs, R, b = create_robin_bounds(mesh, T, v, k, 1e8/(100), 1e8)

    if steady:
        steps = 1
        a = A + R
        L = f + b
    else:
        a = M + A + R
        L = M_ + f + b

    prob = NonlinearVariationalProblem(a - L, T, bcs=bcs)
    solver = NonlinearVariationalSolver(prob, solver_parameters=SOLVE_PARAMS)

    T.assign(T_)

    timestamp = datetime.now().strftime("%d-%b-%Y-%H-%M-%S")
    outfile = File(f'{timestamp}/first_output.pvd')
    outfile.write(T_, target_degree=1, target_continuity=H1)
    last_perc = 0
    for i in range(steps):
        solver.solve()

        perc = int(100 * (i + 1) / steps)
        if perc > last_perc:
            print(f'{perc}%')
            last_perc = perc

        T_.assign(T)
        outfile.write(T_, target_degree=1, target_continuity=H1)
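
Each pass through solver.solve() above performs one backward-Euler step for the (nonlinear, since k depends on T) heat equation. In the notation of the assembled forms, the variational statement is, as a sketch:

\int_\Omega \frac{C}{\Delta t} T^{n+1} v \,dx
  + \int_\Omega k(T^{n+1}) \nabla T^{n+1} \cdot \nabla v \,dx + R(T^{n+1}, v)
  = \int_\Omega \frac{C}{\Delta t} T^{n} v \,dx + \int_\Omega S v \,dx + b(v)
  \qquad \forall v \in V,

where R and b collect the boundary contributions returned by create_dirichlet_bounds.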