Example #1
 def load_precomputed_bessel_functions(self, PS):
     """ loads precomputed Bessel functions (modes of analytic solution) """
     f = HDF5File(mpi_comm_world(), 'precomputed/precomputed_' + self.precomputed_filename + '.hdf5', 'r')
     temp = toc()
     fce = Function(PS)
     f.read(fce, "parab")
     self.bessel_parabolic = fce.copy(deepcopy=True)
     for i in range(8):
         f.read(fce, "real%d" % i)
         self.bessel_real.append(fce.copy(deepcopy=True))
         f.read(fce, "imag%d" % i)
         self.bessel_complex.append(fce.copy(deepcopy=True))
     print("Loaded partial solution functions. Time: %f" % (toc() - temp))
    def __init__(self, f_msr, t_msr=None, **kwargs):
        '''

        Parameters
        ----------
        f_msr : sequence of dolfin.Function objects
            Sequence of measurement snapshots.
        t_msr : sequence of ascending values or a single value, optional
            Measurement times. May be one value per measurement snapshot,
            a pair giving the first and last snapshot times, or a single
            value giving the last time.

        Keyword Parameters
        ------------------
        degree : int
            The `degree` must be given if no `element` is given.
        element : dolfin.Element (optional)
            The `element` must be given if no `degree` is given.

        '''

        super().__init__(f_msr, t_msr, **kwargs)
        self._msr_f_cur = Function.copy(f_msr[0], deepcopy=True)
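As an illustration of the three accepted forms of `t_msr` documented above, a small helper could normalize them to one time per snapshot. This is only a sketch: the name expand_measurement_times is hypothetical, uniform spacing is assumed when only the first and last times are given, and a start time of 0 is assumed when only the last time is given.

import numpy as np

def expand_measurement_times(t_msr, num_snapshots):
    """Hypothetical helper illustrating the `t_msr` conventions documented
    above: expand the argument into one time value per snapshot."""
    if np.isscalar(t_msr):
        # A single value is the last time; assume the series starts at 0.
        t_msr = (0.0, float(t_msr))
    t_msr = np.asarray(t_msr, dtype=float)
    if len(t_msr) == 2 and num_snapshots != 2:
        # Two values give the first and last times; assume uniform spacing.
        return np.linspace(t_msr[0], t_msr[1], num_snapshots)
    if len(t_msr) != num_snapshots:
        raise ValueError("Expected one measurement time per snapshot")
    return t_msr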
class Data(object):
    def __init__(self, Th, callback_type):
        # Create mesh and define function space
        mesh = UnitSquareMesh(Th, Th)
        self.V = FunctionSpace(mesh, "Lagrange", 1)
        # Define variational problem
        du = TrialFunction(self.V)
        v = TestFunction(self.V)
        self.u = Function(self.V)
        self.r = lambda u, g: inner(grad(u), grad(v)) * dx + inner(
            u + u**3, v) * dx - g * v * dx
        self.j = lambda u, r: derivative(r, u, du)
        # Define initial guess
        self.initial_guess_expression = Expression(
            "0.1 + 0.9*x[0]*x[1]", element=self.V.ufl_element())
        # Define callback function depending on callback type
        assert callback_type in ("form callbacks", "tensor callbacks")
        if callback_type == "form callbacks":

            def callback(arg):
                return arg
        elif callback_type == "tensor callbacks":

            def callback(arg):
                return assemble(arg)

        self.callback_type = callback_type
        self.callback = callback

    def generate_random(self):
        # Generate random forcing
        g = RandomDolfinFunction(self.V)
        # Generate correspondingly residual and jacobian forms
        r = self.r(self.u, g)
        j = self.j(self.u, r)

        # Prepare problem wrapper
        class ProblemWrapper(NonlinearProblemWrapper):
            # Residual and jacobian functions
            def residual_eval(self_, solution):
                return self.callback(r)

            def jacobian_eval(self_, solution):
                return self.callback(j)

            # Define boundary condition
            def bc_eval(self_):
                return None

            # Empty solution monitor
            def monitor(self_, solution):
                pass

        problem_wrapper = ProblemWrapper()
        # Return
        return (r, j, problem_wrapper)

    def evaluate_builtin(self, r, j, problem_wrapper):
        project(self.initial_guess_expression, self.V, function=self.u)
        solve(r == 0,
              self.u,
              J=j,
              solver_parameters={
                  "nonlinear_solver": "snes",
                  "snes_solver": {
                      "linear_solver": "mumps",
                      "maximum_iterations": 20,
                      "relative_tolerance": 1e-9,
                      "absolute_tolerance": 1e-9,
                      "maximum_residual_evaluations": 10000,
                      "report": True
                  }
              })
        return self.u.copy(deepcopy=True)

    def evaluate_backend(self, r, j, problem_wrapper):
        project(self.initial_guess_expression, self.V, function=self.u)
        solver = NonlinearSolver(problem_wrapper, self.u)
        solver.set_parameters({
            "linear_solver": "mumps",
            "maximum_iterations": 20,
            "relative_tolerance": 1e-9,
            "absolute_tolerance": 1e-9,
            "report": True
        })
        solver.solve()
        return self.u.copy(deepcopy=True)

    def assert_backend(self, r, j, problem_wrapper, result_backend):
        result_builtin = self.evaluate_builtin(r, j, problem_wrapper)
        error = Function(self.V)
        error.vector().add_local(+result_backend.vector().get_local())
        error.vector().add_local(-result_builtin.vector().get_local())
        error.vector().apply("add")
        relative_error = error.vector().norm(
            "l2") / result_builtin.vector().norm("l2")
        assert isclose(relative_error, 0., atol=1e-12)
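Taken together, the class above is a small test harness: generate_random builds a random forcing with the matching residual and Jacobian forms, evaluate_builtin solves with DOLFIN's built-in SNES solver, evaluate_backend solves through the NonlinearSolver backend, and assert_backend checks that both give the same result. A minimal sketch of how it might be driven (the mesh size Th=16 and the callback type are arbitrary choices):

data = Data(Th=16, callback_type="tensor callbacks")
r, j, problem_wrapper = data.generate_random()
result_backend = data.evaluate_backend(r, j, problem_wrapper)
data.assert_backend(r, j, problem_wrapper, result_backend)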
Example #5
class VTKToDOLFIN(object):
    """
    A wrapper around vtk to simplify handling of VTK files
    generated from DOLFIN.

    The class handles reading of data into DOLFIN objects for further processing
    
    """
    def __init__(self, filename, mesh=None, deepcopy=False):
        """
        Initialize the reader with a pvd or a vtu filename
        """
        if not os.path.isfile(filename):
            raise IOError("File '%s' does not exist" % filename)
        filetype = filename.split(".")[-1]
        self._name = ".".join(filename.split(".")[0:-1])
        if filetype not in ["pvd", "vtu"]:
            raise TypeError("Expected a 'pvd' or a 'vtu' file")

        # Get dirname
        dirname = os.path.dirname(filename)
        
        # Check mesh argument
        if mesh is not None and not isinstance(mesh, Mesh):
            raise TypeError("Expected a 'Mesh' for the mesh argument")

        # Store deepcopy argument
        self._deepcopy = deepcopy
        
        # Store mesh
        self._mesh = mesh
        
        # Initialize the filename cache
        self._filenames = []
        if filetype == "vtu":
            self._filenames.append(filename)
            self._times = np.array([])
        else:
            # Parse pvd file
            tree = ElementTree(file=filename)
            times = []
            for item in tree.iter():
                if item.tag == "DataSet":
                    self._filenames.append(
                        os.path.join(dirname, item.attrib["file"]))
                    times.append(float(item.attrib["timestep"]))
            
            times = np.array(times, dtype='d')

            # If the time steps are just consecutive integers, there is no
            # real time data stored in the file, so use an empty array
            if np.all(np.diff(times) == 1):
                times = np.array([])

            # Store time data
            self._times = times

        # Construct file reader
        self.reader = vtk.vtkXMLUnstructuredGridReader()
        
        # Read in data from file
        self._update_vtk_data()

        # Init dolfin structures (Function, FunctionSpace)
        self._init_dolfin_data()

    def _update_vtk_data(self, index=0):
        "Set a new data file"

        # Update file name
        print "Reading '%s'"%self._filenames[index]
        self.reader.SetFileName(self._filenames[index])
        
        # Read data
        self.reader.Update()
        
        # Set data type (scalar or vector)
        # FIXME: Include Tensors when that is supported by DOLFIN
        self.scalar = self.reader.GetOutput().GetPointData().GetScalars() is not None

        print "Scalar data set" if self.scalar else "Vector data set"
        
    def _init_dolfin_data(self):
        "Update DOLFIN function from vtk data"
        
        if self.reader.GetNumberOfPointArrays() != 1:
            raise ValueError("Expected the vtk file to include one data "\
                             "set per vertex.")

        # Initialize FunctionSpace and Function (scalar or vector)
        if self.scalar:
            self._V = FunctionSpace(self.mesh(), "CG", 1)
        else:
            self._V = VectorFunctionSpace(self.mesh(), "CG", 1)
        
        self._u = Function(self._V)
        
    def _update_dolfin_data(self):
        "Update dolfin data from present VTK file"
        
        # Get VTK point data
        point_data = self.reader.GetOutput().GetPointData()

        # Get data and update Function
        if self.scalar:
            self._u.vector()[:] = array_handler.vtk2array(point_data.GetScalars())
        else:
            values = array_handler.vtk2array(point_data.GetVectors()).transpose()
            self._u.vector()[:] = np.reshape(values, (np.prod(values.shape),))
    
    def functions_space(self):
        "Return the FunctionSpace"
        return self._V
    
    def mesh(self):
        "Return the dolfin mesh"

        # If no mesh is stored read in from UnstructuredGridData
        if self._mesh is None:
            self._mesh = vtk_ug_to_dolfin_mesh(self.reader.GetOutput())

        # Small sanity check (only meaningful in serial)
        if MPI.size(mpi_comm_world()) == 1:
            assert(self._mesh.num_vertices() == \
                self.reader.GetOutput().GetNumberOfPoints() and \
                self._mesh.num_cells() == \
                self.reader.GetOutput().GetNumberOfCells())
        
        return self._mesh
    
    def name(self):
        "Return the name"
        return self._name

    def __getitem__(self, index):
        "x.__getitem__(y) <==> x[y]"
        # Update data structures to next index if not out of files
        if not isinstance(index, int):
            raise TypeError("Expected an int for the index argument")
        if index < 0 or index >= len(self):
            raise IndexError("index need to be smaller than ")

        # Update the stored data
        self._update_vtk_data(index)
        self._update_dolfin_data()

        # Should we return a copy of the stored data?
        u = self._u.copy(deepcopy=True) if self._deepcopy else self._u

        # If time is registered return with this information
        if len(self._times):
            return self._times[index], u
        
        return u

    def __len__(self):
        "x.__len__() <==> len(x)"
        return len(self._filenames)

    def __iter__(self):
        "x.__iter__() <==> iter(x)"
        for i in range(len(self)):
            yield self[i]
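Because __len__, __getitem__ and __iter__ are implemented, the reader behaves like a sequence of snapshots, yielding (time, Function) pairs when time data is available and plain Functions otherwise. A minimal usage sketch, where the file name "results/u.pvd" is only a placeholder:

reader = VTKToDOLFIN("results/u.pvd", deepcopy=True)
print("Found %d snapshots in '%s'" % (len(reader), reader.name()))
for item in reader:
    # Each item is a (time, Function) pair when time data is available,
    # otherwise just the Function.
    t, u = item if isinstance(item, tuple) else (None, item)
    print(t, u.vector().norm("l2"))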