Example #1
File: mesh.py | Project: LeiDai/sfepy
    def __init__(self, name='mesh', filename=None,
                 prefix_dir=None, **kwargs):
        """Create a Mesh.

        Parameters
        ----------
        name : str
            Object name.
        filename : str
            Loads a mesh from the specified file, if not None.
        prefix_dir : str
            If not None, the filename is relative to that directory.
        """
        Struct.__init__(self, name=name, **kwargs)
        self.nodal_bcs = {}

        if filename is None:
            self.io = None
            self.setup_done = 0

        else:
            io = MeshIO.any_from_filename(filename, prefix_dir=prefix_dir)
            output('reading mesh (%s)...' % (io.filename))
            tt = time.clock()
            io.read(self)
            output('...done in %.2f s' % (time.clock() - tt))
            self._set_shape_info()
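
A minimal usage sketch of the constructor above; the import path depends on the
sfepy version (sfepy.fem in older releases, sfepy.discrete.fem later) and the
mesh filename is a placeholder:

from sfepy.fem import Mesh  # sfepy.discrete.fem in newer versions

# Reading happens inside __init__() when a filename is given.
mesh = Mesh(name='my_mesh', filename='meshes/3d/cylinder.mesh')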
Example #2
File: extract_edges.py | Project: rc/sfepy
def main():
    parser = ArgumentParser(description=__doc__)
    parser.add_argument('--version', action='version', version='%(prog)s')
    parser.add_argument('--eps', action='store', dest='eps',
                        default=1e-12, help=helps['eps'])
    parser.add_argument('-o', '--filename-out',
                        action='store', dest='filename_out',
                        default=None, help=helps['filename-out'])
    parser.add_argument('filename')
    options = parser.parse_args()

    filename = options.filename

    mesh = Mesh.from_file(filename)
    mesh_out = extract_edges(mesh, eps=float(options.eps))
    mesh_out = merge_lines(mesh_out)

    filename_out = options.filename_out
    if filename_out is None:
        filename_out = edit_filename(filename, prefix='edge_', new_ext='.vtk')

    output('Outline mesh - vertices: %d, edges: %d, output filename: %s'
           % (mesh_out[0].shape[0], mesh_out[2][0].shape[0], filename_out))

    # hack to write '3_2' elements - edges
    io = VTKMeshIO(None)
    aux_mesh = Struct()
    aux_mesh._get_io_data = lambda: mesh_out
    aux_mesh.n_el = mesh_out[2][0].shape[0]
    io.write(filename_out, aux_mesh)
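
A programmatic sketch equivalent to main() above; the module providing
extract_edges() and merge_lines() is an assumption and the mesh filenames are
placeholders:

from sfepy.discrete.fem import Mesh
from sfepy.mesh.mesh_tools import extract_edges, merge_lines  # assumed location

mesh = Mesh.from_file('block.mesh')  # placeholder input file
mesh_out = merge_lines(extract_edges(mesh, eps=1e-12))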
Example #3
File: probes.py | Project: clazaro/sfepy
    def __init__(self, name, share_geometry=True, n_point=None, **kwargs):
        """
        Parameters
        ----------
        name : str
            The probe name, set automatically by the subclasses.
        share_geometry : bool
            Set to True to indicate that all the probes will work on the same
            domain. Certain data are then computed only for the first probe and
            cached.
        n_point : int
            The (fixed) number of probe points, when positive. When non-positive,
            the number of points is adaptively increased starting from -n_point,
            until the neighboring point distance is less than the diameter of the
            elements enclosing the points. When None, it is set to -10.

        For additional parameters see the __init__() docstrings of the
        subclasses.
        """
        Struct.__init__(self, name=name, share_geometry=share_geometry,
                        **kwargs)

        self.set_n_point(n_point)

        self.options = Struct(close_limit=0.1, size_hint=None)
        self.cache = Struct(name='probe_local_evaluate_cache')

        self.is_refined = False
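
An illustrative construction of one of the subclasses mentioned in the
docstring; the LineProbe import path and argument order are assumptions:

from sfepy.discrete.probes import LineProbe  # assumed import path

# 100 fixed probe points between two endpoints; share_geometry is passed on
# to the base Probe.__init__() shown above.
probe = LineProbe([0.0, 0.0, 0.0], [1.0, 0.0, 0.0], 100, share_geometry=True)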
Example #4
    def __init__(self, name, definition, domain, parse_def):
        """
        Create region instance.

        Parameters
        ----------
        name : str
            The region name, either given, or automatic for intermediate
            regions.
        definition : str
            The region selector definition.
        domain : Domain instance
            The domain of the region.
        parse_def : str
            The parsed definition of the region.

        Notes
        -----
        conns, vertex_groups are links to domain data.
        """
        Struct.__init__(self,
                        name=name, definition=definition,
                        n_v_max=domain.shape.n_nod, domain=domain,
                        parse_def=parse_def, all_vertices=None,
                        igs=[], vertices={}, edges={}, faces={},
                        cells={}, fis={},
                        can_cells=True, true_cells={}, must_update=True,
                        is_complete=False,
                        mirror_region=None, ig_map=None,
                        ig_map_i=None)
Example #5
    def __init__( self, conf, options, output_prefix, **kwargs ):
        Struct.__init__( self,
                         conf = conf,
                         options = options,
                         output_prefix = output_prefix )
        output.prefix = self.output_prefix
        self.restore()
Example #6
File: probes.py | Project: animator/sfepy
def read_header(fd):
    """
    Read the probe data header from file descriptor fd.

    Returns
    -------
    header : Struct instance
        The probe data header.
    """
    header = Struct(name='probe_data_header')
    header.probe_class = fd.readline().strip()

    aux = fd.readline().strip().split(':')[1]
    header.n_point = int(aux.strip().split()[0])

    details = []
    while 1:
        line = fd.readline().strip()

        if line == '-----':
            break
        else:
            details.append(line)
    header.details = '\n'.join(details)

    return header
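
A usage sketch of read_header(); the filename is a placeholder for a probe
results file written by sfepy:

# fd must point at the beginning of the probe data file.
with open('probe_line0.txt') as fd:
    header = read_header(fd)
    print(header.probe_class, header.n_point)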
Example #7
    def __init__(self, name, definition, domain, parse_def, kind='cell',
                 parent=None):
        """
        Create region instance.

        Parameters
        ----------
        name : str
            The region name, either given, or automatic for intermediate
            regions.
        definition : str
            The region selector definition.
        domain : Domain instance
            The domain of the region.
        parse_def : str
            The parsed definition of the region.
        kind : str
            The region kind - one of 'cell', 'facet', 'face', 'edge', 'vertex',
            'cell_only', ..., 'vertex_only'.
        parent : str, optional
            The name of the parent region.
        """
        tdim = domain.shape.tdim
        Struct.__init__(self,
                        name=name, definition=definition,
                        domain=domain, parse_def=parse_def,
                        n_v_max=domain.shape.n_nod, dim=domain.shape.dim,
                        tdim=tdim, kind_tdim=None,
                        entities=[None] * (tdim + 1),
                        kind=None, parent=parent, shape=None,
                        mirror_region=None, is_empty=False)
        self.set_kind(kind)
Example #8
File: oseen.py | Project: snilek/sfepy
    def __init__(self, name_map, gamma=None, delta=None, tau=None, tau_red=1.0,
                 tau_mul=1.0, delta_mul=1.0, gamma_mul=1.0,
                 diameter_mode='max'):
        Struct.__init__(self, name_map=name_map,
                        gamma=gamma, delta=delta, tau=tau,
                        tau_red=tau_red, tau_mul=tau_mul, delta_mul=delta_mul,
                        gamma_mul=gamma_mul, diameter_mode=diameter_mode)
Example #9
    def __init__(self, anchor, normal, bounds):
        Struct.__init__(self, anchor=nm.array(anchor, dtype=nm.float64),
                        bounds=nm.asarray(bounds, dtype=nm.float64))
        self.normal = nm.asarray(normal, dtype=nm.float64)

        norm = nm.linalg.norm
        self.normal /= norm(self.normal)

        e3 = [0.0, 0.0, 1.0]
        dd = nm.dot(e3, self.normal)
        rot_angle = nm.arccos(dd)

        if nm.abs(rot_angle) < 1e-14:
            mtx = nm.eye(3, dtype=nm.float64)
            bounds2d = self.bounds[:, :2]

        else:
            rot_axis = nm.cross([0.0, 0.0, 1.0], self.normal)
            mtx = la.make_axis_rotation_matrix(rot_axis, rot_angle)

            mm = la.insert_strided_axis(mtx, 0, self.bounds.shape[0])
            rbounds = la.dot_sequences(mm, self.bounds)
            bounds2d = rbounds[:, :2]

        assert_(nm.allclose(nm.dot(mtx, self.normal), e3,
                            rtol=0.0, atol=1e-12))

        self.adotn = nm.dot(self.anchor, self.normal)

        self.rot_angle = rot_angle
        self.mtx = mtx
        self.bounds2d = bounds2d
Example #10
File: region.py | Project: mfkiwl/sfepy
    def __init__(self, name, definition, domain, parse_def, kind='cell',
                 parent=None):
        """
        Create region instance.

        Parameters
        ----------
        name : str
            The region name, either given, or automatic for intermediate
            regions.
        definition : str
            The region selector definition.
        domain : Domain instance
            The domain of the region.
        parse_def : str
            The parsed definition of the region.

        Notes
        -----
        conns, vertex_groups are links to domain data.
        """
        tdim = domain.shape.tdim
        Struct.__init__(self,
                        name=name, definition=definition,
                        domain=domain, parse_def=parse_def,
                        n_v_max=domain.shape.n_nod, dim=domain.shape.dim,
                        tdim=tdim,
                        entities=[None] * (tdim + 1),
                        kind=None, parent=parent, shape=None,
                        mirror_region=None, ig_map=None,
                        ig_map_i=None)
        self.set_kind(kind)
Example #11
File: solvers.py | Project: Gkdnz/sfepy
    def process_conf(cls, conf, kwargs):
        """
        Process configuration parameters.
        """
        get = make_get_conf(conf, kwargs)

        if len(cls._parameters) and cls._parameters[0][0] != 'name':
            options = Solver._parameters + cls._parameters

        else:
            options = cls._parameters

        opts = Struct()
        allow_extra = False
        for name, _, default, required, _ in options:
            if name == '*':
                allow_extra = True
                continue

            msg = ('missing "%s" in options!' % name) if required else None
            setattr(opts, name, get(name, default, msg))

        if allow_extra:
            all_keys = set(conf.to_dict().keys())
            other = all_keys.difference(opts.to_dict().keys())
            for name in other:
                setattr(opts, name, get(name, None, None))

        return opts
Example #12
File: matcoefs.py | Project: lokik/sfepy
    def init(self, young=None, poisson=None, bulk=None, lam=None,
             mu=None, p_wave=None):
        """
        Set exactly two of the elastic constants, and compute the
        remaining. (Re)-initializes the existing instance of ElasticConstants.
        """
        Struct.__init__(self, young=young, poisson=poisson, bulk=bulk, lam=lam,
                        mu=mu, p_wave=p_wave)

        values = {}
        for key, val in six.iteritems(self.__dict__):
            if (key in self.names) and (val is not None):
                sym = getattr(self.ec, key)
                values[sym] = val

        known = list(values.keys())
        if len(known) != 2:
            raise ValueError('exactly two elastic constants must be provided!')
        known = [ii.name for ii in known]

        unknown = set(self.names).difference(known)

        for name in unknown:
            key = tuple(sorted(known)) + (name,)
            val = float(self.relations[key].n(subs=values))
            setattr(self, name, val)
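
A sketch of the two-constant initialization described above; the import path is
assumed from the matcoefs.py filename:

from sfepy.mechanics.matcoefs import ElasticConstants  # assumed import path

ec = ElasticConstants(young=210e9, poisson=0.3)  # exactly two constants given
print(ec.lam, ec.mu, ec.bulk, ec.p_wave)         # the others are computed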
Example #13
    def __init__(self, name, kind, domain, single_facets, n_obj, indices, facets):
        Struct.__init__(
            self,
            name=name,
            kind=kind,
            domain=domain,
            single_facets=single_facets,
            n_obj=n_obj,
            indices=indices,
            facets=facets,
        )
        self.n_all_obj, self.n_col = facets.shape
        self.n_gr = len(self.n_obj)

        self.indx = {}
        ii = 0
        for ig, nn in enumerate(self.n_obj):
            self.indx[ig] = slice(ii, ii + nn)
            ii += nn

        self.n_fps_vec = nm.empty(self.n_all_obj, dtype=nm.int32)
        self.n_fps = {}
        for ig, facet in self.single_facets.iteritems():
            self.n_fps_vec[self.indx[ig]] = facet.shape[1]
            self.n_fps[ig] = facet.shape[1]
Example #14
    def __init__(self, name, dtype, shape, region,
                 space='H1', poly_space_base='lagrange', approx_order=1):
        """Create a Field.

        Parameters
        ----------
        name : str
            Object name.
        dtype : numpy.dtype
            Field data type: float64 or complex128.
        shape : int/tuple/str
            Field shape: 1 or (1,) or 'scalar', space dimension (2, or
            (2,) or 3 or (3,)) or 'vector'. The field shape determines
            the shape of the FE base functions and can be different from
            a FieldVariable instance shape. (TODO)

        region : Region
            The region where the field is defined.
        space : str
            The function space name.
        poly_space_base : str
            The name of polynomial space base.
        approx_order : int/str
            FE approximation order, e.g. 0, 1, 2, '1B' (1 with bubble).

        Notes
        -----
        Assumes one cell type for the whole region!
        """
        if isinstance(shape, str):
            try:
                shape = {'scalar' : (1,),
                         'vector' : (region.domain.shape.dim,)}[shape]
            except KeyError:
                raise ValueError('unsupported field shape! (%s)' % shape)

        elif isinstance(shape, int):
            shape = (shape,)

        Struct.__init__(self,
                        name = name,
                        dtype = dtype,
                        shape = shape,
                        region = region,
                        space = space,
                        poly_space_base = poly_space_base)
        self.domain = self.region.domain

        self.clear_dof_conns()

        self.set_approx_order(approx_order)
        self.setup_geometry()

        # To refactor below...
        self.create_interpolant()
        self.setup_approximations()
##         print self.aps
##         pause()
        self.setup_global_base()
        self.setup_coors()
Example #15
    def __init__(self, name, regions, dof_names, dof_map_fun, variables,
                 functions=None):
        Struct.__init__(self, name=name, regions=regions, dof_names=dof_names)

        if dof_map_fun is not None:
            self.dof_map_fun = get_condition_value(dof_map_fun, functions,
                                                   'LCBC', 'dof_map_fun')
        self._setup_dof_names(variables)
Example #16
    def __init__(self, name, kind='time-dependent',
                 function=None, values=None, flags=None, **kwargs):
        """
        Parameters
        ----------
        name : str
            The name of the material.
        kind : 'time-dependent' or 'stationary'
            The kind of the material.
        function : function
            The function for setting up the material values.
        values : dict
            Constant material values.
        flags : dict, optional
            Special flags.
        **kwargs : keyword arguments, optional
            Constant material values passed by their names.
        """
        Struct.__init__(self, name=name, kind=kind, is_constant=False)

        if (function is not None) and ((values is not None) or len(kwargs)):
            msg = 'material can have function or values but not both! (%s)' \
                  % self.name
            raise ValueError(msg)

        self.flags = get_default(flags, {})

        if hasattr(function, '__call__'):
            self.function = function

        elif (values is not None) or len(kwargs): # => function is None
            if isinstance(values, dict):
                key0 = values.keys()[0]
                assert_(isinstance(key0, str))

            else:
                key0 = None

            if (key0 and (not key0.startswith('.'))
                and isinstance(values[key0], dict)):
                self.function = ConstantFunctionByRegion(values)
                self.is_constant = True

            else:
                all_values = {}
                if values is not None:
                    all_values.update(values)
                all_values.update(kwargs)

                self.function = ConstantFunction(all_values)
                self.is_constant = True

        else: # => both values and function are None
            msg = 'material %s: neither function nor values given!' \
                  % self.name
            raise ValueError(msg)

        self.reset()
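
A minimal sketch of creating constant materials with the constructor above; the
import path reflects recent sfepy versions and the names/values are illustrative:

from sfepy.discrete import Material  # assumed import path

# Constant values can be given via `values` or directly as keyword arguments.
coef = Material('coef', values={'val': 1.0})
fluid = Material('fluid', viscosity=1e-3, density=1e3)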
Example #17
File: log.py | Project: mikegraham/sfepy
    def __init__(self, data_names=None, yscales=None,
                 xlabels=None, ylabels=None, is_plot=True, aggregate=200,
                 formats=None, log_filename=None):
        """`data_names` ... tuple of names grouped by subplots:
                            ([name1, name2, ...], [name3, name4, ...], ...)
        where name<n> are strings to display in (sub)plot legends."""
        try:
            import matplotlib as mpl
        except:
            mpl = None

        if (mpl is not None) and mpl.rcParams['backend'] == 'GTKAgg':
            can_live_plot = True
        else:
            can_live_plot = False

        Struct.__init__(self, data_names = {},
                        n_arg = 0, n_gr = 0,
                        data = {}, x_values = {}, n_calls = 0,
                        yscales = {}, xlabels = {}, ylabels = {},
                        plot_pipe = None, formats = {}, output = None)

        if data_names is not None:
            n_gr = len(data_names)
        else:
            n_gr = 0
            data_names = []

        yscales = get_default(yscales, ['linear'] * n_gr)
        xlabels = get_default(xlabels, ['iteration'] * n_gr)
        ylabels = get_default(ylabels, [''] * n_gr )

        if formats is None:
            formats = [None] * n_gr

        for ig, names in enumerate(data_names):
            self.add_group(names, yscales[ig], xlabels[ig], ylabels[ig],
                           formats[ig])

        self.is_plot = get_default( is_plot, True )
        self.aggregate = get_default( aggregate, 100 )

        self.can_plot = (can_live_plot and (mpl is not None)
                         and (Process is not None))

        if log_filename is not None:
            self.output = Output('', filename=log_filename)
            self.output('# started: %s' % time.asctime())
            self.output('# groups: %d' % n_gr)
            for ig, names in enumerate(data_names):
                self.output('#   %d' % ig)
                self.output('#     xlabel: "%s", ylabel: "%s", yscales: "%s"'
                            % (xlabels[ig], ylabels[ig], yscales[ig]))
                self.output('#     names: "%s"' % ', '.join(names))

        if self.is_plot and (not self.can_plot):
            output(_msg_no_live)
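
An illustrative construction matching the signature above, with two subplot
groups and the second one on a logarithmic scale; the names and the log
filename are arbitrary:

from sfepy.base.log import Log  # assumed import path

log = Log([['u_max'], ['err_abs', 'err_rel']],
          yscales=['linear', 'log'],
          xlabels=['time [s]', 'iteration'],
          ylabels=['max(u)', 'error'],
          log_filename='convergence.txt')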
Example #18
    def __init__( self, name, problem, kwargs ):
        Struct.__init__( self, name = name, problem = problem, **kwargs )

        self.problem.clear_equations()
        self.set_default_attr('requires', [])
        self.set_default_attr('is_linear', False)
        self.set_default_attr('dtype', nm.float64)
        self.set_default_attr('term_mode', None)
        self.set_default_attr('set_volume', 'total')
Example #19
File: dof_info.py | Project: ZJLi2013/sfepy
    def __init__(self, name):
        Struct.__init__(self, name=name)

        self.n_var = 0
        self.var_names = []
        self.n_dof = {}
        self.ptr = [0]
        self.indx = {}
        self.details = {}
Example #20
    def __init__(self, filename, approx, region_selects, mat_pars, options,
                 evp_options, eigenmomenta_options, band_gaps_options,
                 coefs_save_name='coefs',
                 corrs_save_names=None,
                 incwd=None,
                 output_dir=None, **kwargs):
        Struct.__init__(self, approx=approx, region_selects=region_selects,
                        mat_pars=mat_pars, options=options,
                        evp_options=evp_options,
                        eigenmomenta_options=eigenmomenta_options,
                        band_gaps_options=band_gaps_options,
                        **kwargs)
        self.incwd = get_default(incwd, lambda x: x)

        self.conf = Struct()
        self.conf.filename_mesh = self.incwd(filename)

        output_dir = get_default(output_dir, self.incwd('output'))

        default = {'evp' : 'evp', 'corrs_rs' : 'corrs_rs'}
        self.corrs_save_names = get_default(corrs_save_names,
                                            default)

        io = MeshIO.any_from_filename(self.conf.filename_mesh)
        self.bbox, self.dim = io.read_bounding_box(ret_dim=True)
        rpc_axes = nm.eye(self.dim, dtype=nm.float64) \
                   * (self.bbox[1] - self.bbox[0])

        self.conf.options = options
        self.conf.options.update({
            'output_dir' : output_dir,

            'volume' : {
                'value' : get_lattice_volume(rpc_axes),
            },

            'coefs' : 'coefs',
            'requirements' : 'requirements',

            'coefs_filename' : coefs_save_name,
        })

        self.conf.mat_pars = mat_pars

        self.conf.solvers = self.define_solvers()
        self.conf.regions = self.define_regions()
        self.conf.materials = self.define_materials()
        self.conf.fields = self.define_fields()
        self.conf.variables = self.define_variables()
        (self.conf.ebcs, self.conf.epbcs,
         self.conf.lcbcs, self.all_periodic) = self.define_bcs()
        self.conf.functions = self.define_functions()
        self.conf.integrals = self.define_integrals()

        self.equations, self.expr_coefs = self.define_equations()
        self.conf.coefs = self.define_coefs()
        self.conf.requirements = self.define_requirements()
Example #21
    def __init__(self, name, terms):
        Struct.__init__(self, name = name)

        if isinstance(terms, Term): # single Term
            terms = Terms([terms])

        self.terms = terms

        self.terms.setup()
Example #22
    def __init__(self, name, nodes, region, field, dof_names, filename=None):
        Struct.__init__(self, name=name, nodes=nodes, dof_names=dof_names)

        dim = field.shape[0]
        assert_(len(dof_names) == dim)

        normals = compute_nodal_normals(nodes, region, field)

        if filename is not None:
            _save_normals(filename, normals, region, field.domain.mesh)

        ii = nm.abs(normals).argmax(1)
        n_nod, dim = normals.shape

        irs = set(range(dim))

        data = []
        rows = []
        cols = []
        for idim in xrange(dim):
            ic = nm.where(ii == idim)[0]
            if len(ic) == 0: continue
            ## print ic
            ## print idim

            ir = list(irs.difference([idim]))
            nn = nm.empty((len(ic), dim - 1), dtype=nm.float64)
            for ik, il in enumerate(ir):
                nn[:,ik] = - normals[ic,il] / normals[ic,idim]

            irn = dim * ic + idim
            ics = [(dim - 1) * ic + ik for ik in xrange(dim - 1)]
            for ik in xrange(dim - 1):
                rows.append(irn)
                cols.append(ics[ik])
                data.append(nn[:,ik])

            ones = nm.ones( (nn.shape[0],), dtype = nm.float64 )
            for ik, il in enumerate(ir):
                rows.append(dim * ic + il)
                cols.append(ics[ik])
                data.append(ones)

        ## print rows
        ## print cols
        ## print data

        rows = nm.concatenate(rows)
        cols = nm.concatenate(cols)
        data = nm.concatenate(data)

        n_np_dof = n_nod * (dim - 1)
        mtx = sp.coo_matrix((data, (rows, cols)), shape=(n_nod * dim, n_np_dof))

        self.n_dof = n_np_dof
        self.mtx = mtx.tocsr()
Example #23
File: mappings.py | Project: snilek/sfepy
    def __init__(self, igs, n_total=0, is_uniform=True):
        Struct.__init__(self, igs=igs, n_total=n_total, indx={}, rindx={},
                        n_per_group={}, shape={}, values={},
                        is_uniform=is_uniform)
        for ig in self.igs:
            self.indx[ig] = slice(None)
            self.rindx[ig] = slice(None)
            self.n_per_group[ig] = 0
            self.shape[ig] = (0, 0, 0)
            self.values[ig] = nm.empty(self.shape[ig], dtype=nm.float64)
Example #24
    def __init__(self, name, regions, dof_names, dof_map_fun, variables, functions=None):
        Struct.__init__(self, name=name, region=regions[0], dof_names=dof_names[0])

        self._setup_dof_names(variables)

        self.eq_map = variables[self.var_name].eq_map
        self.field = variables[self.var_name].field
        self.mdofs = self.field.get_dofs_in_region(self.region, merge=True)

        self.n_sdof = 0
Example #25
    def __init__(self, name, dtype, shape, region, approx_order=1):
        """
        Create a Field.

        name : str
            The field name.
        dtype : numpy.dtype
            The field data type: float64 or complex128.
        shape : int/tuple/str
            The field shape: 1 or (1,) or 'scalar', space dimension (2, or
            (2,) or 3 or (3,)) or 'vector'. The field shape determines
            the shape of the FE base functions and can be different from
            a FieldVariable instance shape. (TODO)
        region : Region
            The region where the field is defined.
        approx_order : int/str
            The FE approximation order, e.g. 0, 1, 2, '1B' (1 with bubble).

        Notes
        -----
        Assumes one cell type for the whole region!
        """
        if isinstance(shape, basestr):
            try:
                shape = {'scalar' : (1,),
                         'vector' : (region.domain.shape.dim,)}[shape]
            except KeyError:
                raise ValueError('unsupported field shape! (%s)' % shape)

        elif isinstance(shape, int):
            shape = (shape,)

        if not self._check_region(region):
            raise ValueError('unsuitable region for field %s! (%s)' %
                             (name, region.name))

        Struct.__init__(self,
                        name=name,
                        dtype=dtype,
                        shape=shape,
                        region=region)
        self.domain = self.region.domain
        self.igs = self.region.igs

        self.clear_dof_conns()

        self._set_approx_order(approx_order)
        self._setup_geometry()
        self._setup_kind()

        self._create_interpolant()
        self._setup_approximations()
        self._setup_global_base()
        self.setup_coors()
        self.clear_mappings(clear_all=True)
Example #26
File: domain.py | Project: Gkdnz/sfepy
    def __init__(self, knots, degrees, cps,
                 weights, cs, conn):
        degrees = nm.asarray(degrees, dtype=nm.int32)
        cs = [nm.asarray(cc, dtype=nm.float64) for cc in cs]
        if cs[0].ndim == 3:
            cs = [nm.ascontiguousarray(cc[:, None, ...]) for cc in cs]

        Struct.__init__(self, name='nurbs', knots=knots, degrees=degrees,
                        cps=cps, weights=weights, cs=cs, conn=conn)
        self.n_els = [len(ii) for ii in cs]
        self.dim = len(self.n_els)
Example #27
    def __init__(self, name, problem, kwargs):
        Struct.__init__(self, name=name, problem=problem, **kwargs)

        self.problem.clear_equations()
        self.set_default('requires', [])
        self.set_default('is_linear', False)
        self.set_default('dtype', nm.float64)
        self.set_default('term_mode', None)
        self.set_default('set_volume', 'total')

        # Application-specific options.
        self.app_options = self.process_options()
Example #28
File: fields.py | Project: Gkdnz/sfepy
    def __init__(self, name, dtype, shape, region, approx_order=None,
                 **kwargs):
        """
        Create a Bezier element isogeometric analysis field.

        Parameters
        ----------
        name : str
            The field name.
        dtype : numpy.dtype
            The field data type: float64 or complex128.
        shape : int/tuple/str
            The field shape: 1 or (1,) or 'scalar', space dimension (2, or (2,)
            or 3 or (3,)) or 'vector', or a tuple. The field shape determines
            the shape of the FE base functions and is related to the number of
            components of variables and to the DOF per node count, depending
            on the field kind.
        region : Region
            The region where the field is defined.
        approx_order : str or tuple, optional
            The field approximation order string or tuple with the first
            component in the form 'iga+<nonnegative int>'. Other components are
            ignored. The nonnegative int corresponds to the number of times the
            degree is elevated by one w.r.t. the domain NURBS description.
        **kwargs : dict
            Additional keyword arguments.
        """
        shape = parse_shape(shape, region.domain.shape.dim)

        if approx_order is None:
            elevate_times = 0

        else:
            if isinstance(approx_order, basestr): approx_order = (approx_order,)
            elevate_times = parse_approx_order(approx_order[0])

        Struct.__init__(self, name=name, dtype=dtype, shape=shape,
                        region=region, elevate_times=elevate_times)

        self.domain = self.region.domain
        self.nurbs = self.domain.nurbs.elevate(elevate_times)

        self._setup_kind()

        self.n_components = nm.prod(self.shape)
        self.val_shape = self.shape
        self.n_nod = self.nurbs.weights.shape[0]
        self.n_efun = nm.prod(self.nurbs.degrees + 1)
        self.approx_order = self.nurbs.degrees.max()

        self.mappings = {}

        self.is_surface = False
Example #29
File: fields_base.py | Project: lokik/sfepy
    def __init__(self, name, dtype, shape, region, approx_order=1):
        """
        Create a finite element field.

        Parameters
        ----------
        name : str
            The field name.
        dtype : numpy.dtype
            The field data type: float64 or complex128.
        shape : int/tuple/str
            The field shape: 1 or (1,) or 'scalar', space dimension (2, or (2,)
            or 3 or (3,)) or 'vector', or a tuple. The field shape determines
            the shape of the FE base functions and is related to the number of
            components of variables and to the DOF per node count, depending
            on the field kind.
        region : Region
            The region where the field is defined.
        approx_order : int or tuple
            The FE approximation order. The tuple form is (order, has_bubble),
            e.g. (1, True) means order 1 with a bubble function.

        Notes
        -----
        Assumes one cell type for the whole region!
        """
        shape = parse_shape(shape, region.domain.shape.dim)
        if not self._check_region(region):
            raise ValueError('unsuitable region for field %s! (%s)' %
                             (name, region.name))

        Struct.__init__(self, name=name, dtype=dtype, shape=shape,
                        region=region)
        self.domain = self.region.domain

        self._set_approx_order(approx_order)
        self._setup_geometry()
        self._setup_kind()
        self._setup_shape()

        self.surface_data = {}
        self.point_data = {}
        self.ori = None
        self._create_interpolant()
        self._setup_global_base()
        self.setup_coors()
        self.clear_mappings(clear_all=True)
        self.clear_qp_base()
        self.basis_transform = None
        self.econn0 = None
        self.unused_dofs = None
        self.stored_subs = None
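
In application code the same arguments are usually passed through
Field.from_args(), which dispatches to a concrete field class such as the one
above; a sketch assuming `omega` is an existing cell Region:

import numpy as nm
from sfepy.discrete import Field

# 'omega' is assumed to be a Region of the problem domain.
field = Field.from_args('fu', nm.float64, 'vector', omega, approx_order=2)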
Example #30
File: dof_info.py | Project: ZJLi2013/sfepy
    def __init__(self, name, nodes, region, field, dof_names):
        Struct.__init__(self, name=name, nodes=nodes, dof_names=dof_names)

        dpn = len(dof_names)
        n_nod = nodes.shape[0]

        data = nm.ones((n_nod * dpn,))
        rows = nm.arange(data.shape[0])
        cols = nm.zeros((data.shape[0],))

        mtx = sp.coo_matrix((data, (rows, cols)), shape=(n_nod * dpn, dpn))

        self.n_dof = dpn
        self.mtx = mtx.tocsr()
Example #31
    def process_options(self):
        get = self.options.get
        return Struct(mode=get('mode', 'simple'),
                      incident_wave_dir=get('incident_wave_dir', None))
Example #32
File: terms.py | Project: chiao45/sfepy
    def get_dof_conn_type(self):
        return Struct(name='dof_conn_info',
                      type=self.dof_conn_type,
                      region_name=self.region.name)
Example #33
    def create_output(self, dofs, var_name, dof_names=None,
                      key=None, extend=True, fill_value=None,
                      linearization=None):
        """
        Convert the DOFs corresponding to the field to a dictionary of
        output data usable by Mesh.write().

        Parameters
        ----------
        dofs : array, shape (n_nod, n_component)
            The array of DOFs reshaped so that each column corresponds
            to one component.
        var_name : str
            The variable name corresponding to `dofs`.
        dof_names : tuple of str
            The names of DOF components.
        key : str, optional
            The key to be used in the output dictionary instead of the
            variable name.
        extend : bool
            Extend the DOF values to cover the whole domain.
        fill_value : float or complex
            The value used to fill the missing DOF values if `extend` is True.
        linearization : Struct or None
            The linearization configuration for higher order approximations.

        Returns
        -------
        out : dict
            The output dictionary.
        """
        linearization = get_default(linearization, Struct(kind='strip'))

        out = {}
        if linearization.kind is None:
            out[key] = Struct(name='output_data', mode='full',
                              data=dofs, var_name=var_name,
                              dofs=dof_names, field_name=self.name)

        elif linearization.kind == 'strip':
            if extend:
                ext = self.extend_dofs(dofs, fill_value)

            else:
                ext = self.remove_extra_dofs(dofs)

            if ext is not None:
                approx_order = self.get_output_approx_order()

                if approx_order != 0:
                    # Has vertex data.
                    out[key] = Struct(name='output_data', mode='vertex',
                                      data=ext, var_name=var_name,
                                      dofs=dof_names)

                else:
                    ext.shape = (ext.shape[0], 1, ext.shape[1], 1)
                    out[key] = Struct(name='output_data', mode='cell',
                                      data=ext, var_name=var_name,
                                      dofs=dof_names)

        else:
            mesh, vdofs, levels = self.linearize(dofs,
                                                 linearization.min_level,
                                                 linearization.max_level,
                                                 linearization.eps)
            out[key] = Struct(name='output_data', mode='vertex',
                              data=vdofs, var_name=var_name, dofs=dof_names,
                              mesh=mesh, levels=levels)

        out = convert_complex_output(out)

        return out
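
A sketch of writing the result out; `field` and `dofs` are assumed to exist (a
field instance and an array of shape (n_nod, n_component)), and the output
filename is a placeholder:

out = field.create_output(dofs, 'u', key='u')
field.domain.mesh.write('field_u.vtk', out=out)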
Example #34
    def call(self):
        """
        Construct and call the homogenization engine according to options.
        """
        options = self.options

        opts = self.app_options
        conf = self.problem.conf
        coefs_name = opts.coefs
        coef_info = conf.get(opts.coefs, None,
                             'missing "%s" in problem description!'
                             % opts.coefs)

        if options.detect_band_gaps:
            # Compute band gaps coefficients and data.
            keys = [key for key in coef_info if key.startswith('band_gaps')]

        elif options.analyze_dispersion or options.phase_velocity:

            # Insert incident wave direction to coefficients that need it.
            for key, val in six.iteritems(coef_info):
                coef_opts = val.get('options', None)
                if coef_opts is None: continue

                if (('incident_wave_dir' in coef_opts)
                    and (coef_opts['incident_wave_dir'] is None)):
                    coef_opts['incident_wave_dir'] = opts.incident_wave_dir

            if options.analyze_dispersion:
                # Compute dispersion coefficients and data.
                keys = [key for key in coef_info
                        if key.startswith('dispersion')
                        or key.startswith('polarization_angles')]

            else:
                # Compute phase velocity and its requirements.
                keys = [key for key in coef_info
                        if key.startswith('phase_velocity')]

        else:
            # Compute only the eigenvalue problems.
            names = [req for req in conf.get(opts.requirements, [''])
                     if req.startswith('evp')]
            coefs = {'dummy' : {'requires' : names,
                                'class' : CoefDummy,}}
            conf.coefs_dummy = coefs
            coefs_name = 'coefs_dummy'
            keys = ['dummy']

        he_options = Struct(coefs=coefs_name, requirements=opts.requirements,
                            compute_only=keys,
                            post_process_hook=self.post_process_hook,
                            multiprocessing=False)

        volumes = {}
        if hasattr(opts, 'volumes') and (opts.volumes is not None):
            volumes.update(opts.volumes)
        elif hasattr(opts, 'volume') and (opts.volume is not None):
            volumes['total'] = opts.volume
        else:
            volumes['total'] = 1.0

        he = HomogenizationEngine(self.problem, options,
                                  app_options=he_options,
                                  volumes=volumes)
        coefs = he()

        coefs = Coefficients(**coefs.to_dict())

        coefs_filename = op.join(opts.output_dir, opts.coefs_filename)
        coefs.to_file_txt(coefs_filename + '.txt',
                          opts.tex_names,
                          opts.float_format)

        bg_keys = [key for key in coefs.to_dict()
                   if key.startswith('band_gaps')
                   or key.startswith('dispersion')]
        for ii, key in enumerate(bg_keys):
            bg = coefs.get(key)
            log_save_name = bg.get('log_save_name', None)
            if log_save_name is not None:
                filename = op.join(self.problem.output_dir, log_save_name)
                bg.save_log(filename, opts.float_format, bg)

        if options.plot:
            if options.detect_band_gaps:
                self.plot_band_gaps(coefs)

            elif options.analyze_dispersion:
                self.plot_dispersion(coefs)

        elif options.phase_velocity:
            keys = [key for key in coefs.to_dict()
                    if key.startswith('phase_velocity')]
            for key in keys:
                output('%s:' % key, coefs.get(key))

        return coefs
Example #35
    def test_hdf5_meshio(self):
        try:
            from igakit import igalib
        except ImportError:
            self.report('hdf5_meshio not-tested (missing igalib module)!')
            return True

        import tempfile
        import numpy as nm
        import scipy.sparse as sps
        from sfepy.discrete.fem.meshio import HDF5MeshIO
        from sfepy.base.base import Struct
        from sfepy.base.ioutils import Cached, Uncached, SoftLink, \
                                       DataSoftLink
        from sfepy.discrete.iga.domain import IGDomain
        from sfepy.discrete.iga.domain_generators import gen_patch_block_domain
        from sfepy.solvers.ts import TimeStepper
        from sfepy.discrete.fem import Mesh

        conf_dir = op.dirname(__file__)
        mesh0 = Mesh.from_file(data_dir +
                               '/meshes/various_formats/small3d.mesh',
                               prefix_dir=conf_dir)

        shape = [4, 4, 4]
        dims = [5, 5, 5]
        centre = [0, 0, 0]
        degrees = [2, 2, 2]

        nurbs, bmesh, regions = gen_patch_block_domain(dims, shape, centre,
                                                       degrees,
                                                       cp_mode='greville',
                                                       name='iga')
        ig_domain = IGDomain('iga', nurbs, bmesh, regions=regions)

        int_ar = nm.arange(4)

        data = {
            'list': range(4),
            'mesh1': mesh0,
            'mesh2': mesh0,
            'mesh3': Uncached(mesh0),
            'mesh4': SoftLink('/step0/__cdata/data/data/mesh2'),
            'mesh5': DataSoftLink('Mesh','/step0/__cdata/data/data/mesh1/data'),
            'mesh6': DataSoftLink('Mesh','/step0/__cdata/data/data/mesh2/data',
                mesh0),
            'mesh7': DataSoftLink('Mesh','/step0/__cdata/data/data/mesh1/data',
                True),
            'iga' : ig_domain,
            'cached1': Cached(1),
            'cached2': Cached(int_ar),
            'cached3': Cached(int_ar),
            'types': ( True, False, None ),
            'tuple': ('first string', 'druhý UTF8 řetězec'),
            'struct': Struct(
                double=nm.arange(4, dtype=float),
                int=nm.array([2,3,4,7]),
                sparse=sps.csr_matrix(nm.array([1,0,0,5]).
                                      reshape((2,2)))
             )
        }

        with tempfile.NamedTemporaryFile(suffix='.h5', delete=False) as fil:
            io = HDF5MeshIO(fil.name)
            ts = TimeStepper(0,1.,0.1, 10)

            io.write(fil.name, mesh0, {
                'cdata' : Struct(
                    mode='custom',
                    data=data,
                    unpack_markers=False
                )
            }, ts=ts)
            ts.advance()

            mesh = io.read()
            data['problem_mesh'] = DataSoftLink('Mesh', '/mesh', mesh)

            io.write(fil.name, mesh0, {
                'cdata' : Struct(
                    mode='custom',
                    data=data,
                    unpack_markers=True
                )
            }, ts=ts)

            cache = {'/mesh': mesh }
            fout = io.read_data(0, cache=cache)
            fout2 = io.read_data(1, cache=cache )
            out = fout['cdata']
            out2 = fout2['cdata']

            assert_(out['mesh7'] is out2['mesh7'],
                'These two meshes should be in fact the same object')

            assert_(out['mesh6'] is out2['mesh6'],
                'These two meshes should be in fact the same object')

            assert_(out['mesh5'] is not out2['mesh5'],
                "These two meshes shouldn't be in fact the same object")

            assert_(out['mesh1'] is out['mesh2'],
                'These two meshes should be in fact the same object')

            assert_(out['mesh1'] is out['mesh2'],
                'These two meshes should be in fact the same object')

            assert_(out['mesh4'] is out['mesh2'],
                'These two meshes should be in fact the same object')

            assert_(out['mesh5'] is not out['mesh2'],
                "These two meshes shouldn't be in fact the same object")

            assert_(out['mesh6'] is out['mesh2'],
                'These two meshes should be in fact the same object')

            assert_(out['mesh7'] is not out['mesh2'],
                "These two meshes shouldn't be in fact the same object")

            assert_(out['mesh3'] is not out['mesh2'],
                'These two meshes should be different objects')

            assert_(out['cached2'] is out['cached3'],
                'These two array should be the same object')

            assert_(out2['problem_mesh'] is mesh,
                'These two meshes should be the same objects')

            assert_(self._compare_meshes(out['mesh1'], mesh0),
                'Failed to restore mesh')

            assert_(self._compare_meshes(out['mesh3'], mesh0),
                'Failed to restore mesh')

            assert_((out['struct'].sparse == data['struct'].sparse).todense()
                    .all(), 'Sparse matrix restore failed')

            ts.advance()
            io.write(fil.name, mesh0, {
                    'cdata' : Struct(
                        mode='custom',
                        data=[
                            DataSoftLink('Mesh',
                                         '/step0/__cdata/data/data/mesh1/data',
                                         mesh0),
                            mesh0
                        ]
                    )
            }, ts=ts)
            out3 = io.read_data(2)['cdata']
            assert_(out3[0] is out3[1])

        os.remove(fil.name)

        #this property is not restored
        del data['iga'].nurbs.nurbs

        #not supporting comparison
        del data['iga']._bnf
        del out2['iga']._bnf

        #restoration of this property fails
        del data['iga'].vertex_set_bcs
        del out2['iga'].vertex_set_bcs

        #these softlinks have no information on how to unpack, so it must be
        #done manually
        data['mesh4'] = mesh0
        data['mesh5'] = mesh0
        data['mesh7'] = mesh0

        for key, val in six.iteritems(out2):
            self.report('comparing:', key)
            self.assert_equal(val, data[key])

        return True
Example #36
# ----------------------------
def ic_wrap(x, ic=None):
    return ghump(x - .3)


ic_fun = Function('ic_fun', ic_wrap)
ics = InitialCondition('ic', omega, {'u.0': ic_fun})

# ------------------
# | Create problem |
# ------------------
pb = Problem(problem_name,
             equations=eqs,
             conf=Struct(options={"save_times": save_timestn},
                         ics={},
                         ebcs={},
                         epbcs={},
                         lcbcs={},
                         materials={}),
             active_only=False)
pb.setup_output(output_dir=output_folder, output_format=output_format)
pb.set_ics(Conditions([ics]))

# ------------------
# | Create limiter |
# ------------------
limiter = MomentLimiter1D

# ---------------------------
# | Set time discretization |
# ---------------------------
CFL = .1
Example #37
    def get_evaluate_cache(self, cache=None, share_geometry=False,
                           verbose=False):
        """
        Get the evaluate cache for :func:`Variable.evaluate_at()
        <sfepy.discrete.variables.Variable.evaluate_at()>`.

        Parameters
        ----------
        cache : Struct instance, optional
            Optionally, use the provided instance to store the cache data.
        share_geometry : bool
            Set to True to indicate that all the evaluations will work on the
            same region. Certain data are then computed only for the first
            probe and cached.
        verbose : bool
            If False, reduce verbosity.

        Returns
        -------
        cache : Struct instance
            The evaluate cache.
        """
        import time

        try:
            from scipy.spatial import cKDTree as KDTree
        except ImportError:
            from scipy.spatial import KDTree

        from sfepy.discrete.fem.geometry_element import create_geometry_elements

        if cache is None:
            cache = Struct(name='evaluate_cache')

        tt = time.clock()
        if (cache.get('cmesh', None) is None) or not share_geometry:
            mesh = self.create_mesh(extra_nodes=False)
            cache.cmesh = cmesh = mesh.cmesh

            gels = create_geometry_elements()

            cmesh.set_local_entities(gels)
            cmesh.setup_entities()

            cache.centroids = cmesh.get_centroids(cmesh.tdim)

            if self.gel.name != '3_8':
                cache.normals0 = cmesh.get_facet_normals()
                cache.normals1 = None

            else:
                cache.normals0 = cmesh.get_facet_normals(0)
                cache.normals1 = cmesh.get_facet_normals(1)

        output('cmesh setup: %f s' % (time.clock()-tt), verbose=verbose)

        tt = time.clock()
        if (cache.get('kdtree', None) is None) or not share_geometry:
            cache.kdtree = KDTree(cmesh.coors)

        output('kdtree: %f s' % (time.clock()-tt), verbose=verbose)

        return cache
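
A sketch of sharing one cache across repeated point evaluations; `field`,
`variable` and `coors` are assumed to exist (a field, a corresponding
FieldVariable and an array of point coordinates):

cache = field.get_evaluate_cache(share_geometry=True)
vals = variable.evaluate_at(coors, cache=cache)  # the cache can be reused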
Example #38
class ProblemDefinition(Struct):
    """
    Problem definition, the top-level class holding all data necessary to solve
    a problem.

    It can be constructed from a :class:`ProblemConf` instance using
    `ProblemDefinition.from_conf()` or directly from a problem
    description file using `ProblemDefinition.from_conf_file()`

    For interactive use, the constructor requires only the `equations`,
    `nls` and `ls` keyword arguments.
    """
    @staticmethod
    def from_conf_file(conf_filename,
                       required=None,
                       other=None,
                       init_fields=True,
                       init_equations=True,
                       init_solvers=True):

        _required, _other = get_standard_keywords()
        if required is None:
            required = _required
        if other is None:
            other = _other

        conf = ProblemConf.from_file(conf_filename, required, other)

        obj = ProblemDefinition.from_conf(conf,
                                          init_fields=init_fields,
                                          init_equations=init_equations,
                                          init_solvers=init_solvers)
        return obj

    @staticmethod
    def from_conf(conf,
                  init_fields=True,
                  init_equations=True,
                  init_solvers=True):
        if conf.options.get('absolute_mesh_path', False):
            conf_dir = None
        else:
            conf_dir = op.dirname(conf.funmod.__file__)

        functions = Functions.from_conf(conf.functions)

        mesh = Mesh.from_file(conf.filename_mesh, prefix_dir=conf_dir)

        trans_mtx = conf.options.get('mesh_coors_transform', None)

        if trans_mtx is not None:
            mesh.transform_coors(trans_mtx)

        domain = Domain(mesh.name, mesh)
        if conf.options.get('ulf', False):
            domain.mesh.coors_act = domain.mesh.coors.copy()

        obj = ProblemDefinition('problem_from_conf',
                                conf=conf,
                                functions=functions,
                                domain=domain,
                                auto_conf=False,
                                auto_solvers=False)

        obj.set_regions(conf.regions, obj.functions)

        obj.clear_equations()

        if init_fields:
            obj.set_fields(conf.fields)

            if init_equations:
                obj.set_equations(conf.equations, user={'ts': obj.ts})

        if init_solvers:
            obj.set_solvers(conf.solvers, conf.options)

        return obj

    def __init__(self,
                 name,
                 conf=None,
                 functions=None,
                 domain=None,
                 fields=None,
                 equations=None,
                 auto_conf=True,
                 nls=None,
                 ls=None,
                 ts=None,
                 auto_solvers=True):
        self.name = name
        self.conf = conf
        self.functions = functions

        self.reset()

        self.ts = get_default(ts, self.get_default_ts())

        if auto_conf:
            if equations is None:
                raise ValueError('missing equations in auto_conf mode!')

            if fields is None:
                variables = equations.variables
                fields = {}
                for field in [var.get_field() for var in variables]:
                    fields[field.name] = field

            if domain is None:
                domain = fields.values()[0].domain

            if conf is None:
                self.conf = Struct(ebcs={}, epbcs={}, lcbcs={})

        self.equations = equations
        self.fields = fields
        self.domain = domain

        if auto_solvers:
            if ls is None:
                ls = ScipyDirect({})

            if nls is None:
                nls = Newton({}, lin_solver=ls)

            ev = self.get_evaluator()
            nls.fun = ev.eval_residual
            nls.fun_grad = ev.eval_tangent_matrix

            self.solvers = Struct(name='solvers', ls=ls, nls=nls)

        self.setup_output()

    def reset(self):
        if hasattr(self.conf, 'options'):
            self.setup_hooks(self.conf.options)

        else:
            self.setup_hooks()

        self.mtx_a = None
        self.solvers = None
        self.clear_equations()

    def setup_hooks(self, options=None):
        """
        Setup various hooks (user-defined functions), as given in `options`.

        Supported hooks:

          - `matrix_hook`

            - check/modify tangent matrix in each nonlinear solver
              iteration

          - `nls_iter_hook`

            - called prior to every iteration of nonlinear solver, if the
              solver supports that
            - takes the ProblemDefinition instance (`self`) as the first
              argument
        """
        hook_names = ['nls_iter_hook', 'matrix_hook']
        for hook_name in hook_names:
            setattr(self, hook_name, None)
            if options is not None:
                hook = options.get(hook_name, None)
                if hook is not None:
                    hook = self.conf.get_function(hook)
                    setattr(self, hook_name, hook)

        iter_hook = self.nls_iter_hook
        if iter_hook is not None:
            self.nls_iter_hook = lambda *args, **kwargs: \
                                 iter_hook(self, *args, **kwargs)

    def copy(self, name=None):
        """
        Make a copy of ProblemDefinition.
        """
        if name is None:
            name = self.name + '_copy'
        obj = ProblemDefinition(name,
                                conf=self.conf,
                                functions=self.functions,
                                domain=self.domain,
                                fields=self.fields,
                                equations=self.equations,
                                auto_conf=False,
                                auto_solvers=False)

        obj.ebcs = self.ebcs
        obj.epbcs = self.epbcs
        obj.lcbcs = self.lcbcs

        obj.set_solvers(self.conf.solvers, self.conf.options)

        obj.setup_output(output_filename_trunk=self.ofn_trunk,
                         output_dir=self.output_dir,
                         output_format=self.output_format,
                         file_per_var=self.file_per_var,
                         linearization=self.linearization)

        return obj

    def create_subproblem(self, var_names, known_var_names):
        """
        Create a sub-problem with equations containing only terms with the
        given virtual variables.

        Parameters
        ----------
        var_names : list
            The list of names of virtual variables.
        known_var_names : list
            The list of names of (already) known state variables.

        Returns
        -------
        subpb : ProblemDefinition instance
            The sub-problem.
        """
        subpb = ProblemDefinition(self.name + '_' + '_'.join(var_names),
                                  conf=self.conf,
                                  functions=self.functions,
                                  domain=self.domain,
                                  fields=self.fields,
                                  auto_conf=False,
                                  auto_solvers=False)
        subpb.set_solvers(self.conf.solvers, self.conf.options)

        subeqs = self.equations.create_subequations(var_names, known_var_names)
        subpb.set_equations_instance(subeqs, keep_solvers=True)

        return subpb

    def setup_default_output(self, conf=None, options=None):
        """
        Provide default values to `ProblemDefinition.setup_output()`
        from `conf.options` and `options`.
        """
        conf = get_default(conf, self.conf)

        if options and getattr(options, 'output_filename_trunk', None):
            default_output_dir, of = op.split(options.output_filename_trunk)
            default_trunk = io.get_trunk(of)

        else:
            default_trunk = None
            default_output_dir = conf.options.get('output_dir', None)

        if options and getattr(options, 'output_format', None):
            default_output_format = options.output_format

        else:
            default_output_format = conf.options.get('output_format', None)

        default_file_per_var = conf.options.get('file_per_var', None)
        default_float_format = conf.options.get('float_format', None)
        default_linearization = Struct(kind='strip')

        self.setup_output(output_filename_trunk=default_trunk,
                          output_dir=default_output_dir,
                          file_per_var=default_file_per_var,
                          output_format=default_output_format,
                          float_format=default_float_format,
                          linearization=default_linearization)

    def setup_output(self,
                     output_filename_trunk=None,
                     output_dir=None,
                     output_format=None,
                     float_format=None,
                     file_per_var=None,
                     linearization=None):
        """
        Set output options to the given values, or use the defaults for
        each argument that is None.
        """
        self.output_modes = {'vtk': 'sequence', 'h5': 'single'}

        self.ofn_trunk = get_default(output_filename_trunk,
                                     io.get_trunk(self.domain.name))

        self.set_output_dir(output_dir)

        self.output_format = get_default(output_format, 'vtk')
        self.float_format = get_default(float_format, None)
        self.file_per_var = get_default(file_per_var, False)
        self.linearization = get_default(linearization, Struct(kind='strip'))

        if ((self.output_format == 'h5')
                and (self.linearization.kind == 'adaptive')):
            self.linearization.kind = None

    def set_output_dir(self, output_dir=None):
        """
        Set the directory for output files.

        The directory is created if it does not exist.
        """
        self.output_dir = get_default(output_dir, os.curdir)

        if self.output_dir and not op.exists(self.output_dir):
            os.makedirs(self.output_dir)

    def set_regions(self,
                    conf_regions=None,
                    conf_materials=None,
                    functions=None):
        conf_regions = get_default(conf_regions, self.conf.regions)
        functions = get_default(functions, self.functions)

        self.domain.create_regions(conf_regions, functions)

    def set_materials(self, conf_materials=None):
        """
        Set definition of materials.
        """
        self.conf_materials = get_default(conf_materials, self.conf.materials)

    def select_materials(self, material_names, only_conf=False):
        if type(material_names) == dict:
            conf_materials = transform_materials(material_names)

        else:
            conf_materials = select_by_names(self.conf.materials,
                                             material_names)

        if not only_conf:
            self.set_materials(conf_materials)

        return conf_materials

    def set_fields(self, conf_fields=None):
        conf_fields = get_default(conf_fields, self.conf.fields)
        self.fields = fields_from_conf(conf_fields, self.domain.regions)

    def set_variables(self, conf_variables=None):
        """
        Set definition of variables.
        """
        self.conf_variables = get_default(conf_variables, self.conf.variables)
        self.reset()

    def select_variables(self, variable_names, only_conf=False):
        if type(variable_names) == dict:
            conf_variables = transform_variables(variable_names)

        else:
            conf_variables = select_by_names(self.conf.variables,
                                             variable_names)

        if not only_conf:
            self.set_variables(conf_variables)

        return conf_variables

    def clear_equations(self):
        self.integrals = None
        self.equations = None
        self.ebcs = None
        self.epbcs = None
        self.lcbcs = None

    def set_equations(self,
                      conf_equations=None,
                      user=None,
                      keep_solvers=False,
                      make_virtual=False):
        """
        Set equations of the problem using the `equations` problem
        description entry.

        Fields and Regions have to be already set.
        """
        conf_equations = get_default(conf_equations,
                                     self.conf.get('equations', None))

        self.set_variables()
        variables = Variables.from_conf(self.conf_variables, self.fields)

        self.set_materials()
        materials = Materials.from_conf(self.conf_materials, self.functions)

        self.integrals = self.get_integrals()
        equations = Equations.from_conf(conf_equations,
                                        variables,
                                        self.domain.regions,
                                        materials,
                                        self.integrals,
                                        user=user,
                                        make_virtual=make_virtual)

        self.equations = equations

        if not keep_solvers:
            self.solvers = None

    def set_equations_instance(self, equations, keep_solvers=False):
        """
        Set equations of the problem to `equations`.
        """
        self.mtx_a = None
        self.clear_equations()
        self.equations = equations

        if not keep_solvers:
            self.solvers = None

    def set_solvers(self, conf_solvers=None, options=None):
        """
        Choose which solvers should be used. If solvers are not set in
        `options`, use the first suitable ones found in `conf_solvers`.
        """
        conf_solvers = get_default(conf_solvers, self.conf.solvers)
        self.solver_confs = {}
        for key, val in conf_solvers.iteritems():
            self.solver_confs[val.name] = val

        def _find_suitable(prefix):
            for key, val in self.solver_confs.iteritems():
                if val.kind.find(prefix) == 0:
                    return val
            return None

        def _get_solver_conf(kind):
            try:
                key = options[kind]
                conf = self.solver_confs[key]
            except:
                conf = _find_suitable(kind + '.')
            return conf

        self.ts_conf = _get_solver_conf('ts')
        if self.ts_conf is None:
            self.ts_conf = Struct(name='no ts', kind='ts.stationary')

        self.nls_conf = _get_solver_conf('nls')
        self.ls_conf = _get_solver_conf('ls')

        info = 'using solvers:'
        if self.ts_conf:
            info += '\n                ts: %s' % self.ts_conf.name
        if self.nls_conf:
            info += '\n               nls: %s' % self.nls_conf.name
        if self.ls_conf:
            info += '\n                ls: %s' % self.ls_conf.name
        if info != 'using solvers:':
            output(info)
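
    # A hedged sketch of matching problem description entries (the concrete
    # solver kinds and options are assumptions, not taken from this file):
    #
    #     solver_0 = {
    #         'name' : 'ls',
    #         'kind' : 'ls.scipy_direct',
    #     }
    #     solver_1 = {
    #         'name' : 'newton',
    #         'kind' : 'nls.newton',
    #         'i_max' : 10,
    #         'eps_a' : 1e-10,
    #     }
    #     options = {
    #         'ls' : 'ls',
    #         'nls' : 'newton',
    #     }
    #
    # With ``options`` given, _get_solver_conf() picks the named solver
    # configurations; otherwise the first one whose kind starts with 'ls.',
    # 'nls.' or 'ts.' is used.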

    def set_solvers_instances(self, ls=None, nls=None):
        """
        Set the instances of linear and nonlinear solvers that will be
        used in `ProblemDefinition.solve()` call.
        """
        if (ls is not None) and (nls is not None):
            if not (nls.lin_solver is ls):
                raise ValueError('linear solver not used in nonlinear!')

        self.solvers = Struct(name='solvers', ls=ls, nls=nls)

    def get_solver_conf(self, name):
        return self.solver_confs[name]

    def get_default_ts(self,
                       t0=None,
                       t1=None,
                       dt=None,
                       n_step=None,
                       step=None):
        t0 = get_default(t0, 0.0)
        t1 = get_default(t1, 1.0)
        dt = get_default(dt, 1.0)
        n_step = get_default(n_step, 1)

        ts = TimeStepper(t0, t1, dt, n_step, step=step)

        return ts

    def get_integrals(self, names=None):
        """
        Get integrals, initialized from problem configuration if available.

        Parameters
        ----------
        names : list, optional
            If given, only the named integrals are returned.

        Returns
        -------
        integrals : Integrals instance
            The requested integrals.
        """
        conf_integrals = self.conf.get('integrals', {})
        integrals = Integrals.from_conf(conf_integrals)

        if names is not None:
            integrals.update(
                [integrals[ii] for ii in names if ii in integrals.names])

        return integrals

    def update_time_stepper(self, ts):
        if ts is not None:
            self.ts = ts

    def update_materials(self, ts=None, mode='normal'):
        """
        Update materials used in equations.
        """
        if self.equations is not None:
            self.update_time_stepper(ts)
            self.equations.time_update_materials(self.ts,
                                                 mode=mode,
                                                 problem=self)

    def update_equations(self,
                         ts=None,
                         ebcs=None,
                         epbcs=None,
                         lcbcs=None,
                         functions=None,
                         create_matrix=False):
        """
        Update equations for current time step.

        The tangent matrix graph is automatically recomputed if the set
        of active essential or periodic boundary conditions changed
        w.r.t. the previous time step.

        Parameters
        ----------
        ts : TimeStepper instance, optional
            The time stepper. If not given, `self.ts` is used.
        ebcs : Conditions instance, optional
            The essential (Dirichlet) boundary conditions. If not given,
            `self.ebcs` are used.
        epbcs : Conditions instance, optional
            The periodic boundary conditions. If not given, `self.epbcs`
            are used.
        lcbcs : Conditions instance, optional
            The linear combination boundary conditions. If not given,
            `self.lcbcs` are used.
        functions : Functions instance, optional
            The user functions for boundary conditions, materials,
            etc. If not given, `self.functions` are used.
        """
        self.update_time_stepper(ts)
        functions = get_default(functions, self.functions)

        graph_changed = self.equations.time_update(self.ts, ebcs, epbcs, lcbcs,
                                                   functions, self)
        self.graph_changed = graph_changed

        if graph_changed or (self.mtx_a is None) or create_matrix:
            self.mtx_a = self.equations.create_matrix_graph()
            ## import sfepy.base.plotutils as plu
            ## plu.spy(self.mtx_a)
            ## plu.plt.show()

    def set_bcs(self, ebcs=None, epbcs=None, lcbcs=None):
        """
        Update boundary conditions.
        """
        if isinstance(ebcs, Conditions):
            self.ebcs = ebcs

        else:
            conf_ebc = get_default(ebcs, self.conf.ebcs)
            self.ebcs = Conditions.from_conf(conf_ebc, self.domain.regions)

        if isinstance(epbcs, Conditions):
            self.epbcs = epbcs

        else:
            conf_epbc = get_default(epbcs, self.conf.epbcs)
            self.epbcs = Conditions.from_conf(conf_epbc, self.domain.regions)

        if isinstance(lcbcs, Conditions):
            self.lcbcs = lcbcs

        else:
            conf_lcbc = get_default(lcbcs, self.conf.lcbcs)
            self.lcbcs = Conditions.from_conf(conf_lcbc, self.domain.regions)

    def time_update(self,
                    ts=None,
                    ebcs=None,
                    epbcs=None,
                    lcbcs=None,
                    functions=None,
                    create_matrix=False):
        self.set_bcs(ebcs, epbcs, lcbcs)
        self.update_equations(ts, self.ebcs, self.epbcs, self.lcbcs, functions,
                              create_matrix)

    def setup_ic(self, conf_ics=None, functions=None):
        conf_ics = get_default(conf_ics, self.conf.ics)
        ics = Conditions.from_conf(conf_ics, self.domain.regions)

        functions = get_default(functions, self.functions)

        self.equations.setup_initial_conditions(ics, functions)

    def select_bcs(self,
                   ebc_names=None,
                   epbc_names=None,
                   lcbc_names=None,
                   create_matrix=False):

        if ebc_names is not None:
            conf_ebc = select_by_names(self.conf.ebcs, ebc_names)
        else:
            conf_ebc = None

        if epbc_names is not None:
            conf_epbc = select_by_names(self.conf.epbcs, epbc_names)
        else:
            conf_epbc = None

        if lcbc_names is not None:
            conf_lcbc = select_by_names(self.conf.lcbcs, lcbc_names)
        else:
            conf_lcbc = None

        self.set_bcs(conf_ebc, conf_epbc, conf_lcbc)
        self.update_equations(self.ts, self.ebcs, self.epbcs, self.lcbcs,
                              self.functions, create_matrix)

    def get_timestepper(self):
        return self.ts

    def create_state(self):
        return State(self.equations.variables)

    def get_mesh_coors(self):
        return self.domain.get_mesh_coors()

    def set_mesh_coors(self,
                       coors,
                       update_fields=False,
                       actual=False,
                       clear_all=True):
        """
        Set mesh coordinates.

        Parameters
        ----------
        coors : array
            The new coordinates.
        update_fields : bool
            If True, update also coordinates of fields.
        actual : bool
            If True, update the actual configuration coordinates,
            otherwise the undeformed configuration ones.
        """
        fea.set_mesh_coors(self.domain,
                           self.fields,
                           coors,
                           update_fields=update_fields,
                           actual=actual,
                           clear_all=clear_all)

    def refine_uniformly(self, level):
        """
        Refine the mesh uniformly `level`-times.

        Notes
        -----
        This operation resets almost everything (fields, equations, ...)
        - it is roughly equivalent to creating a new ProblemDefinition
        instance with the refined mesh.
        """
        if level == 0: return

        domain = self.domain
        for ii in range(level):
            domain = domain.refine()

        self.domain = domain
        self.set_regions(self.conf.regions, self.functions)
        self.clear_equations()

        self.set_fields(self.conf.fields)
        self.set_equations(self.conf.equations, user={'ts': self.ts})

    def get_dim(self, get_sym=False):
        """Returns mesh dimension, symmetric tensor dimension (if `get_sym` is
        True).
        """
        dim = self.domain.mesh.dim
        if get_sym:
            return dim, (dim + 1) * dim / 2
        else:
            return dim
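
    # For example (a hedged illustration): in 3D, get_dim(get_sym=True)
    # returns (3, 6), since (3 + 1) * 3 / 2 = 6 independent components of a
    # symmetric second order tensor; in 2D it returns (2, 3).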

    def init_time(self, ts):
        self.update_time_stepper(ts)
        self.equations.init_time(ts)
        self.update_materials(mode='force')

    def advance(self, ts=None):
        self.update_time_stepper(ts)
        self.equations.advance(self.ts)

    def save_state(self,
                   filename,
                   state=None,
                   out=None,
                   fill_value=None,
                   post_process_hook=None,
                   linearization=None,
                   file_per_var=False,
                   **kwargs):
        """
        Parameters
        ----------
        file_per_var : bool or None
            If True, data of each variable are stored in a separate
            file. If None, it is set to the application option value.
        linearization : Struct or None
            The linearization configuration for higher order
            approximations. If its kind is 'adaptive', `file_per_var` is
            assumed True.
        """
        linearization = get_default(linearization, self.linearization)
        if linearization.kind != 'adaptive':
            file_per_var = get_default(file_per_var, self.file_per_var)

        else:
            file_per_var = True

        extend = not file_per_var
        if (out is None) and (state is not None):
            out = state.create_output_dict(fill_value=fill_value,
                                           extend=extend,
                                           linearization=linearization)

            if post_process_hook is not None:
                out = post_process_hook(out, self, state, extend=extend)

        if linearization.kind == 'adaptive':
            for key, val in out.iteritems():
                mesh = val.get('mesh', self.domain.mesh)
                aux = io.edit_filename(filename, suffix='_' + val.var_name)
                mesh.write(aux,
                           io='auto',
                           out={key: val},
                           float_format=self.float_format,
                           **kwargs)
                if hasattr(val, 'levels'):
                    output('max. refinement per group:', val.levels)

        elif file_per_var:
            meshes = {}

            if self.equations is None:
                varnames = {}
                for key, val in out.iteritems():
                    varnames[val.var_name] = 1
                varnames = varnames.keys()
                outvars = self.create_variables(varnames)
                itervars = outvars.__iter__
            else:
                itervars = self.equations.variables.iter_state

            for var in itervars():
                rname = var.field.region.name
                if meshes.has_key(rname):
                    mesh = meshes[rname]
                else:
                    mesh = Mesh.from_region(var.field.region,
                                            self.domain.mesh,
                                            localize=True,
                                            is_surface=var.is_surface)
                    meshes[rname] = mesh

                vout = {}
                for key, val in out.iteritems():
                    try:
                        if val.var_name == var.name:
                            vout[key] = val

                    except AttributeError:
                        msg = 'missing var_name attribute in output!'
                        raise ValueError(msg)

                aux = io.edit_filename(filename, suffix='_' + var.name)
                mesh.write(aux,
                           io='auto',
                           out=vout,
                           float_format=self.float_format,
                           **kwargs)
        else:
            self.domain.mesh.write(filename,
                                   io='auto',
                                   out=out,
                                   float_format=self.float_format,
                                   **kwargs)

    def save_ebc(self, filename, force=True, default=0.0):
        """
        Save essential boundary conditions as state variables.
        """
        output('saving ebc...')
        variables = self.get_variables(auto_create=True)

        ebcs = Conditions.from_conf(self.conf.ebcs, self.domain.regions)
        epbcs = Conditions.from_conf(self.conf.epbcs, self.domain.regions)

        try:
            variables.equation_mapping(ebcs,
                                       epbcs,
                                       self.ts,
                                       self.functions,
                                       problem=self)
        except:
            output('cannot make equation mapping!')
            raise

        state = self.create_state()
        state.fill(default)

        if force:
            vals = dict_from_keys_init(variables.state)
            for ii, key in enumerate(vals.iterkeys()):
                vals[key] = ii + 1

            state.apply_ebc(force_values=vals)

        else:
            state.apply_ebc()

        out = state.create_output_dict(extend=True)
        self.save_state(filename, out=out, fill_value=default)
        output('...done')

    def save_regions(self, filename_trunk, region_names=None):
        """
        Save regions as meshes.

        Parameters
        ----------
        filename_trunk : str
            The output filename without suffix.
        region_names : list, optional
            If given, only the listed regions are saved.
        """
        filename = '%s.mesh' % filename_trunk
        self.domain.save_regions(filename, region_names=region_names)

    def save_regions_as_groups(self, filename_trunk, region_names=None):
        """
        Save regions in a single mesh but mark them by using different
        element/node group numbers.

        See :func:`Domain.save_regions_as_groups()
        <sfepy.fem.domain.Domain.save_regions_as_groups()>` for more details.

        Parameters
        ----------
        filename_trunk : str
            The output filename without suffix.
        region_names : list, optional
            If given, only the listed regions are saved.
        """
        filename = '%s.%s' % (filename_trunk, self.output_format)
        self.domain.save_regions_as_groups(filename, region_names=region_names)

    def save_field_meshes(self, filename_trunk):

        output('saving field meshes...')
        for field in self.fields:
            output(field.name)
            field.write_mesh(filename_trunk + '_%s')
        output('...done')

    def get_evaluator(self, reuse=False):
        """
        Either create a new Evaluator instance (reuse == False),
        or return an existing instance, created in a preceding call to
        ProblemDefinition.init_solvers().
        """
        if reuse:
            try:
                ev = self.evaluator
            except AttributeError:
                raise AttributeError('call ProblemDefinition.init_solvers() or'\
                      ' set reuse to False!')
        else:
            if self.equations.variables.has_lcbc:
                ev = LCBCEvaluator(self, matrix_hook=self.matrix_hook)
            else:
                ev = BasicEvaluator(self, matrix_hook=self.matrix_hook)

        self.evaluator = ev

        return ev

    def init_solvers(self,
                     nls_status=None,
                     ls_conf=None,
                     nls_conf=None,
                     mtx=None,
                     presolve=False):
        """Create and initialize solvers."""
        ls_conf = get_default(ls_conf, self.ls_conf,
                              'you must set linear solver!')

        nls_conf = get_default(nls_conf, self.nls_conf,
                               'you must set nonlinear solver!')

        if presolve:
            tt = time.clock()
        if ls_conf.get('needs_problem_instance', False):
            extra_args = {'problem': self}
        else:
            extra_args = {}

        ls = Solver.any_from_conf(ls_conf,
                                  mtx=mtx,
                                  presolve=presolve,
                                  **extra_args)
        if presolve:
            tt = time.clock() - tt
            output('presolve: %.2f [s]' % tt)

        if nls_conf.get('needs_problem_instance', False):
            extra_args = {'problem': self}
        else:
            extra_args = {}
        ev = self.get_evaluator()

        if self.conf.options.get('ulf', False):
            self.nls_iter_hook = ev.new_ulf_iteration

        nls = Solver.any_from_conf(nls_conf,
                                   fun=ev.eval_residual,
                                   fun_grad=ev.eval_tangent_matrix,
                                   lin_solver=ls,
                                   iter_hook=self.nls_iter_hook,
                                   status=nls_status,
                                   **extra_args)

        self.solvers = Struct(name='solvers', ls=ls, nls=nls)

    def get_solvers(self):
        return getattr(self, 'solvers', None)

    def is_linear(self):
        nls_conf = get_default(None, self.nls_conf,
                               'you must set nonlinear solver!')
        aux = Solver.any_from_conf(nls_conf)
        if aux.conf.problem == 'linear':
            return True
        else:
            return False

    def set_linear(self, is_linear):
        nls_conf = get_default(None, self.nls_conf,
                               'you must set nonlinear solver!')
        if is_linear:
            nls_conf.problem = 'linear'
        else:
            nls_conf.problem = 'nonlinear'

    def solve(self,
              state0=None,
              nls_status=None,
              ls_conf=None,
              nls_conf=None,
              force_values=None,
              var_data=None):
        """Solve self.equations in current time step.

        Parameters
        ----------
        var_data : dict
            A dictionary of {variable_name : data vector} used to initialize
            parameter variables.
        """
        solvers = self.get_solvers()
        if solvers is None:
            self.init_solvers(nls_status, ls_conf, nls_conf)
            solvers = self.get_solvers()

        if state0 is None:
            state0 = State(self.equations.variables)

        else:
            if isinstance(state0, nm.ndarray):
                state0 = State(self.equations.variables, vec=state0)

        self.equations.set_data(var_data, ignore_unknown=True)

        self.update_materials()
        state0.apply_ebc(force_values=force_values)

        vec0 = state0.get_reduced()

        vec = solvers.nls(vec0)
        state = state0.copy(preserve_caches=True)
        state.set_reduced(vec, preserve_caches=True)

        return state

    def create_evaluable(self,
                         expression,
                         try_equations=True,
                         auto_init=False,
                         preserve_caches=False,
                         copy_materials=True,
                         integrals=None,
                         ebcs=None,
                         epbcs=None,
                         lcbcs=None,
                         ts=None,
                         functions=None,
                         mode='eval',
                         var_dict=None,
                         strip_variables=True,
                         extra_args=None,
                         verbose=True,
                         **kwargs):
        """
        Create evaluable object (equations and corresponding variables)
        from the `expression` string. Convenience function calling
        :func:`create_evaluable()
        <sfepy.fem.evaluate.create_evaluable()>` with defaults provided
        by the ProblemDefinition instance `self`.

        The evaluable can be repeatedly evaluated by calling
        :func:`eval_equations() <sfepy.fem.evaluate.eval_equations()>`,
        e.g. for different values of variables.

        Parameters
        ----------
        expression : str
            The expression to evaluate.
        try_equations : bool
            Try to get variables from `self.equations`. If this fails,
            variables can either be provided in `var_dict`, as keyword
            arguments, or are created automatically according to the
            expression.
        auto_init : bool
            Set values of all variables to all zeros.
        preserve_caches : bool
            If True, do not invalidate evaluate caches of variables.
        copy_materials : bool
            Work with a copy of `self.equations.materials` instead of
            reusing them. Safe but can be slow.
        integrals : Integrals instance, optional
            The integrals to be used. Automatically created as needed if
            not given.
        ebcs : Conditions instance, optional
            The essential (Dirichlet) boundary conditions for 'weak'
            mode. If not given, `self.ebcs` are used.
        epbcs : Conditions instance, optional
            The periodic boundary conditions for 'weak'
            mode. If not given, `self.epbcs` are used.
        lcbcs : Conditions instance, optional
            The linear combination boundary conditions for 'weak'
            mode. If not given, `self.lcbcs` are used.
        ts : TimeStepper instance, optional
            The time stepper. If not given, `self.ts` is used.
        functions : Functions instance, optional
            The user functions for boundary conditions, materials
            etc. If not given, `self.functions` are used.
        mode : one of 'eval', 'el_avg', 'qp', 'weak'
            The evaluation mode - 'weak' means the finite element
            assembling, 'qp' requests the values in quadrature points,
            'el_avg' element averages and 'eval' means integration over
            each term region.
        var_dict : dict, optional
            The variables (dictionary of (variable name) : (Variable instance))
            to be used in the expression. Use this if the name of a variable
            conflicts with one of the parameters of this method.
        strip_variables : bool
            If False, the variables in `var_dict` or `kwargs` not present in
            the expression are added to the actual variables as a context.
        extra_args : dict, optional
            Extra arguments to be passed to terms in the expression.
        verbose : bool
            If False, reduce verbosity.
        **kwargs : keyword arguments
            Additional variables can be passed as keyword arguments, see
            `var_dict`.

        Returns
        -------
        equations : Equations instance
            The equations that can be evaluated.
        variables : Variables instance
            The corresponding variables. Set their values and use
            :func:`eval_equations() <sfepy.fem.evaluate.eval_equations()>`.

        Examples
        --------
        `problem` is ProblemDefinition instance.

        >>> out = problem.create_evaluable('dq_state_in_volume_qp.i1.Omega(u)')
        >>> equations, variables = out

        `vec` is a vector of coefficients compatible with the field
        of 'u' - let's use all ones.

        >>> vec = nm.ones((variables['u'].n_dof,), dtype=nm.float64)
        >>> variables['u'].set_data(vec)
        >>> vec_qp = eval_equations(equations, variables, mode='qp')

        Try another vector:

        >>> vec = 3 * nm.ones((variables['u'].n_dof,), dtype=nm.float64)
        >>> variables['u'].set_data(vec)
        >>> vec_qp = eval_equations(equations, variables, mode='qp')
        """
        from sfepy.fem.equations import get_expression_arg_names

        variables = get_default(var_dict, {})
        var_context = get_default(var_dict, {})

        if try_equations and self.equations is not None:
            # Make a copy, so that possible variable caches are preserved.
            for key, var in self.equations.variables.as_dict().iteritems():
                if key in variables:
                    continue
                var = var.copy(name=key)
                if not preserve_caches:
                    var.clear_evaluate_cache()
                variables[key] = var

        elif var_dict is None:
            possible_var_names = get_expression_arg_names(expression)
            variables = self.create_variables(possible_var_names)

        materials = self.get_materials()
        if materials is not None:
            if copy_materials:
                materials = materials.semideep_copy()

            else:
                materials = Materials(objs=materials._objs)

        else:
            possible_mat_names = get_expression_arg_names(expression)
            materials = self.create_materials(possible_mat_names)

        _kwargs = copy(kwargs)
        for key, val in kwargs.iteritems():
            if isinstance(val, Variable):
                if val.name != key:
                    msg = 'inconsistent variable name! (%s == %s)' \
                          % (val.name, key)
                    raise ValueError(msg)
                var_context[val.name] = variables[val.name] = val
                _kwargs.pop(key)

            elif isinstance(val, Material):
                if val.name != key:
                    msg = 'inconsistent material name! (%s == %s)' \
                          % (val.name, key)
                    raise ValueError(msg)
                materials[val.name] = val
                _kwargs.pop(key)

        kwargs = _kwargs

        ebcs = get_default(ebcs, self.ebcs)
        epbcs = get_default(epbcs, self.epbcs)
        lcbcs = get_default(lcbcs, self.lcbcs)
        ts = get_default(ts, self.get_timestepper())
        functions = get_default(functions, self.functions)
        integrals = get_default(integrals, self.get_integrals())

        out = create_evaluable(expression,
                               self.fields,
                               materials,
                               variables.itervalues(),
                               integrals,
                               ebcs=ebcs,
                               epbcs=epbcs,
                               lcbcs=lcbcs,
                               ts=ts,
                               functions=functions,
                               auto_init=auto_init,
                               mode=mode,
                               extra_args=extra_args,
                               verbose=verbose,
                               kwargs=kwargs)

        if not strip_variables:
            variables = out[1]
            variables.extend([
                var for var in var_context.itervalues() if var not in variables
            ])

        equations = out[0]
        mode = 'update' if not copy_materials else 'normal'
        equations.time_update_materials(self.ts,
                                        mode=mode,
                                        problem=self,
                                        verbose=verbose)

        return out

    def evaluate(self,
                 expression,
                 try_equations=True,
                 auto_init=False,
                 preserve_caches=False,
                 copy_materials=True,
                 integrals=None,
                 ebcs=None,
                 epbcs=None,
                 lcbcs=None,
                 ts=None,
                 functions=None,
                 mode='eval',
                 dw_mode='vector',
                 term_mode=None,
                 var_dict=None,
                 strip_variables=True,
                 ret_variables=False,
                 verbose=True,
                 extra_args=None,
                 **kwargs):
        """
        Evaluate an expression, convenience wrapper of
        :func:`ProblemDefinition.create_evaluable` and
        :func:`eval_equations() <sfepy.fem.evaluate.eval_equations>`.

        Parameters
        ----------
        dw_mode : 'vector' or 'matrix'
            The assembling mode for 'weak' evaluation mode.
        term_mode : str
            The term call mode - some terms support different call modes
            and depending on the call mode different values are
            returned.
        ret_variables : bool
            If True, return the variables that were created to evaluate
            the expression.
        other : arguments
            See docstrings of :func:`ProblemDefinition.create_evaluable`.

        Returns
        -------
        out : array
            The result of the evaluation.
        variables : Variables instance
            The variables that were created to evaluate
            the expression. Only provided if `ret_variables` is True.
        """
        aux = self.create_evaluable(expression,
                                    try_equations=try_equations,
                                    auto_init=auto_init,
                                    preserve_caches=preserve_caches,
                                    copy_materials=copy_materials,
                                    integrals=integrals,
                                    ebcs=ebcs,
                                    epbcs=epbcs,
                                    lcbcs=lcbcs,
                                    ts=ts,
                                    functions=functions,
                                    mode=mode,
                                    var_dict=var_dict,
                                    strip_variables=strip_variables,
                                    extra_args=extra_args,
                                    verbose=verbose,
                                    **kwargs)
        equations, variables = aux

        out = eval_equations(equations,
                             variables,
                             preserve_caches=preserve_caches,
                             mode=mode,
                             dw_mode=dw_mode,
                             term_mode=term_mode)

        if ret_variables:
            out = (out, variables)

        return out
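
    # A hedged usage sketch, reusing the expression from the
    # create_evaluable() docstring above (``problem`` is a ProblemDefinition
    # instance and 'u' an existing variable):
    #
    #     vec_qp = problem.evaluate('dq_state_in_volume_qp.i1.Omega(u)',
    #                               mode='qp')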

    def get_time_solver(self, ts_conf=None, **kwargs):
        """
        Create and return a TimeSteppingSolver instance.

        Notes
        -----
        Also sets `self.ts` attribute.
        """
        ts_conf = get_default(ts_conf, self.ts_conf,
                              'you must set time-stepping solver!')
        ts_solver = Solver.any_from_conf(ts_conf, problem=self, **kwargs)
        self.ts = ts_solver.ts

        return ts_solver

    def get_materials(self):
        if self.equations is not None:
            materials = self.equations.materials

        else:
            materials = None

        return materials

    def create_materials(self, mat_names=None):
        """
        Create materials with names in `mat_names`. Their definitions
        have to be present in `self.conf.materials`.

        Notes
        -----
        This method does not change `self.equations`, so it should not
        have any side effects.
        """
        if mat_names is not None:
            conf_materials = self.select_materials(mat_names, only_conf=True)

        else:
            conf_materials = self.conf.materials

        materials = Materials.from_conf(conf_materials, self.functions)

        return materials

    def init_variables(self, state):
        """Initialize variables with history."""
        self.equations.variables.init_state(state)

    def get_variables(self, auto_create=False):
        if self.equations is not None:
            variables = self.equations.variables

        elif auto_create:
            variables = self.create_variables()

        else:
            variables = None

        return variables

    def create_variables(self, var_names=None):
        """
        Create variables with names in `var_names`. Their definitions
        have to be present in `self.conf.variables`.

        Notes
        -----
        This method does not change `self.equations`, so it should not
        have any side effects.
        """
        if var_names is not None:
            conf_variables = self.select_variables(var_names, only_conf=True)

        else:
            conf_variables = self.conf.variables

        variables = Variables.from_conf(conf_variables, self.fields)

        return variables

    def get_output_name(self, suffix=None, extra=None, mode=None):
        """
        Return default output file name, based on the output directory,
        output format, step suffix and mode. If present, the extra
        string is put just before the output format suffix.
        """
        out = op.join(self.output_dir, self.ofn_trunk)

        if suffix is not None:
            if mode is None:
                mode = self.output_modes[self.output_format]

            if mode == 'sequence':
                out = '.'.join((out, suffix))

        if extra is not None:
            out = '.'.join((out, extra, self.output_format))
        else:
            out = '.'.join((out, self.output_format))

        return out
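
    # For example (a hedged illustration of the logic above): with
    # output_format 'vtk' ('sequence' mode), get_output_name(suffix='001')
    # yields '<output_dir>/<trunk>.001.vtk', while with 'h5' ('single' mode)
    # the suffix is ignored and the name is '<output_dir>/<trunk>.h5'.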

    def remove_bcs(self):
        """
        Convenience function to remove boundary conditions.
        """
        self.time_update(ebcs={}, epbcs={}, lcbcs={})
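
Taken together, the methods above support a simple stationary driver. The
following is a rough, hedged sketch that uses only methods shown in this
listing; it assumes an already constructed ProblemDefinition instance
``problem`` with its configuration, regions and fields set up (nothing else
here comes from the source above).

# Build equations from the configuration and select solvers.
problem.set_equations()
problem.set_solvers(problem.conf.solvers, problem.conf.options)

# Apply boundary conditions and (re)create the matrix graph if needed.
problem.time_update()

# Solve the nonlinear problem and write the result.
state = problem.solve()
problem.setup_default_output()
problem.save_state(problem.get_output_name(), state)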
Example #39
0
 def __init__(self, problem, matrix_hook=None):
     Struct.__init__(self, problem=problem, matrix_hook=matrix_hook)
Example #40
0
    def test_solution(self):

        from sfepy.base.base import Struct
        from sfepy.base.conf import ProblemConf, get_standard_keywords
        from sfepy.applications import solve_pde, assign_standard_hooks
        import numpy as nm
        import os.path as op

        solutions = {}
        ok = True

        for hp, pb_filename in input_names.iteritems():

            required, other = get_standard_keywords()
            input_name = op.join(op.dirname(__file__), pb_filename)
            test_conf = ProblemConf.from_file(input_name, required, other)

            name = output_name_trunk + hp
            solver_options = Struct(output_filename_trunk=name,
                                    output_format='vtk',
                                    save_ebc=False,
                                    save_ebc_nodes=False,
                                    save_regions=False,
                                    save_regions_as_groups=False,
                                    save_field_meshes=False,
                                    solve_not=False)
            assign_standard_hooks(self, test_conf.options.get, test_conf)

            self.report('hyperelastic formulation: %s' % (hp, ))

            status = NLSStatus(conditions=[])

            pb, state = solve_pde(
                test_conf,
                solver_options,
                nls_status=status,
                output_dir=self.options.out_dir,
                step_hook=self.step_hook,
                post_process_hook=self.post_process_hook,
                post_process_hook_final=self.post_process_hook_final)

            converged = status.condition == 0
            ok = ok and converged

            solutions[hp] = state.get_parts()['u']
            self.report('%s solved' % input_name)

        rerr = 1.0e-3
        aerr = nm.linalg.norm(solutions['TL'], ord=None) * rerr

        self.report('allowed error: rel = %e, abs = %e' % (rerr, aerr))
        ok = ok and self.compare_vectors(solutions['TL'],
                                         solutions['UL'],
                                         label1='TLF',
                                         label2='ULF',
                                         allowed_error=rerr)

        ok = ok and self.compare_vectors(solutions['UL'],
                                         solutions['ULM'],
                                         label1='ULF',
                                         label2='ULF_mixed',
                                         allowed_error=rerr)

        return ok
Example #41
0
    def __init__(self, name, dof_names, var_di):
        Struct.__init__(self, name=name, dof_names=dof_names, var_di=var_di)

        self.dpn = len(self.dof_names)
        self.eq = nm.arange(var_di.n_dof, dtype=nm.int32)
Example #42
0
class Probe(Struct):
    """
    Base class for all point probes. Enforces two points minimum.
    """
    cache = Struct(name='probe_shared_cache',
                   offsets=None,
                   iconn=None,
                   kdtree=None)
    is_cyclic = False

    def __init__(self, name, mesh, share_mesh=True, n_point=None, **kwargs):
        """
        Parameters
        ----------
        name : str
            The probe name, set automatically by the subclasses.
        mesh : Mesh instance
            The FE mesh where the variables to be probed are defined.
        share_mesh : bool
            Set to True to indicate that all the probes will work on the same
            mesh. Certain data are then computed only for the first probe and
            cached.
        n_point : int
           The (fixed) number of probe points, when positive. When non-positive,
           the number of points is adaptively increased starting from -n_point,
           until the neighboring point distance is less than the diameter of the
           elements enclosing the points. When None, it is set to -10.

        For additional parameters see the __init__() docstrings of the
        subclasses.

        Notes
        -----
        If the mesh contains vertices that are not contained in any element, we
        shift coordinates of such vertices so that they never match in the
        nearest node search.
        """
        Struct.__init__(self, name=name, mesh=mesh, **kwargs)

        self.set_n_point(n_point)

        self.options = Struct(close_limit=0.1, size_hint=None)

        self.is_refined = False

        tt = time.clock()
        if share_mesh:
            if Probe.cache.iconn is None:
                offsets, iconn = make_inverse_connectivity(mesh.conns,
                                                           mesh.n_nod,
                                                           ret_offsets=True)
                Probe.cache.iconn = iconn
                Probe.cache.offsets = offsets
            self.cache = Probe.cache

        else:
            offsets, iconn = make_inverse_connectivity(mesh.conns,
                                                       mesh.n_nod,
                                                       ret_offsets=True)
            self.cache = Struct(name='probe_cache',
                                offsets=offsets,
                                iconn=iconn,
                                kdtree=None)
        output('iconn: %f s' % (time.clock() - tt))

        i_bad = nm.where(nm.diff(self.cache.offsets) == 0)[0]
        if len(i_bad):
            bbox = mesh.get_bounding_box()
            mesh.coors[i_bad] = bbox[1] + bbox[1] - bbox[0]
            output('warning: some vertices are not in any element!')
            output('warning: vertex-based results will be wrong!')

        tt = time.clock()
        if share_mesh:
            if Probe.cache.kdtree is None:
                self.cache.kdtree = KDTree(mesh.coors)

        else:
            self.cache.kdtree = KDTree(mesh.coors)

        output('kdtree: %f s' % (time.clock() - tt))

    def set_n_point(self, n_point):
        """
        Set the number of probe points.

        Parameters
        ----------
        n_point : int
           The (fixed) number of probe points, when positive. When non-positive,
           the number of points is adaptively increased starting from -n_point,
           until the neighboring point distance is less than the diameter of the
           elements enclosing the points. When None, it is set to -10.
        """
        if n_point is None:
            n_point = -10

        if n_point <= 0:
            n_point = max(-n_point, 2)
            self.n_point_required = -1

        else:
            n_point = max(n_point, 2)
            self.n_point_required = n_point

        self.n_point0 = self.n_point = n_point
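
    # For example: n_point=None or n_point=-10 starts the adaptive refinement
    # from 10 points (n_point_required == -1), while n_point=1 is clamped to
    # the two-point minimum with n_point_required == 2.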

    def set_options(self, close_limit=None, size_hint=None):
        """
        Set the probe options.

        Parameters
        ----------
        close_limit : float
            The maximum limit distance of a point from the closest
            element allowed for extrapolation.
        size_hint : float
            Element size hint for the refinement of probe parametrization.
        """
        if close_limit is not None:
            self.options.close_limit = close_limit

        if size_hint is not None:
            self.options.size_hint = size_hint

    def report(self):
        """Report the probe parameters."""
        out = [self.__class__.__name__]

        if self.n_point_required == -1:
            aux = 'adaptive'
        else:
            aux = 'fixed'
        out.append('number of points: %s (%s)' % (self.n_point, aux))

        return out

    def __call__(self, variable):
        """
        Probe the given variable. The actual implementation is in self.probe(),
        so that it can be overridden in subclasses.

        Parameters
        ----------
        variable : Variable instance
            The variable to be sampled along the probe.
        """
        return self.probe(variable)

    def probe(self, variable):
        """
        Probe the given variable.

        Parameters
        ----------
        variable : Variable instance
            The variable to be sampled along the probe.
        """
        refine_flag = None
        ev = variable.evaluate_at

        self.reset_refinement()

        while True:
            pars, points = self.get_points(refine_flag)

            vals, cells, status = ev(points,
                                     strategy='kdtree',
                                     close_limit=self.options.close_limit,
                                     cache=self.cache,
                                     ret_status=True)
            ii = nm.where(status > 1)[0]
            vals[ii] = nm.nan

            if self.is_refined:
                break

            else:
                refine_flag = self.refine_points(variable, points, cells)
                if (refine_flag == False).all():
                    break

        self.is_refined = True

        return pars, vals

    def reset_refinement(self):
        """
        Reset the probe refinement state.
        """
        self.is_refined = False
        self.n_point = self.n_point0

    def refine_points(self, variable, points, cells):
        """
        Mark intervals between points for refinement, based on element
        sizes at those points. The points are assumed to be ordered.

        Returns
        -------
        refine_flag : bool array
            True at places corresponding to intervals between subsequent points
            that need to be refined.
        """
        if self.n_point_required == self.n_point:
            refine_flag = nm.array([False])

        else:
            if self.options.size_hint is None:
                ed = variable.get_element_diameters(cells, 0)
                pd = 0.5 * (ed[1:] + ed[:-1])

            else:
                pd = self.options.size_hint

            dist = norm_l2_along_axis(points[1:] - points[:-1])

            refine_flag = dist > pd
            if self.is_cyclic:
                pd1 = 0.5 * (ed[0] + ed[-1])
                dist1 = nla.norm(points[0] - points[-1])

                refine_flag = nm.r_[refine_flag, dist1 > pd1]

        return refine_flag

    @staticmethod
    def refine_pars(pars, refine_flag, cyclic_val=None):
        """
        Refine the probe parametrization based on the refine_flag.
        """
        ii = nm.where(refine_flag)[0]
        ip = ii + 1

        if cyclic_val is not None:
            cpars = nm.r_[pars, cyclic_val]
            pp = 0.5 * (cpars[ip] + cpars[ii])

        else:
            pp = 0.5 * (pars[ip] + pars[ii])

        pars = nm.insert(pars, ip, pp)

        return pars
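
The probes are used by calling them with a variable. A hedged sketch follows,
assuming a LineProbe subclass taking two end points (its exact signature is
not shown in this listing) and an existing Variable instance ``u`` defined on
``mesh``:

p0, p1 = [0.0, 0.0, 0.0], [0.1, 0.0, 0.0]
probe = LineProbe(p0, p1, n_point=-20, mesh=mesh)
probe.set_options(close_limit=0.05)

# A negative n_point requests adaptive refinement of the probe points.
pars, vals = probe(u)
print probe.report()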
Example #43
0
 def __init__(self, num=0):
     Struct.__init__(self, num=num, shape=(0, 0, 0))
     self.values = nm.empty(self.shape, dtype=nm.float64)
Example #44
0
 def process_options(self):
     get = self.options.get
     return Struct(incident_wave_dir=get('incident_wave_dir', None))
Example #45
0
    def create_mapping(self, region, integral, integration,
                       return_mapping=True):
        """
        Create a new reference mapping.

        Compute jacobians, element volumes and base function derivatives
        for Volume-type geometries (volume mappings), and jacobians,
        normals and base function derivatives for Surface-type
        geometries (surface mappings).

        Notes
        -----
        - surface mappings are defined on the surface region
        - surface mappings require field order to be > 0
        """
        domain = self.domain
        coors = domain.get_mesh_coors(actual=True)
        dconn = domain.get_conn()

        if integration == 'volume':
            qp = self.get_qp('v', integral)

            iels = region.get_cells()

            geo_ps = self.gel.poly_space
            ps = self.poly_space
            bf = self.get_base('v', 0, integral, iels=iels)

            conn = nm.take(dconn, iels.astype(nm.int32), axis=0)
            mapping = VolumeMapping(coors, conn, poly_space=geo_ps)
            vg = mapping.get_mapping(qp.vals, qp.weights, poly_space=ps,
                                     ori=self.ori,
                                     transform=self.basis_transform)

            out = vg

        elif (integration == 'surface') or (integration == 'surface_extra'):
            assert_(self.approx_order > 0)

            if self.ori is not None:
                msg = 'surface integrals do not work yet with the' \
                      ' hierarchical basis!'
                raise ValueError(msg)

            sd = domain.surface_groups[region.name]
            esd = self.surface_data[region.name]

            geo_ps = self.gel.poly_space
            ps = self.poly_space

            conn = sd.get_connectivity()
            mapping = SurfaceMapping(coors, conn, poly_space=geo_ps)

            if not self.is_surface:
                self.create_bqp(region.name, integral)
                qp = self.qp_coors[(integral.order, esd.bkey)]

                abf = ps.eval_base(qp.vals[0], transform=self.basis_transform)
                bf = abf[..., self.efaces[0]]

                indx = self.gel.get_surface_entities()[0]
                # Fix geometry element's 1st facet orientation for gradients.
                indx = nm.roll(indx, -1)[::-1]
                mapping.set_basis_indices(indx)

                sg = mapping.get_mapping(qp.vals[0], qp.weights,
                                         poly_space=Struct(n_nod=bf.shape[-1]),
                                         mode=integration)

                if integration == 'surface_extra':
                    sg.alloc_extra_data(self.econn.shape[1])

                    bf_bg = geo_ps.eval_base(qp.vals, diff=True)
                    ebf_bg = self.get_base(esd.bkey, 1, integral)

                    sg.evaluate_bfbgm(bf_bg, ebf_bg, coors, sd.fis, dconn)

            else:
                # Do not use BQP for surface fields.
                qp = self.get_qp(sd.face_type, integral)
                bf = ps.eval_base(qp.vals, transform=self.basis_transform)

                sg = mapping.get_mapping(qp.vals, qp.weights,
                                         poly_space=Struct(n_nod=bf.shape[-1]),
                                         mode=integration)

            out = sg

        elif integration == 'point':
            out = mapping = None

        elif integration == 'custom':
            raise ValueError('cannot create custom mapping!')

        else:
            raise ValueError('unknown integration geometry type: %s'
                             % integration)

        if out is not None:
            # Store the integral used.
            out.integral = integral
            out.qp = qp
            out.ps = ps
            # Update base.
            out.bf[:] = bf

        if return_mapping:
            out = (out, mapping)

        return out
Example #46
0
File: terms.py Project: chiao45/sfepy
    def classify_args(self):
        """
        Classify types of the term arguments and find matching call
        signature.

        A state variable can be in place of a parameter variable and
        vice versa.
        """
        self.names = Struct(name='arg_names',
                            material=[],
                            variable=[],
                            user=[],
                            state=[],
                            virtual=[],
                            parameter=[])

        # Prepare for 'opt_material' - just prepend a None argument if needed.
        if isinstance(self.arg_types[0], tuple):
            arg_types = self.arg_types[0]

        else:
            arg_types = self.arg_types

        if len(arg_types) == (len(self.args) + 1):
            self.args.insert(0, (None, None))
            self.arg_names.insert(0, (None, None))

        if isinstance(self.arg_types[0], tuple):
            assert_(len(self.modes) == len(self.arg_types))
            # Find matching call signature using variable arguments - material
            # and user arguments are ignored!
            matched = []
            for it, arg_types in enumerate(self.arg_types):
                arg_kinds = get_arg_kinds(arg_types)
                if self._check_variables(arg_kinds):
                    matched.append((it, arg_kinds))

            if len(matched) == 1:
                i_match, arg_kinds = matched[0]
                arg_types = self.arg_types[i_match]
                self.mode = self.modes[i_match]

            elif len(matched) == 0:
                msg = 'cannot match arguments! (%s)' % self.arg_names
                raise ValueError(msg)

            else:
                msg = 'ambiguous arguments! (%s)' % self.arg_names
                raise ValueError(msg)

        else:
            arg_types = self.arg_types
            arg_kinds = get_arg_kinds(self.arg_types)
            self.mode = Struct.get(self, 'mode', None)

            if not self._check_variables(arg_kinds):
                raise ValueError('cannot match variables! (%s)' %
                                 self.arg_names)

        # Set actual argument types.
        self.ats = list(arg_types)

        for ii, arg_kind in enumerate(arg_kinds):
            name = self.arg_names[ii]
            if arg_kind.endswith('variable'):
                names = self.names.variable

                if arg_kind == 'virtual_variable':
                    self.names.virtual.append(name)

                elif arg_kind == 'state_variable':
                    self.names.state.append(name)

                elif arg_kind == 'parameter_variable':
                    self.names.parameter.append(name)

            elif arg_kind.endswith('material'):
                names = self.names.material

            else:
                names = self.names.user

            names.append(name)

        self.n_virtual = len(self.names.virtual)
        if self.n_virtual > 1:
            raise ValueError('at most one virtual variable is allowed! (%d)' %
                             self.n_virtual)

        self.set_arg_types()

        self.setup_integration()
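As a rough, self-contained illustration of the matching step above (not sfepy code), the idea is: for each candidate signature, check whether the variable-kind slots are compatible with the actual arguments, and fail when zero or more than one signature matches.

# Hedged sketch with made-up argument kinds; sfepy's get_arg_kinds() and
# _check_variables() are replaced by stand-ins here.
def match_signature(candidate_kinds, check_variables):
    """Return the index and kinds of the single matching signature."""
    matched = [(it, kinds) for it, kinds in enumerate(candidate_kinds)
               if check_variables(kinds)]
    if len(matched) == 0:
        raise ValueError('cannot match arguments!')
    elif len(matched) > 1:
        raise ValueError('ambiguous arguments!')
    return matched[0]

candidates = [('material', 'virtual_variable', 'state_variable'),
              ('material', 'parameter_variable', 'parameter_variable')]
# Pretend the actual arguments hold one virtual and one state variable.
check = lambda kinds: kinds[1:] == ('virtual_variable', 'state_variable')
print(match_signature(candidates, check))  # -> (0, ('material', ...))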
Example #47
0
File: log.py Project: mfkiwl/sfepy
    def __init__(self,
                 data_names=None,
                 xlabels=None,
                 ylabels=None,
                 yscales=None,
                 is_plot=True,
                 aggregate=200,
                 log_filename=None,
                 formats=None):
        """
        Parameters
        ----------
        data_names : list of lists of str
            The data names grouped by subplots: [[name1, name2, ...], [name3,
            name4, ...], ...], where name<n> are strings to display in
            (sub)plot legends.
        xlabels : list of str
            The x axis labels of subplots.
        ylabels : list of str
            The y axis labels of subplots.
        yscales : list of 'linear' or 'log'
            The y axis scales of subplots.
        is_plot : bool
            If True, try to use LogPlotter for plotting.
        aggregate : int
            The number of plotting commands to process before a redraw.
        log_filename : str, optional
            If given, save log data into a log file.
        formats : list of lists of number format strings
            The print formats of data to be used in a log file, grouped in
            the same way as subplots.
        """
        try:
            import matplotlib as mpl
        except ImportError:
            mpl = None

        if (mpl is not None) and mpl.rcParams['backend'] == 'GTKAgg':
            can_live_plot = True
        else:
            can_live_plot = False

        Struct.__init__(self,
                        data_names={},
                        n_arg=0,
                        n_gr=0,
                        data={},
                        x_values={},
                        n_calls=0,
                        yscales={},
                        xlabels={},
                        ylabels={},
                        plot_pipe=None,
                        formats={},
                        output=None)

        if data_names is not None:
            n_gr = len(data_names)
        else:
            n_gr = 0
            data_names = []

        yscales = get_default(yscales, ['linear'] * n_gr)
        xlabels = get_default(xlabels, ['iteration'] * n_gr)
        ylabels = get_default(ylabels, [''] * n_gr)

        if formats is None:
            formats = [None] * n_gr

        for ig, names in enumerate(data_names):
            self.add_group(names, yscales[ig], xlabels[ig], ylabels[ig],
                           formats[ig])

        self.is_plot = get_default(is_plot, True)
        self.aggregate = get_default(aggregate, 100)

        self.can_plot = (can_live_plot and (mpl is not None)
                         and (Process is not None))

        if log_filename is not None:
            self.output = Output('', filename=log_filename)
            self.output('# started: %s' % time.asctime())
            self.output('# groups: %d' % n_gr)
            for ig, names in enumerate(data_names):
                self.output('#   %d' % ig)
                self.output('#     xlabel: "%s", ylabel: "%s", yscales: "%s"' %
                            (xlabels[ig], ylabels[ig], yscales[ig]))
                self.output('#     names: "%s"' % ', '.join(names))

        if self.is_plot and (not self.can_plot):
            output(_msg_no_live)
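A hedged construction sketch for the constructor above (assuming it is sfepy.base.log.Log); the group names are illustrative, and the call at the end follows the usual pattern of passing one value per registered name plus per-group x values, though the exact call signature may differ between versions.

from sfepy.base.log import Log

log = Log([['residual'], ['err_u', 'err_p']],
          xlabels=['iteration', 'iteration'],
          ylabels=['residual', 'errors'],
          yscales=['log', 'linear'],
          is_plot=False,
          log_filename='solver_log.txt')

for it in range(3):
    # One value per name, grouped as in data_names; x gives the x value
    # of each subplot group (assumed keyword).
    log(10.0 ** (-it), 0.1 * it, 0.2 * it, x=[it, it])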
Example #48
0
filename = data_dir + '/meshes/2d/special/circle_in_square.mesh'

output_dir = incwd('output/band_gaps')

# aluminium, in 1e+10 Pa
D_m = get_pars(2, 5.898, 2.681)
density_m = 0.2799 # in 1e4 kg/m3

# epoxy, in 1e+10 Pa
D_c = get_pars(2, 0.1798, 0.148)
density_c = 0.1142 # in 1e4 kg/m3

mat_pars = Coefficients(D_m=D_m, density_m=density_m,
                        D_c=D_c, density_c=density_c)

region_selects = Struct(matrix='cells of group 1',
                        inclusion='cells of group 2')

corrs_save_names = {'evp' : 'evp', 'corrs_rs' : 'corrs_rs'}

options = {
    'plot_transform_angle' : None,
    'plot_transform_wave' : ('clip_sqrt', (0, 30)),
    'plot_transform' : ('normalize', (-2, 2)),

    'fig_name' : 'band_gaps',
    'fig_name_angle' : 'band_gaps_angle',
    'fig_name_wave' : 'band_gaps_wave',
    'fig_suffix' : '.pdf',

    'coefs_filename' : 'coefs.txt',
Example #49
0
    def process_options(options):
        """
        Application options setup. Sets default values for missing
        non-compulsory options.
        """
        get = options.get

        default_plot_options = {'show' : True, 'legend' : False}

        aux = {
            'resonance' : 'eigenfrequencies',
            'masked' : 'masked eigenfrequencies',
            'eig_min' : r'min eig($M^*$)',
            'eig_mid' : r'mid eig($M^*$)',
            'eig_max' : r'max eig($M^*$)',
            'x_axis' : r'$\sqrt{\lambda}$, $\omega$',
            'y_axis' : r'eigenvalues of mass matrix $M^*$',
        }
        plot_labels = try_set_defaults(options, 'plot_labels', aux, recur=True)

        aux = {
            'resonance' : 'eigenfrequencies',
            'masked' : 'masked eigenfrequencies',
            'eig_min' : r'$\kappa$(min)',
            'eig_mid' : r'$\kappa$(mid)',
            'eig_max' : r'$\kappa$(max)',
            'x_axis' : r'$\sqrt{\lambda}$, $\omega$',
            'y_axis' : 'polarization angles',
        }
        plot_labels_angle = try_set_defaults(options, 'plot_labels_angle', aux)

        aux = {
            'resonance' : 'eigenfrequencies',
            'masked' : 'masked eigenfrequencies',
            'eig_min' : r'wave number (min)',
            'eig_mid' : r'wave number (mid)',
            'eig_max' : r'wave number (max)',
            'x_axis' : r'$\sqrt{\lambda}$, $\omega$',
            'y_axis' : 'wave numbers',
        }
        plot_labels_wave = try_set_defaults(options, 'plot_labels_wave', aux)

        plot_rsc = {
            'resonance' : {'linewidth' : 0.5, 'color' : 'r', 'linestyle' : '-'},
            'masked' : {'linewidth' : 0.5, 'color' : 'r', 'linestyle' : ':'},
            'x_axis' : {'linewidth' : 0.5, 'color' : 'k', 'linestyle' : '--'},
            'eig_min' : {'linewidth' : 2.0, 'color' : (0.0, 0.0, 1.0),
                         'linestyle' : ':' },
            'eig_mid' : {'linewidth' : 2.0, 'color' : (0.0, 0.0, 0.8),
                         'linestyle' : '--' },
            'eig_max' : {'linewidth' : 2.0, 'color' : (0.0, 0.0, 0.6),
                         'linestyle' : '-' },
            'strong_gap' : {'linewidth' : 0, 'facecolor' : (0.2, 0.4, 0.2)},
            'weak_gap' : {'linewidth' : 0, 'facecolor' : (0.6, 0.8, 0.6)},
            'propagation' : {'linewidth' : 0, 'facecolor' : (1, 1, 1)},
            'params' : {'axes.labelsize': 'x-large',
                        'font.size': 14,
                        'legend.fontsize': 'large',
                        'legend.loc': 'best',
                        'xtick.labelsize': 'large',
                        'ytick.labelsize': 'large',
                        'text.usetex': True},
        }
        plot_rsc = try_set_defaults(options, 'plot_rsc', plot_rsc)

        return Struct(incident_wave_dir=get('incident_wave_dir', None),

                      plot_transform=get('plot_transform', None),
                      plot_transform_wave=get('plot_transform_wave', None),
                      plot_transform_angle=get('plot_transform_angle', None),

                      plot_options=get('plot_options', default_plot_options),

                      fig_name=get('fig_name', None),
                      fig_name_wave=get('fig_name_wave', None),
                      fig_name_angle=get('fig_name_angle', None),
                      fig_suffix=get('fig_suffix', '.pdf'),

                      plot_labels=plot_labels,
                      plot_labels_angle=plot_labels_angle,
                      plot_labels_wave=plot_labels_wave,
                      plot_rsc=plot_rsc)
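The try_set_defaults() calls above complete user-given options with defaults; a hedged, self-contained illustration of that "fill in the missing keys" pattern (plain Python, not sfepy's actual helper):

def set_defaults(options, key, defaults, recur=False):
    """Return options[key] completed with values from defaults."""
    value = dict(options.get(key, {}))
    for dk, dv in defaults.items():
        if recur and isinstance(dv, dict):
            value[dk] = set_defaults(value, dk, dv, recur=True)
        else:
            value.setdefault(dk, dv)
    return value

opts = {'plot_labels' : {'x_axis' : 'frequency'}}
labels = set_defaults(opts, 'plot_labels',
                      {'x_axis' : r'$\omega$', 'y_axis' : 'eigenvalues'})
print(labels)  # {'x_axis': 'frequency', 'y_axis': 'eigenvalues'}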
Example #50
0
def main():
    parser = ArgumentParser(description=__doc__,
                            formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument('--version',
                        action='version',
                        version='%(prog)s ' + sfepy.__version__)
    parser.add_argument('--debug',
                        action='store_true',
                        dest='debug',
                        default=False,
                        help=help['debug'])
    parser.add_argument('-o',
                        metavar='filename',
                        action='store',
                        dest='output_filename_trunk',
                        default=None,
                        help=help['filename'])
    parser.add_argument('-d',
                        '--dump',
                        action='store_true',
                        dest='dump',
                        default=False,
                        help=help['dump'])
    parser.add_argument('--same-dir',
                        action='store_true',
                        dest='same_dir',
                        default=False,
                        help=help['same_dir'])
    parser.add_argument('-l',
                        '--linearization',
                        metavar='options',
                        action='store',
                        dest='linearization',
                        default=None,
                        help=help['linearization'])
    parser.add_argument('--times',
                        action='store_true',
                        dest='times',
                        default=False,
                        help=help['times'])
    parser.add_argument('-f',
                        '--from',
                        type=int,
                        metavar='ii',
                        action='store',
                        dest='step_from',
                        default=0,
                        help=help['from'])
    parser.add_argument('-t',
                        '--to',
                        type=int,
                        metavar='ii',
                        action='store',
                        dest='step_to',
                        default=None,
                        help=help['to'])
    parser.add_argument('-s',
                        '--step',
                        type=int,
                        metavar='ii',
                        action='store',
                        dest='step_by',
                        default=1,
                        help=help['step'])
    parser.add_argument('-e',
                        '--extract',
                        metavar='list',
                        action='store',
                        dest='extract',
                        default=None,
                        help=help['extract'])
    parser.add_argument('-a',
                        '--average',
                        action='store_true',
                        dest='average',
                        default=False,
                        help=help['average'])
    parser.add_argument('input_file', nargs='?', default=None)
    parser.add_argument('results_file')
    options = parser.parse_args()

    if options.debug:
        from sfepy.base.base import debug_on_error
        debug_on_error()

    filename_in = options.input_file
    filename_results = options.results_file

    if filename_in is None:
        linearize = False
    else:
        linearize = True
        options.dump = True

    if options.times:
        steps, times, nts, dts = th.extract_times(filename_results)
        for ii, time in enumerate(times):
            step = steps[ii]
            print('%d %e %e %e' % (step, time, nts[ii], dts[ii]))

    if options.dump:
        trunk = get_default(options.output_filename_trunk,
                            get_trunk(filename_results))
        if options.same_dir:
            trunk = os.path.join(os.path.dirname(filename_results),
                                 os.path.basename(trunk))

        args = {}
        if linearize:
            problem = create_problem(filename_in)

            linearization = Struct(kind='adaptive',
                                   min_level=0,
                                   max_level=2,
                                   eps=1e-2)
            aux = problem.conf.options.get('linearization', None)
            linearization.update(aux)

            if options.linearization is not None:
                aux = parse_linearization(options.linearization)
                linearization.update(aux)

            args.update({
                'fields': problem.fields,
                'linearization': linearization
            })

        if options.step_to is None:
            args.update({'step0': options.step_from})

        else:
            args.update({
                'steps':
                nm.arange(options.step_from,
                          options.step_to + 1,
                          options.step_by,
                          dtype=int)
            })

        th.dump_to_vtk(filename_results, output_filename_trunk=trunk, **args)

    if options.extract:
        ths, ts = th.extract_time_history(filename_results, options.extract)

        if options.average:
            ths = th.average_vertex_var_in_cells(ths)

        if options.output_filename_trunk:
            th.save_time_history(ths, ts,
                                 options.output_filename_trunk + '.h5')

        else:
            print(dict_to_struct(ths, flag=(1, 1, 1)).str_all())
Example #51
0
def detect_band_gaps(mass, freq_info, opts, gap_kind='normal', mtx_b=None):
    """
    Detect band gaps given a solution to the eigenproblem (eigs,
    eig_vectors). Only valid resonance frequencies (i.e. those for which the
    corresponding eigenmomenta are above a given threshold) are taken into
    account.

    Notes
    -----
    - make freq_eps relative to ]f0, f1[ size?
    """
    output('eigensolver:', opts.eigensolver)

    fm = freq_info.freq_range_margins
    min_freq, max_freq = fm[0], fm[-1]
    output('freq. range with margins: [%8.3f, %8.3f]' % (min_freq, max_freq))

    df = opts.freq_step * (max_freq - min_freq)

    fz_callback = get_callback(mass.evaluate,
                               opts.eigensolver,
                               mtx_b=mtx_b,
                               mode='find_zero')
    trace_callback = get_callback(mass.evaluate,
                                  opts.eigensolver,
                                  mtx_b=mtx_b,
                                  mode='trace')

    n_col = 1 + (mtx_b is not None)
    logs = [[] for ii in range(n_col + 1)]
    gaps = []

    for ii in range(freq_info.freq_range.shape[0] + 1):

        f0, f1 = fm[[ii, ii + 1]]
        output('interval: ]%.8f, %.8f[...' % (f0, f1))

        log_freqs = get_log_freqs(f0, f1, df, opts.freq_eps, 100, 1000)

        output('n_logged: %d' % log_freqs.shape[0])

        log_mevp = [[] for ii in range(n_col)]
        for f in log_freqs:
            for ii, data in enumerate(trace_callback(f)):
                log_mevp[ii].append(data)

        # Get log for the first and last f in log_freqs.
        lf0 = log_freqs[0]
        lf1 = log_freqs[-1]

        log0, log1 = log_mevp[0][0], log_mevp[0][-1]
        min_eig0 = log0[0]
        max_eig1 = log1[-1]
        if gap_kind == 'liquid':
            mevp = nm.array(log_mevp, dtype=nm.float64).squeeze()
            si = nm.where(mevp[:, 0] < 0.0)[0]
            li = nm.where(mevp[:, -1] < 0.0)[0]
            wi = nm.setdiff1d(si, li)

            if si.shape[0] == 0:  # No gaps.
                gap = ([2, lf0, log0[0]], [2, lf0, log0[-1]])
                gaps.append(gap)

            elif li.shape[0] == mevp.shape[0]:  # Full interval strong gap.
                gap = ([1, lf1, log1[0]], [1, lf1, log1[-1]])
                gaps.append(gap)

            else:
                subgaps = []
                for chunk in split_chunks(li):  # Strong gaps.
                    i0, i1 = chunk[0], chunk[-1]
                    fmin, fmax = log_freqs[i0], log_freqs[i1]
                    gap = ([1, fmin, mevp[i0, -1]], [1, fmax, mevp[i1, -1]])
                    subgaps.append(gap)

                for chunk in split_chunks(wi):  # Weak gaps.
                    i0, i1 = chunk[0], chunk[-1]
                    fmin, fmax = log_freqs[i0], log_freqs[i1]
                    gap = ([0, fmin, mevp[i0, -1]], [2, fmax, mevp[i1, -1]])
                    subgaps.append(gap)
                gaps.append(subgaps)

        else:
            if min_eig0 > 0.0:  # No gaps.
                gap = ([2, lf0, log0[0]], [2, lf0, log0[-1]])

            elif max_eig1 < 0.0:  # Full interval strong gap.
                gap = ([1, lf1, log1[0]], [1, lf1, log1[-1]])

            else:
                llog_freqs = list(log_freqs)

                # Insert fmin, fmax into log.
                output('finding zero of the largest eig...')
                smax, fmax, vmax = find_zero(lf0, lf1, fz_callback,
                                             opts.freq_eps, opts.zero_eps, 1)
                im = nm.searchsorted(log_freqs, fmax)
                llog_freqs.insert(im, fmax)
                for ii, data in enumerate(trace_callback(fmax)):
                    log_mevp[ii].insert(im, data)

                output('...done')
                if smax in [0, 2]:
                    output('finding zero of the smallest eig...')
                    # having fmax instead of f0 does not work if freq_eps is
                    # large.
                    smin, fmin, vmin = find_zero(lf0, lf1, fz_callback,
                                                 opts.freq_eps, opts.zero_eps,
                                                 0)
                    im = nm.searchsorted(log_freqs, fmin)
                    # +1 due to fmax already inserted before.
                    llog_freqs.insert(im + 1, fmin)
                    for ii, data in enumerate(trace_callback(fmin)):
                        log_mevp[ii].insert(im + 1, data)

                    output('...done')

                elif smax == 1:
                    smin = 1  # both are negative everywhere.
                    fmin, vmin = fmax, vmax

                gap = ([smin, fmin, vmin], [smax, fmax, vmax])

                log_freqs = nm.array(llog_freqs)

            output(gap[0])
            output(gap[1])

            gaps.append(gap)

        logs[0].append(log_freqs)
        for ii, data in enumerate(log_mevp):
            logs[ii + 1].append(nm.array(data, dtype=nm.float64))

        output('...done')

    kinds = describe_gaps(gaps)

    slogs = Struct(freqs=logs[0], eigs=logs[1])
    if n_col == 2:
        slogs.eig_vectors = logs[2]

    return slogs, gaps, kinds
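find_zero() above locates the frequency where an eigenvalue of the mass matrix changes sign; as a rough, hedged stand-in (not sfepy's actual implementation), a plain bisection over a scalar function illustrates the idea:

def bisect_sign_change(fun, f0, f1, freq_eps=1e-8, zero_eps=1e-12):
    """Find a point in ]f0, f1[ where fun changes sign, by bisection."""
    v0, v1 = fun(f0), fun(f1)
    if v0 * v1 > 0.0:
        raise ValueError('no sign change in ]%s, %s[' % (f0, f1))
    while (f1 - f0) > freq_eps:
        fm = 0.5 * (f0 + f1)
        vm = fun(fm)
        if abs(vm) < zero_eps:
            return fm, vm
        if v0 * vm <= 0.0:
            f1, v1 = fm, vm
        else:
            f0, v0 = fm, vm
    fm = 0.5 * (f0 + f1)
    return fm, fun(fm)

print(bisect_sign_change(lambda f: f ** 2 - 2.0, 1.0, 2.0))  # ~ (1.4142, ...)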
Example #52
0
def generate_images(images_dir, examples_dir):
    """
    Generate images from results of running the examples found in the
    `examples_dir` directory.

    The generated images are stored in `images_dir`.
    """
    from sfepy.applications import solve_pde
    from sfepy.postprocess.viewer import Viewer
    from sfepy.postprocess.utils import mlab
    from sfepy.solvers.ts_solvers import StationarySolver

    prefix = output.prefix

    output_dir = tempfile.mkdtemp()
    trunk = os.path.join(output_dir, 'result')
    options = Struct(output_filename_trunk=trunk,
                     output_format='vtk',
                     save_ebc=False,
                     save_ebc_nodes=False,
                     save_regions=False,
                     save_field_meshes=False,
                     save_regions_as_groups=False,
                     solve_not=False)
    default_views = {'': {}}

    ensure_path(images_dir + os.path.sep)

    view = Viewer('', offscreen=False)

    for ex_filename in locate_files('*.py', examples_dir):
        if _omit(ex_filename): continue

        output.level = 0
        output.prefix = prefix
        ebase = ex_filename.replace(examples_dir, '')[1:]
        output('trying "%s"...' % ebase)

        try:
            problem, state = solve_pde(ex_filename, options=options)

        except KeyboardInterrupt:
            raise

        except Exception:
            problem = None
            output('***** failed! *****')

        if problem is not None:
            if ebase in custom:
                views = custom[ebase]

            else:
                views = default_views

            tsolver = problem.get_solver()
            if isinstance(tsolver, StationarySolver):
                suffix = None

            else:
                suffix = tsolver.ts.suffix % (tsolver.ts.n_step - 1)

            filename = problem.get_output_name(suffix=suffix)

            for suffix, kwargs in six.iteritems(views):
                fig_filename = _get_fig_filename(ebase, images_dir, suffix)

                fname = edit_filename(filename, suffix=suffix)
                output('displaying results from "%s"' % fname)
                disp_name = fig_filename.replace(sfepy.data_dir, '')
                output('to "%s"...' % disp_name.lstrip(os.path.sep))

                view.filename = fname
                view(scene=view.scene,
                     show=False,
                     is_scalar_bar=True,
                     **kwargs)
                view.save_image(fig_filename)
                mlab.clf()

                output('...done')

            remove_files(output_dir)

        output('...done')
Example #53
0
def create_expression_output(expression, name, primary_field_name,
                             fields, materials, variables,
                             functions=None, mode='eval', term_mode=None,
                             extra_args=None, verbose=True, kwargs=None,
                             min_level=0, max_level=1, eps=1e-4):
    """
    Create output mesh and data for the expression using the adaptive
    linearizer.

    Parameters
    ----------
    expression : str
        The expression to evaluate.
    name : str
        The name of the data.
    primary_field_name : str
        The name of field that defines the element groups and polynomial
        spaces.
    fields : dict
        The dictionary of fields used in `variables`.
    materials : Materials instance
        The materials used in the expression.
    variables : Variables instance
        The variables used in the expression.
    functions : Functions instance, optional
        The user functions for materials etc.
    mode : one of 'eval', 'el_avg', 'qp'
        The evaluation mode - 'qp' requests the values in quadrature points,
        'el_avg' element averages and 'eval' means integration over
        each term region.
    term_mode : str
        The term call mode - some terms support different call modes
        and depending on the call mode different values are
        returned.
    extra_args : dict, optional
        Extra arguments to be passed to terms in the expression.
    verbose : bool
        If False, reduce verbosity.
    kwargs : dict, optional
        The variables (dictionary of (variable name) : (Variable
        instance)) to be used in the expression.
    min_level : int
        The minimum required level of mesh refinement.
    max_level : int
        The maximum level of mesh refinement.
    eps : float
        The relative tolerance parameter of mesh adaptivity.

    Returns
    -------
    out : dict
        The output dictionary.
    """
    field = fields[primary_field_name]
    vertex_coors = field.coors[:field.n_vertex_dof, :]

    ps = field.poly_space
    gps = field.gel.poly_space
    vertex_conn = field.econn[:, :field.gel.n_vertex]

    eval_dofs = get_eval_expression(expression,
                                    fields, materials, variables,
                                    functions=functions,
                                    mode=mode, extra_args=extra_args,
                                    verbose=verbose, kwargs=kwargs)
    eval_coors = get_eval_coors(vertex_coors, vertex_conn, gps)

    (level, coors, conn,
     vdofs, mat_ids) = create_output(eval_dofs, eval_coors,
                                     vertex_conn.shape[0], ps,
                                     min_level=min_level,
                                     max_level=max_level, eps=eps)

    mesh = Mesh.from_data('linearized_mesh', coors, None, [conn], [mat_ids],
                          field.domain.mesh.descs)

    out = {}
    out[name] = Struct(name='output_data', mode='vertex',
                       data=vdofs, var_name=name, dofs=None,
                       mesh=mesh, level=level)

    out = convert_complex_output(out)

    return out
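A hedged wrapper sketch showing how the function above is typically called and its result written out; the fields, materials and variables are assumed to be set up elsewhere (e.g. by a Problem instance), and the helper name is hypothetical.

def write_expression_vtk(expression, name, field_name, fields, materials,
                         variables, filename, functions=None):
    """Hypothetical helper: evaluate `expression` and write the linearized
    result to `filename`."""
    out = create_expression_output(expression, name, field_name,
                                   fields, materials, variables,
                                   functions=functions, mode='eval',
                                   max_level=3, eps=1e-3)
    # The linearized mesh travels with the data (see out[name].mesh above).
    out[name].mesh.write(filename, io='auto', out=out)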
Example #54
0
File: save_basis.py Project: ahah43/sfepy
def main():
    parser = ArgumentParser(description=__doc__)
    parser.add_argument('--version', action='version', version='%(prog)s')
    parser.add_argument('-b',
                        '--basis',
                        metavar='name',
                        action='store',
                        dest='basis',
                        default='lagrange',
                        help=helps['basis'])
    parser.add_argument('-d',
                        '--derivative',
                        metavar='d',
                        type=int,
                        action='store',
                        dest='derivative',
                        default=0,
                        help=helps['derivative'])
    parser.add_argument('-n',
                        '--max-order',
                        metavar='order',
                        type=int,
                        action='store',
                        dest='max_order',
                        default=2,
                        help=helps['max_order'])
    parser.add_argument('-g',
                        '--geometry',
                        metavar='name',
                        action='store',
                        dest='geometry',
                        default='2_4',
                        help=helps['geometry'])
    parser.add_argument('-m',
                        '--mesh',
                        metavar='mesh',
                        action='store',
                        dest='mesh',
                        default=None,
                        help=helps['mesh'])
    parser.add_argument('--permutations',
                        metavar='permutations',
                        action='store',
                        dest='permutations',
                        default=None,
                        help=helps['permutations'])
    parser.add_argument('--dofs',
                        metavar='dofs',
                        action='store',
                        dest='dofs',
                        default=None,
                        help=helps['dofs'])
    parser.add_argument('-l',
                        '--lin-options',
                        metavar='options',
                        action='store',
                        dest='lin_options',
                        default='min_level=2,max_level=5,eps=1e-3',
                        help=helps['lin_options'])
    parser.add_argument('--plot-dofs',
                        action='store_true',
                        dest='plot_dofs',
                        default=False,
                        help=helps['plot_dofs'])
    parser.add_argument('output_dir')
    options = parser.parse_args()

    output_dir = options.output_dir

    output('polynomial space:', options.basis)
    output('max. order:', options.max_order)

    lin = Struct(kind='adaptive', min_level=2, max_level=5, eps=1e-3)
    for opt in options.lin_options.split(','):
        key, val = opt.split('=')
        setattr(lin, key, eval(val))

    if options.mesh is None:
        dim, n_ep = int(options.geometry[0]), int(options.geometry[2])
        output('reference element geometry:')
        output('  dimension: %d, vertices: %d' % (dim, n_ep))

        gel = GeometryElement(options.geometry)
        gps = PolySpace.any_from_args(None, gel, 1, base=options.basis)
        ps = PolySpace.any_from_args(None,
                                     gel,
                                     options.max_order,
                                     base=options.basis)

        n_digit, _format = get_print_info(ps.n_nod, fill='0')
        name_template = os.path.join(output_dir, 'bf_%s.vtk' % _format)
        for ip in get_dofs(options.dofs, ps.n_nod):
            output('shape function %d...' % ip)

            def eval_dofs(iels, rx):
                if options.derivative == 0:
                    bf = ps.eval_base(rx).squeeze()
                    rvals = bf[None, :, ip:ip + 1]

                else:
                    bfg = ps.eval_base(rx, diff=True)
                    rvals = bfg[None, ..., ip]

                return rvals

            def eval_coors(iels, rx):
                bf = gps.eval_base(rx).squeeze()
                coors = nm.dot(bf, gel.coors)[None, ...]
                return coors

            (level, coors, conn, vdofs,
             mat_ids) = create_output(eval_dofs,
                                      eval_coors,
                                      1,
                                      ps,
                                      min_level=lin.min_level,
                                      max_level=lin.max_level,
                                      eps=lin.eps)
            out = {
                'bf':
                Struct(name='output_data',
                       mode='vertex',
                       data=vdofs,
                       var_name='bf',
                       dofs=None)
            }

            mesh = Mesh.from_data('bf_mesh', coors, None, [conn], [mat_ids],
                                  [options.geometry])

            name = name_template % ip
            ensure_path(name)
            mesh.write(name, out=out)

            output('...done (%s)' % name)

    else:
        mesh = Mesh.from_file(options.mesh)
        output('mesh geometry:')
        output('  dimension: %d, vertices: %d, elements: %d' %
               (mesh.dim, mesh.n_nod, mesh.n_el))

        if options.permutations:
            if options.permutations == 'all':
                from sfepy.linalg import cycle
                gel = GeometryElement(mesh.descs[0])
                n_perms = gel.get_conn_permutations().shape[0]
                all_permutations = [ii for ii in cycle(mesh.n_el * [n_perms])]

            else:
                all_permutations = [
                    int(ii) for ii in options.permutations.split(',')
                ]
                all_permutations = nm.array(all_permutations)
                np = len(all_permutations)
                all_permutations.shape = (np // mesh.n_el, mesh.n_el)

            output('using connectivity permutations:\n', all_permutations)

        else:
            all_permutations = [None]

        for ip, permutations in enumerate(all_permutations):
            if permutations is None:
                suffix = ''

            else:
                suffix = '_' + '_'.join('%d' % ii for ii in permutations)

            save_basis_on_mesh(mesh, options, output_dir, lin, permutations,
                               suffix)
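The script above evaluates --lin-options values with eval(); a hedged, self-contained alternative using the standard library's ast.literal_eval parses the same 'key=value,key=value' strings without executing arbitrary code:

from ast import literal_eval

def parse_kv_options(text, defaults=None):
    """Parse 'min_level=2,max_level=5,eps=1e-3' into a dict of values."""
    opts = dict(defaults or {})
    for item in text.split(','):
        key, val = item.split('=', 1)
        try:
            opts[key.strip()] = literal_eval(val.strip())
        except (ValueError, SyntaxError):
            opts[key.strip()] = val.strip()  # fall back to the raw string
    return opts

print(parse_kv_options('min_level=2,max_level=5,eps=1e-3'))
# -> {'min_level': 2, 'max_level': 5, 'eps': 0.001}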
Example #55
0
def main(argv):
    if argv is None:
        argv = sys.argv[1:]

    parser = create_argument_parser()
    args = parser.parse_args(argv)

    conf_file_name = args.problem_file

    pc = get_parametrized_conf(conf_file_name, args)

    if args.output_dir is None:
        output_folder = pjoin(outputs_folder, "output", pc.example_name)
    elif "{}" in args.output_dir:
        output_folder = args.output_dir.format(pc.example_name)
    else:
        output_folder = args.output_dir

    output_name_trunk_folder = pjoin(output_folder, str(pc.approx_order) + "/")

    configure_output({
        'output_screen':
        not args.no_output_screen,
        'output_log_name':
        pjoin(output_name_trunk_folder, "last_run.txt")
    })

    output("Processing conf file {}".format(conf_file_name))
    output("----------------Running--------------------------")
    output("{}: {}".format(pc.example_name, time.asctime()))

    output_name_trunk_name = pc.example_name + str(pc.approx_order)
    output_name_trunk = pjoin(output_name_trunk_folder, output_name_trunk_name)
    ensure_path(output_name_trunk_folder)
    output_format = "{}.*.{}".format(
        output_name_trunk, pc.options.output_format if hasattr(
            pc.options, "output_format") else "vtk")

    output("Output set to {}, clearing.".format(output_format))
    clear_folder(output_format, confirm=False)

    sa = PDESolverApp(
        pc,
        Struct(output_filename_trunk=output_name_trunk,
               save_ebc=False,
               save_ebc_nodes=False,
               save_region=False,
               save_regions=False,
               save_regions_as_groups=False,
               save_field_meshes=False,
               solve_not=False), "sfepy")
    tt = time.process_time()
    sa()
    elapsed = time.process_time() - tt
    output("{}: {}".format(pc.example_name, time.asctime()))
    output("------------------Finished------------------\n\n")

    if pc.dim == 1 and args.doplot:
        if pc.transient:
            load_times = min(pc.options.save_times, sa.problem.ts.n_step)
            load_and_plot_fun(
                output_name_trunk_folder,
                output_name_trunk_name,
                pc.t0,
                pc.t1,
                load_times,
                pc.get_ic,
                # exact=getattr(pc, "analytic_sol", None),
                polar=False,
                compare=False)
        else:
            load_times = 1
            load_and_plot_fun(output_name_trunk_folder, output_name_trunk_name,
                              pc.t0, pc.t1, load_times)
Example #56
0
def solve_problem(mesh_filename, options, comm):
    order = options.order

    rank, size = comm.Get_rank(), comm.Get_size()

    output('rank', rank, 'of', size)

    mesh = Mesh.from_file(mesh_filename)

    if rank == 0:
        cell_tasks = pl.partition_mesh(mesh,
                                       size,
                                       use_metis=options.metis,
                                       verbose=True)

    else:
        cell_tasks = None

    domain = FEDomain('domain', mesh)
    omega = domain.create_region('Omega', 'all')
    field = Field.from_args('fu', nm.float64, 1, omega, approx_order=order)

    output('distributing field %s...' % field.name)
    tt = time.clock()

    distribute = pl.distribute_fields_dofs
    lfds, gfds = distribute([field],
                            cell_tasks,
                            is_overlap=True,
                            save_inter_regions=options.save_inter_regions,
                            output_dir=options.output_dir,
                            comm=comm,
                            verbose=True)
    lfd = lfds[0]

    output('...done in', time.clock() - tt)

    if rank == 0:
        dof_maps = gfds[0].dof_maps
        id_map = gfds[0].id_map

        if options.verify:
            verify_save_dof_maps(field,
                                 cell_tasks,
                                 dof_maps,
                                 id_map,
                                 options,
                                 verbose=True)

        if options.plot:
            ppd.plot_partitioning([None, None], field, cell_tasks, gfds[0],
                                  options.output_dir, size)

    output('creating local problem...')
    tt = time.clock()

    omega_gi = Region.from_cells(lfd.cells, field.domain)
    omega_gi.finalize()
    omega_gi.update_shape()

    pb = create_local_problem(omega_gi, order)

    output('...done in', time.clock() - tt)

    variables = pb.get_variables()
    eqs = pb.equations

    u_i = variables['u_i']
    field_i = u_i.field

    if options.plot:
        ppd.plot_local_dofs([None, None], field, field_i, omega_gi,
                            options.output_dir, rank)

    output('allocating global system...')
    tt = time.clock()

    sizes, drange = pl.get_sizes(lfd.petsc_dofs_range, field.n_nod, 1)
    output('sizes:', sizes)
    output('drange:', drange)

    pdofs = pl.get_local_ordering(field_i, lfd.petsc_dofs_conn)

    output('pdofs:', pdofs)

    pmtx, psol, prhs = pl.create_petsc_system(pb.mtx_a,
                                              sizes,
                                              pdofs,
                                              drange,
                                              is_overlap=True,
                                              comm=comm,
                                              verbose=True)

    output('...done in', time.clock() - tt)

    output('evaluating local problem...')
    tt = time.clock()

    state = State(variables)
    state.fill(0.0)
    state.apply_ebc()

    rhs_i = eqs.eval_residuals(state())
    # This must be after pl.create_petsc_system() call!
    mtx_i = eqs.eval_tangent_matrices(state(), pb.mtx_a)

    output('...done in', time.clock() - tt)

    output('assembling global system...')
    tt = time.clock()

    pl.apply_ebc_to_matrix(mtx_i, u_i.eq_map.eq_ebc)
    pl.assemble_rhs_to_petsc(prhs,
                             rhs_i,
                             pdofs,
                             drange,
                             is_overlap=True,
                             comm=comm,
                             verbose=True)
    pl.assemble_mtx_to_petsc(pmtx,
                             mtx_i,
                             pdofs,
                             drange,
                             is_overlap=True,
                             comm=comm,
                             verbose=True)

    output('...done in', time.clock() - tt)

    output('creating solver...')
    tt = time.clock()

    conf = Struct(method='cg',
                  precond='gamg',
                  sub_precond='none',
                  i_max=10000,
                  eps_a=1e-50,
                  eps_r=1e-5,
                  eps_d=1e4,
                  verbose=True)
    status = {}
    ls = PETScKrylovSolver(conf, comm=comm, mtx=pmtx, status=status)

    output('...done in', time.clock() - tt)

    output('solving...')
    tt = time.clock()

    psol = ls(prhs, psol, conf)

    psol_i = pl.create_local_petsc_vector(pdofs)
    gather, scatter = pl.create_gather_scatter(pdofs, psol_i, psol, comm=comm)

    scatter(psol_i, psol)

    sol0_i = state() - psol_i[...]
    psol_i[...] = sol0_i

    gather(psol, psol_i)

    output('...done in', time.clock() - tt)

    output('saving solution...')
    tt = time.clock()

    u_i.set_data(sol0_i)
    out = u_i.create_output()

    filename = os.path.join(options.output_dir, 'sol_%02d.h5' % comm.rank)
    pb.domain.mesh.write(filename, io='auto', out=out)

    gather_to_zero = pl.create_gather_to_zero(psol)

    psol_full = gather_to_zero(psol)

    if comm.rank == 0:
        sol = psol_full[...].copy()[id_map]

        u = FieldVariable('u',
                          'parameter',
                          field,
                          primary_var_name='(set-to-None)')

        filename = os.path.join(options.output_dir, 'sol.h5')
        if (order == 1) or (options.linearization == 'strip'):
            out = u.create_output(sol)
            mesh.write(filename, io='auto', out=out)

        else:
            out = u.create_output(sol,
                                  linearization=Struct(kind='adaptive',
                                                       min_level=0,
                                                       max_level=order,
                                                       eps=1e-3))

            out['u'].mesh.write(filename, io='auto', out=out)

    output('...done in', time.clock() - tt)

    if options.show:
        plt.show()
Example #57
0
    def process_options(self):
        get = self.options.get

        return Struct(eigensolver=get('eigensolver', 'eig.sgscipy'))
Example #58
0
    def __init__(self, name, mesh, share_mesh=True, n_point=None, **kwargs):
        """
        Parameters
        ----------
        name : str
            The probe name, set automatically by the subclasses.
        mesh : Mesh instance
            The FE mesh where the variables to be probed are defined.
        share_mesh : bool
            Set to True to indicate that all the probes will work on the same
            mesh. Certain data are then computed only for the first probe and
            cached.
        n_point : int
           The (fixed) number of probe points, when positive. When non-positive,
           the number of points is adaptively increased starting from -n_point,
           until the neighboring point distance is less than the diameter of the
           elements enclosing the points. When None, it is set to -10.

        For additional parameters see the __init__() docstrings of the
        subclasses.

        Notes
        -----
        If the mesh contains vertices that are not contained in any element, we
        shift coordinates of such vertices so that they never match in the
        nearest node search.
        """
        Struct.__init__(self, name=name, mesh=mesh, **kwargs)

        self.set_n_point(n_point)

        self.options = Struct(close_limit=0.1, size_hint=None)

        self.is_refined = False

        tt = time.clock()
        if share_mesh:
            if Probe.cache.iconn is None:
                offsets, iconn = make_inverse_connectivity(mesh.conns,
                                                           mesh.n_nod,
                                                           ret_offsets=True)
                Probe.cache.iconn = iconn
                Probe.cache.offsets = offsets
            self.cache = Probe.cache

        else:
            offsets, iconn = make_inverse_connectivity(mesh.conns,
                                                       mesh.n_nod,
                                                       ret_offsets=True)
            self.cache = Struct(name='probe_cache',
                                offsets=offsets,
                                iconn=iconn,
                                kdtree=None)
        output('iconn: %f s' % (time.clock() - tt))

        i_bad = nm.where(nm.diff(self.cache.offsets) == 0)[0]
        if len(i_bad):
            bbox = mesh.get_bounding_box()
            mesh.coors[i_bad] = bbox[1] + bbox[1] - bbox[0]
            output('warning: some vertices are not in any element!')
            output('warning: vertex-based results will be wrong!')

        tt = time.clock()
        if share_mesh:
            if Probe.cache.kdtree is None:
                self.cache.kdtree = KDTree(mesh.coors)

        else:
            self.cache.kdtree = KDTree(mesh.coors)

        output('kdtree: %f s' % (time.clock() - tt))
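A small, self-contained sketch of the nearest-vertex lookup that the cached KDTree above enables (scipy.spatial.KDTree is a real dependency; the coordinates and probe points here are made up):

import numpy as nm
from scipy.spatial import KDTree

coors = nm.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])
kdtree = KDTree(coors)

probe_points = nm.array([[0.1, 0.2], [0.9, 0.95]])
dist, ik = kdtree.query(probe_points)
print(ik)    # indices of the nearest mesh vertices: [0 3]
print(dist)  # distances to those vertices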
Example #59
0
    def __init__(self, name, problem, expressions, labels):
        Struct.__init__(self,
                        name=name,
                        problem=problem,
                        expressions=expressions,
                        labels=labels)
Example #60
0
def _gen_common_data(orders, gels, report):
    import sfepy
    from sfepy.base.base import Struct
    from sfepy.linalg import combine
    from sfepy.discrete import FieldVariable, Integral
    from sfepy.discrete.fem import Mesh, FEDomain, Field
    from sfepy.discrete.common.global_interp import get_ref_coors

    bases = ([ii
              for ii in combine([['2_4', '3_8'], ['lagrange', 'lobatto']])] +
             [ii for ii in combine([['2_3', '3_4'], ['lagrange']])])
    for geom, poly_space_base in bases:
        report('geometry: %s, base: %s' % (geom, poly_space_base))

        order = orders[geom]
        integral = Integral('i', order=order)

        aux = '' if geom in ['2_4', '3_8'] else 'z'
        mesh0 = Mesh.from_file('meshes/elements/%s_2%s.mesh' % (geom, aux),
                               prefix_dir=sfepy.data_dir)
        gel = gels[geom]

        perms = gel.get_conn_permutations()

        qps, qp_weights = integral.get_qp(gel.surface_facet.name)
        zz = nm.zeros_like(qps[:, :1])
        qps = nm.hstack(([qps] + [zz]))

        shift = shifts[geom]
        rcoors = nm.ascontiguousarray(qps + shift[:1, :] - shift[1:, :])
        ccoors = nm.ascontiguousarray(qps + shift[:1, :] + shift[1:, :])

        for ir, pr in enumerate(perms):
            for ic, pc in enumerate(perms):
                report('ir: %d, ic: %d' % (ir, ic))
                report('pr: %s, pc: %s' % (pr, pc))

                mesh = mesh0.copy()
                conn = mesh.get_conn(gel.name)
                conn[0, :] = conn[0, pr]
                conn[1, :] = conn[1, pc]

                cache = Struct(mesh=mesh)

                domain = FEDomain('domain', mesh)
                omega = domain.create_region('Omega', 'all')
                region = domain.create_region('Facet', rsels[geom], 'facet')
                field = Field.from_args('f',
                                        nm.float64,
                                        shape=1,
                                        region=omega,
                                        approx_order=order,
                                        poly_space_base=poly_space_base)
                var = FieldVariable('u', 'unknown', field)
                report('# dofs: %d' % var.n_dof)

                vec = nm.empty(var.n_dof, dtype=var.dtype)

                ap = field.ap
                ps = ap.interp.poly_spaces['v']

                dofs = field.get_dofs_in_region(region, merge=False)
                edofs, fdofs = nm.unique(dofs[1]), nm.unique(dofs[2])

                rrc, rcells, rstatus = get_ref_coors(field,
                                                     rcoors,
                                                     cache=cache)
                crc, ccells, cstatus = get_ref_coors(field,
                                                     ccoors,
                                                     cache=cache)
                assert_((rstatus == 0).all() and (cstatus == 0).all())

                yield (geom, poly_space_base, qp_weights, mesh, ir, ic, ap, ps,
                       rrc, rcells[0], crc, ccells[0], vec, edofs, fdofs)
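A hedged sketch of how a test might consume the generator above; the actual comparison logic in sfepy's continuity tests is more involved and the function name here is hypothetical.

def check_all_cases(orders, gels, report):
    """Iterate over all geometries and connectivity permutations."""
    for (geom, poly_space_base, qp_weights, mesh, ir, ic, ap, ps,
         rrc, rcell, crc, ccell, vec, edofs, fdofs) in \
            _gen_common_data(orders, gels, report):
        # ... evaluate the basis in rrc/crc here and compare the values
        # obtained from the two permuted cells ...
        report('checked %s/%s, permutations (%d, %d)'
               % (geom, poly_space_base, ir, ic))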