def test_interpolation_two_meshes(self):
    from sfepy import data_dir
    from sfepy.fem import Mesh, Domain, H1NodalVolumeField, Variables

    m1 = Mesh('source mesh', data_dir + '/meshes/3d/block.mesh')

    m2 = Mesh('target mesh', data_dir + '/meshes/3d/cube_medium_tetra.mesh')
    m2.coors *= 2.0

    bbox = m1.get_bounding_box()
    dd = bbox[1, :] - bbox[0, :]
    data = nm.sin(4.0 * nm.pi * m1.coors[:, 0:1] / dd[0]) \
           * nm.cos(4.0 * nm.pi * m1.coors[:, 1:2] / dd[1])

    variables1 = {
        'u': ('unknown field', 'scalar_tp', 0),
        'v': ('test field', 'scalar_tp', 'u'),
    }

    variables2 = {
        'u': ('unknown field', 'scalar_si', 0),
        'v': ('test field', 'scalar_si', 'u'),
    }

    d1 = Domain('d1', m1)
    omega1 = d1.create_region('Omega', 'all')
    field1 = H1NodalVolumeField('scalar_tp', nm.float64, (1, 1), omega1,
                                approx_order=1)
    ff1 = {field1.name: field1}

    d2 = Domain('d2', m2)
    omega2 = d2.create_region('Omega', 'all')
    field2 = H1NodalVolumeField('scalar_si', nm.float64, (1, 1), omega2,
                                approx_order=0)
    ff2 = {field2.name: field2}

    vv1 = Variables.from_conf(transform_variables(variables1), ff1)
    u1 = vv1['u']
    u1.set_from_mesh_vertices(data)

    vv2 = Variables.from_conf(transform_variables(variables2), ff2)
    u2 = vv2['u']

    # Performs interpolation, if other field differs from self.field
    # or, in particular, is defined on a different mesh.
    u2.set_from_other(u1, strategy='interpolation', close_limit=0.1)

    fname = in_dir(self.options.out_dir)
    u1.save_as_mesh(fname('test_mesh_interp_block_scalar.vtk'))
    u2.save_as_mesh(fname('test_mesh_interp_cube_scalar.vtk'))

    return True
def do_interpolation(m2, m1, data, field_name, force=False):
    """Interpolate data from m1 to m2."""
    from sfepy.fem import Domain, H1NodalVolumeField, Variables

    fields = {
        'scalar_si': ((1, 1), 'Omega', 2),
        'vector_si': ((3, 1), 'Omega', 2),
        'scalar_tp': ((1, 1), 'Omega', 1),
        'vector_tp': ((3, 1), 'Omega', 1),
    }

    d1 = Domain('d1', m1)
    omega1 = d1.create_region('Omega', 'all')

    f = fields[field_name]

    field1 = H1NodalVolumeField('f', nm.float64, f[0], d1.regions[f[1]],
                                approx_order=f[2])
    ff = {field1.name: field1}

    vv = Variables.from_conf(transform_variables(variables), ff)
    u1 = vv['u']
    u1.set_from_mesh_vertices(data)

    d2 = Domain('d2', m2)
    omega2 = d2.create_region('Omega', 'all')

    field2 = H1NodalVolumeField('f', nm.float64, f[0], d2.regions[f[1]],
                                approx_order=f[2])
    ff2 = {field2.name: field2}

    vv2 = Variables.from_conf(transform_variables(variables), ff2)
    u2 = vv2['u']

    if not force:
        # Performs interpolation, if other field differs from self.field
        # or, in particular, is defined on a different mesh.
        u2.set_from_other(u1, strategy='interpolation', close_limit=0.5)

    else:
        coors = u2.field.get_coor()
        vals = u1.evaluate_at(coors, close_limit=0.5)
        u2.set_data(vals)

    return u1, u2
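# Hedged usage sketch, not part of the original module: it mirrors the setup
# of test_interpolation_two_meshes() above and assumes that the module-level
# ``variables`` dict and transform_variables() used by do_interpolation() are
# available, and that the meshes ship with sfepy's data_dir. The helper name
# _example_do_interpolation is hypothetical.
def _example_do_interpolation():
    from sfepy import data_dir
    from sfepy.fem import Mesh

    m1 = Mesh('source mesh', data_dir + '/meshes/3d/block.mesh')
    m2 = Mesh('target mesh', data_dir + '/meshes/3d/cube_medium_tetra.mesh')

    # Nodal data on the source mesh: a sine profile along x.
    bbox = m1.get_bounding_box()
    dd = bbox[1, :] - bbox[0, :]
    data = nm.sin(4.0 * nm.pi * m1.coors[:, 0:1] / dd[0])

    # Interpolate the nodal data from m1 onto the nodes of m2.
    u1, u2 = do_interpolation(m2, m1, data, 'scalar_tp')
    return u1, u2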
def __init__(self, equations, setup=True,
             make_virtual=False, verbose=True):
    Container.__init__(self, equations)

    self.variables = Variables(self.collect_variables())
    self.materials = Materials(self.collect_materials())

    self.domain = self.get_domain()

    self.active_bcs = set()

    if setup:
        self.setup(make_virtual=make_virtual, verbose=verbose)
def standalone_setup(self):
    from sfepy.fem import create_adof_conns, Variables

    conn_info = {'aux': self.get_conn_info()}
    adcs = create_adof_conns(conn_info, None)

    variables = Variables(self.get_variables())
    variables.set_adof_conns(adcs)

    materials = self.get_materials(join=True)

    for mat in materials:
        mat.time_update(None, [Struct(terms=[self])])
def __init__(self, equations, setup=True,
             caches=None, cache_override=False,
             make_virtual=False, verbose=True):
    Container.__init__(self, equations)

    self.variables = Variables(self.collect_variables())

    self.caches = get_default(caches, DataCaches())

    self.clear_geometries()

    if setup:
        self.setup(cache_override=cache_override,
                   make_virtual=make_virtual, verbose=verbose)
def __init__(self, equations):
    Container.__init__(self, equations)

    self.variables = Variables(self.collect_variables())
    self.materials = Materials(self.collect_materials())

    self.domain = self.get_domain()

    self.active_bcs = set()

    self.collect_conn_info()
def test_pbc(self):
    from sfepy.fem import Variables, Conditions

    problem = self.problem
    conf = self.conf

    ebcs = Conditions.from_conf(conf.ebcs, problem.domain.regions)
    epbcs = Conditions.from_conf(conf.epbcs, problem.domain.regions)

    variables = Variables.from_conf(conf.variables, problem.fields)
    variables.equation_mapping(ebcs, epbcs, None, problem.functions)
    state = variables.create_state_vector()
    variables.apply_ebc(state)

    return variables.has_ebc(state)
def test_invariance_qp(self):
    from sfepy import data_dir
    from sfepy.fem import (Mesh, Domain, H1NodalVolumeField,
                           Variables, Integral)
    from sfepy.terms import Term
    from sfepy.fem.mappings import get_physical_qps

    mesh = Mesh('source mesh', data_dir + '/meshes/3d/block.mesh')

    bbox = mesh.get_bounding_box()
    dd = bbox[1, :] - bbox[0, :]
    data = nm.sin(4.0 * nm.pi * mesh.coors[:, 0:1] / dd[0]) \
           * nm.cos(4.0 * nm.pi * mesh.coors[:, 1:2] / dd[1])

    variables = {
        'u': ('unknown field', 'scalar_tp', 0),
        'v': ('test field', 'scalar_tp', 'u'),
    }

    domain = Domain('domain', mesh)
    omega = domain.create_region('Omega', 'all')

    field = H1NodalVolumeField('scalar_tp', nm.float64, 1, omega,
                               approx_order=1)
    ff = {field.name: field}

    vv = Variables.from_conf(transform_variables(variables), ff)
    u = vv['u']
    u.set_from_mesh_vertices(data)

    integral = Integral('i', order=2)
    term = Term.new('ev_volume_integrate(u)', integral, omega, u=u)
    term.setup()
    val1, _ = term.evaluate(mode='qp')
    val1 = val1.ravel()

    qps = get_physical_qps(omega, integral)
    coors = qps.get_merged_values()
    val2 = u.evaluate_at(coors).ravel()

    self.report('max. difference:', nm.abs(val1 - val2).max())
    ok = nm.allclose(val1, val2, rtol=0.0, atol=1e-12)
    self.report('invariance in qp: %s' % ok)

    return ok
def test_consistency_d_dw(self):
    from sfepy.fem import Function, Variables

    ok = True
    pb = self.problem
    for aux in test_terms:
        term_template, (prefix, par_name, d_vars, dw_vars, mat_mode) = aux
        print term_template, prefix, par_name, d_vars, dw_vars, mat_mode

        term1 = term_template % ((prefix,) + d_vars)

        variables = Variables.from_conf(self.conf.variables, pb.fields)

        for var_name in d_vars:
            var = variables[var_name]
            n_dof = var.field.n_nod * var.field.shape[0]
            aux = nm.arange(n_dof, dtype=nm.float64)
            var.data_from_data(aux)

        pb.materials['m'].function.set_extra_args(term=mat_mode)

        if prefix == 'd':
            val1 = pb.evaluate(term1, var_dict=variables.as_dict())

        else:
            val1 = pb.evaluate(term1, call_mode='d_eval',
                               var_dict=variables.as_dict())

        self.report('%s: %s' % (term1, val1))

        term2 = term_template % (('dw',) + dw_vars[:2])

        vec, vv = pb.evaluate(term2, mode='weak',
                              var_dict=variables.as_dict(),
                              ret_variables=True)

        pvec = vv.get_state_part_view(vec, dw_vars[2])
        val2 = nm.dot(variables[par_name](), pvec)
        self.report('%s: %s' % (term2, val2))

        err = nm.abs(val1 - val2) / nm.abs(val1)
        _ok = err < 1e-12
        self.report('relative difference: %e -> %s' % (err, _ok))

        ok = ok and _ok

    return ok
def test_consistency_d_dw(self):
    from sfepy.fem import Variables

    ok = True
    pb = self.problem
    for aux in test_terms:
        term_template, (prefix, par_name, d_vars, dw_vars) = aux
        print term_template, prefix, par_name, d_vars, dw_vars

        term1 = term_template % ((prefix,) + d_vars)

        variables = Variables.from_conf(self.conf.variables, pb.fields)

        for var_name in d_vars:
            var = variables[var_name]
            n_dof = var.field.n_nod * var.field.shape[0]
            aux = nm.arange(n_dof, dtype=nm.float64)
            var.set_data(aux)

        if prefix == 'd':
            val1 = pb.evaluate(term1, var_dict=variables.as_dict())

        else:
            val1 = pb.evaluate(term1, call_mode='d_eval',
                               var_dict=variables.as_dict())

        self.report('%s: %s' % (term1, val1))

        term2 = term_template % (('dw',) + dw_vars[:2])

        vec, vv = pb.evaluate(term2, mode='weak',
                              var_dict=variables.as_dict(),
                              ret_variables=True)

        pvec = vv.get_state_part_view(vec, dw_vars[2])
        val2 = nm.dot(variables[par_name](), pvec)
        self.report('%s: %s' % (term2, val2))

        err = nm.abs(val1 - val2) / nm.abs(val1)
        _ok = err < 1e-12
        self.report('relative difference: %e -> %s' % (err, _ok))

        ok = ok and _ok

    return ok
def make_term_args(arg_shapes, arg_kinds, arg_types, ats_mode, domain):
    from sfepy.base.base import basestr
    from sfepy.fem import Field, FieldVariable, Material, Variables, Materials
    from sfepy.mechanics.tensors import dim2sym

    omega = domain.regions['Omega']
    dim = domain.shape.dim
    sym = dim2sym(dim)

    def _parse_scalar_shape(sh):
        if isinstance(sh, basestr):
            if sh == 'D':
                return dim

            elif sh == 'S':
                return sym

            elif sh == 'N': # General number ;)
                return 5

            else:
                return int(sh)

        else:
            return sh

    def _parse_tuple_shape(sh):
        if isinstance(sh, basestr):
            return [_parse_scalar_shape(ii.strip()) for ii in sh.split(',')]

        else:
            return (int(sh),)

    args = {}
    str_args = []
    materials = []
    variables = []
    for ii, arg_kind in enumerate(arg_kinds):
        if ats_mode is not None:
            extended_ats = arg_types[ii] + ('/%s' % ats_mode)

        else:
            extended_ats = arg_types[ii]

        try:
            sh = arg_shapes[arg_types[ii]]

        except KeyError:
            sh = arg_shapes[extended_ats]

        if arg_kind.endswith('variable'):
            shape = _parse_scalar_shape(sh[0] if isinstance(sh, tuple) else sh)
            field = Field.from_args('f%d' % ii, nm.float64, shape, omega,
                                    approx_order=1)

            if arg_kind == 'virtual_variable':
                if sh[1] is not None:
                    istate = arg_types.index(sh[1])

                else:
                    # Only virtual variable in arguments.
                    istate = -1

                    # -> Make fake variable.
                    var = FieldVariable('u-1', 'unknown', field, shape)
                    var.set_constant(0.0)
                    variables.append(var)

                var = FieldVariable('v', 'test', field, shape,
                                    primary_var_name='u%d' % istate)

            elif arg_kind == 'state_variable':
                var = FieldVariable('u%d' % ii, 'unknown', field, shape)
                var.set_constant(0.0)

            elif arg_kind == 'parameter_variable':
                var = FieldVariable('p%d' % ii, 'parameter', field, shape,
                                    primary_var_name='(set-to-None)')
                var.set_constant(0.0)

            variables.append(var)
            str_args.append(var.name)
            args[var.name] = var

        elif arg_kind.endswith('material'):
            if sh is None: # Switched-off opt_material.
                continue

            prefix = ''
            if isinstance(sh, basestr):
                aux = sh.split(':')
                if len(aux) == 2:
                    prefix, sh = aux

            shape = _parse_tuple_shape(sh)
            if (len(shape) > 1) or (shape[0] > 1):
                # Array.
                values = {'%sc%d' % (prefix, ii):
                          nm.ones(shape, dtype=nm.float64)}

            elif (len(shape) == 1) and (shape[0] == 1):
                # Single scalar as a special value.
                values = {'.c%d' % ii: 1.0}

            else:
                raise ValueError('wrong material shape! (%s)' % shape)

            mat = Material('m%d' % ii, values=values)

            materials.append(mat)
            str_args.append(mat.name + '.' + 'c%d' % ii)
            args[mat.name] = mat

        else:
            str_args.append('user%d' % ii)
            args[str_args[-1]] = None

    materials = Materials(materials)
    variables = Variables(variables)

    return args, str_args, materials, variables
def save_basis_on_mesh(mesh, options, output_dir, lin,
                       permutations=None, suffix=''):
    if permutations is not None:
        mesh = mesh.copy()
        for ig, conn in enumerate(mesh.conns):
            gel = GeometryElement(mesh.descs[ig])
            perms = gel.get_conn_permutations()[permutations]
            n_el, n_ep = conn.shape
            offsets = nm.arange(n_el) * n_ep
            conn[:] = conn.take(perms + offsets[:, None])

    domain = Domain('domain', mesh)

    omega = domain.create_region('Omega', 'all')
    field = Field.from_args('f', nm.float64, shape=1, region=omega,
                            approx_order=options.max_order,
                            poly_space_base=options.basis)
    var = FieldVariable('u', 'unknown', field, 1)

    if options.plot_dofs:
        import sfepy.postprocess.plot_dofs as pd
        group = domain.groups[0]
        ax = pd.plot_mesh(None, mesh.coors, mesh.conns[0], group.gel.edges)
        ax = pd.plot_global_dofs(ax, field.get_coor(), field.aps[0].econn)
        ax = pd.plot_local_dofs(ax, field.get_coor(), field.aps[0].econn)
        if options.dofs is not None:
            ax = pd.plot_nodes(ax, field.get_coor(), field.aps[0].econn,
                               field.aps[0].interp.poly_spaces['v'].nodes,
                               get_dofs(options.dofs, var.n_dof))
        pd.plt.show()

    output('dofs: %d' % var.n_dof)

    vec = nm.empty(var.n_dof, dtype=var.dtype)
    n_digit, _format = get_print_info(var.n_dof, fill='0')
    name_template = os.path.join(output_dir,
                                 'dof_%s%s.vtk' % (_format, suffix))
    for ip in get_dofs(options.dofs, var.n_dof):
        output('dof %d...' % ip)

        vec.fill(0.0)
        vec[ip] = 1.0

        var.set_data(vec)

        if options.derivative == 0:
            out = var.create_output(vec, linearization=lin)

        else:
            out = create_expression_output('ev_grad.ie.Elements(u)',
                                           'u', 'f', {'f': field}, None,
                                           Variables([var]),
                                           mode='qp', verbose=False,
                                           min_level=lin.min_level,
                                           max_level=lin.max_level,
                                           eps=lin.eps)

        name = name_template % ip
        ensure_path(name)
        out['u'].mesh.write(name, out=out)

        output('...done (%s)' % name)
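# Hedged usage sketch, not part of the original script: it builds a minimal
# options Struct whose attribute names mirror those accessed by
# save_basis_on_mesh() above, and saves the P1 Lagrange basis on a mesh
# loaded from a user-supplied file. The helper name and the mesh path
# argument are hypothetical.
def _example_save_basis(mesh_filename, output_dir):
    # Linearization settings, as in main() below.
    lin = Struct(kind='adaptive', min_level=2, max_level=5, eps=1e-3)

    # Only the attributes read by save_basis_on_mesh() are provided.
    options = Struct(basis='lagrange', max_order=1, derivative=0,
                     plot_dofs=False, dofs=None)

    mesh = Mesh.from_file(mesh_filename)
    save_basis_on_mesh(mesh, options, output_dir, lin)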
def create_evaluable(expression, fields, materials, variables, integrals,
                     regions=None,
                     ebcs=None, epbcs=None, lcbcs=None, ts=None,
                     functions=None,
                     auto_init=False, mode='eval', extra_args=None,
                     verbose=True, kwargs=None):
    """
    Create evaluable object (equations and corresponding variables)
    from the `expression` string.

    Parameters
    ----------
    expression : str
        The expression to evaluate.
    fields : dict
        The dictionary of fields used in `variables`.
    materials : Materials instance
        The materials used in the expression.
    variables : Variables instance
        The variables used in the expression.
    integrals : Integrals instance
        The integrals to be used.
    regions : Region instance or list of Region instances
        The region(s) to be used. If not given, the regions defined
        within the fields domain are used.
    ebcs : Conditions instance, optional
        The essential (Dirichlet) boundary conditions for 'weak' mode.
    epbcs : Conditions instance, optional
        The periodic boundary conditions for 'weak' mode.
    lcbcs : Conditions instance, optional
        The linear combination boundary conditions for 'weak' mode.
    ts : TimeStepper instance, optional
        The time stepper.
    functions : Functions instance, optional
        The user functions for boundary conditions, materials etc.
    auto_init : bool
        Set values of all variables to all zeros.
    mode : one of 'eval', 'el_avg', 'qp', 'weak'
        The evaluation mode - 'weak' means the finite element
        assembling, 'qp' requests the values in quadrature points,
        'el_avg' element averages and 'eval' means integration over
        each term region.
    extra_args : dict, optional
        Extra arguments to be passed to terms in the expression.
    verbose : bool
        If False, reduce verbosity.
    kwargs : dict, optional
        The variables (dictionary of (variable name) : (Variable
        instance)) to be used in the expression.

    Returns
    -------
    equations : Equations instance
        The equations that are ready to be evaluated.
    variables : Variables instance
        The variables used in the equations.
    """
    if kwargs is None:
        kwargs = {}

    if regions is not None:
        if isinstance(regions, Region):
            regions = [regions]

        regions = OneTypeList(Region, regions)

    else:
        regions = fields[fields.keys()[0]].domain.regions

    # Create temporary variables.
    aux_vars = Variables(variables)

    if extra_args is None:
        extra_args = kwargs

    else:
        extra_args = copy(extra_args)
        extra_args.update(kwargs)

    if ts is not None:
        extra_args.update({'ts': ts})

    equations = Equations.from_conf({'tmp': expression},
                                    aux_vars, regions, materials, integrals,
                                    setup=False, user=extra_args,
                                    verbose=verbose)
    equations.collect_conn_info()

    # The true variables used in the expression.
    variables = equations.variables

    if auto_init:
        for var in variables:
            var.init_data(step=0)

    if mode == 'weak':
        setup_dof_conns(equations.conn_info, verbose=verbose)
        equations.time_update(ts, ebcs, epbcs, lcbcs, functions,
                              verbose=verbose)

    else:
        setup_extra_data(equations.conn_info)

    return equations, variables
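# Hedged usage sketch, not part of the original module: it assumes a field
# dict, a Materials instance, a variable ``u`` defined on a region named
# 'Omega', and an Integrals instance containing an integral named 'i'; the
# expression string and these names are assumptions, not fixed API. Only
# create_evaluable() above and Equations.evaluate() below are relied upon.
def _example_create_evaluable(fields, materials, u, integrals):
    # Build the evaluable equations for a volume integral of ``u``.
    equations, variables = create_evaluable(
        'ev_volume_integrate.i.Omega(u)', fields, materials, [u], integrals,
        mode='eval')

    # 'eval' mode integrates the term over its region and returns the value.
    return equations.evaluate(mode='eval')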
class Equations(Container):

    @staticmethod
    def from_conf(conf, variables, regions, materials, integrals,
                  setup=True, user=None, make_virtual=False, verbose=True):

        objs = OneTypeList(Equation)

        conf = copy(conf)

        ii = 0
        for name, desc in conf.iteritems():
            if verbose:
                output('equation "%s":' % name)
                output(desc)
            eq = Equation.from_desc(name, desc, variables, regions,
                                    materials, integrals, user=user)
            objs.append(eq)
            ii += 1

        obj = Equations(objs, setup=setup, make_virtual=make_virtual,
                        verbose=verbose)

        return obj

    def __init__(self, equations, setup=True,
                 make_virtual=False, verbose=True):
        Container.__init__(self, equations)

        self.variables = Variables(self.collect_variables())
        self.materials = Materials(self.collect_materials())

        self.domain = self.get_domain()

        self.active_bcs = set()

        if setup:
            self.setup(make_virtual=make_virtual, verbose=verbose)

    def get_domain(self):
        domain = None

        for eq in self:
            for term in eq.terms:
                if term.has_region:
                    domain = term.region.domain

        return domain

    def setup(self, make_virtual=False, verbose=True):
        self.collect_conn_info()

        # This uses the conn_info created above.
        self.dof_conns = {}
        setup_dof_conns(self.conn_info, dof_conns=self.dof_conns,
                        make_virtual=make_virtual, verbose=verbose)

    def collect_materials(self):
        """
        Collect materials present in the terms of all equations.
        """
        materials = []
        for eq in self:
            materials.extend(eq.collect_materials())

        # Make the list items unique.
        materials = list(set(materials))

        return materials

    def reset_materials(self):
        """
        Clear material data so that next materials.time_update() is
        performed even for stationary materials.
        """
        self.materials.reset()

    def collect_variables(self):
        """
        Collect variables present in the terms of all equations.
        """
        variables = []
        for eq in self:
            variables.extend(eq.collect_variables())

        # Make the list items unique.
        variables = list(set(variables))

        return variables

    def get_variable(self, name):
        var = self.variables.get(name,
                                 msg_if_none='unknown variable! (%s)' % name)
        return var

    def collect_conn_info(self):
        """
        Collect connectivity information as defined by the equations.
        """
        self.conn_info = {}

        for eq in self:
            eq.collect_conn_info(self.conn_info)

        ## print_structs(self.conn_info)
        ## pause()

        return self.conn_info

    def get_variable_names(self):
        """Return the list of names of all variables used in equations."""
        vns = set()
        for eq in self:
            for term in eq.terms:
                vns.update(term.get_variable_names())
        return list(vns)

    def invalidate_term_caches(self):
        """
        Invalidate evaluate caches of variables present in equations.
        """
        for var in self.variables:
            var.invalidate_evaluate_cache()

    def print_terms(self):
        """
        Print names of equations and their terms.
        """
        output('equations:')
        for eq in self:
            output('  %s:' % eq.name)
            for term in eq.terms:
                output('    %+.2e * %s.%d.%s(%s)'
                       % (term.sign, term.name, term.integral.order,
                          term.region.name, term.arg_str))

    def time_update(self, ts, ebcs=None, epbcs=None, lcbcs=None,
                    functions=None, problem=None, verbose=True):
        """
        Update the equations for current time step.

        The update involves creating the mapping of active DOFs from/to
        all DOFs for all state variables, the setup of linear
        combination boundary conditions operators and the setup of
        active DOF connectivities.

        Parameters
        ----------
        ts : TimeStepper instance
            The time stepper.
        ebcs : Conditions instance, optional
            The essential (Dirichlet) boundary conditions.
        epbcs : Conditions instance, optional
            The periodic boundary conditions.
        lcbcs : Conditions instance, optional
            The linear combination boundary conditions.
        functions : Functions instance, optional
            The user functions for boundary conditions, materials, etc.
        problem : ProblemDefinition instance, optional
            The problem that can be passed to user functions as a context.
        verbose : bool
            If False, reduce verbosity.

        Returns
        -------
        graph_changed : bool
            The flag set to True if the current time step set of active
            boundary conditions differs from the set of the previous
            time step.
        """
        self.variables.time_update(ts, functions, verbose=verbose)

        active_bcs = self.variables.equation_mapping(ebcs, epbcs, ts,
                                                     functions,
                                                     problem=problem)
        graph_changed = active_bcs != self.active_bcs
        self.active_bcs = active_bcs

        self.variables.setup_lcbc_operators(lcbcs, ts, functions)

        self.variables.setup_adof_conns()

        for eq in self:
            for term in eq.terms:
                term.time_update(ts)

        return graph_changed

    def time_update_materials(self, ts, mode='normal', problem=None,
                              verbose=True):
        """
        Update data materials for current time and possibly also state.

        Parameters
        ----------
        ts : TimeStepper instance
            The time stepper.
        mode : 'normal', 'update' or 'force'
            The update mode, see
            :func:`sfepy.fem.materials.Material.time_update()`.
        problem : ProblemDefinition instance, optional
            The problem that can be passed to user functions as a context.
        verbose : bool
            If False, reduce verbosity.
        """
        self.materials.time_update(ts, self, mode=mode, problem=problem,
                                   verbose=verbose)

    def setup_initial_conditions(self, ics, functions):
        self.variables.setup_initial_conditions(ics, functions)

    def get_graph_conns(self, any_dof_conn=False, rdcs=None, cdcs=None):
        """
        Get DOF connectivities needed for creating tangent matrix graph.

        Parameters
        ----------
        any_dof_conn : bool
            By default, only volume DOF connectivities are used, with
            the exception of trace surface DOF connectivities. If True,
            any kind of DOF connectivities is allowed.
        rdcs, cdcs : arrays, optional
            Additional row and column DOF connectivities, corresponding
            to the variables used in the equations.

        Returns
        -------
        rdcs, cdcs : arrays
            The row and column DOF connectivities defining the matrix
            graph blocks.
        """
        if rdcs is None:
            rdcs = []
            cdcs = []

        elif cdcs is None:
            cdcs = copy(rdcs)

        else:
            assert_(len(rdcs) == len(cdcs))
            if rdcs is cdcs: # Make sure the lists are not the same object.
                rdcs = copy(rdcs)

        adcs = self.variables.adof_conns

        # Only volume dof connectivities are used, with the exception of trace
        # surface dof connectivities.
        shared = set()
        for key, ii, info in iter_dict_of_lists(self.conn_info,
                                                return_keys=True):
            rvar, cvar = info.virtual, info.state
            if (rvar is None) or (cvar is None):
                continue

            is_surface = rvar.is_surface or cvar.is_surface

            dct = info.dc_type.type
            if not (dct in ('volume', 'scalar') or is_surface
                    or info.is_trace or any_dof_conn):
                continue

            rreg_name = info.get_region_name(can_trace=False)
            creg_name = info.get_region_name()

            for rig, cig in info.iter_igs():
                rname = rvar.get_primary_name()
                rkey = (rname, rreg_name, dct, rig, False)
                ckey = (cvar.name, creg_name, dct, cig, info.is_trace)

                dc_key = (rkey, ckey)
                ## print dc_key

                if not dc_key in shared:
                    try:
                        rdcs.append(adcs[rkey])
                        cdcs.append(adcs[ckey])
                    except:
                        debug()
                    shared.add(dc_key)

        return rdcs, cdcs

    def create_matrix_graph(self, any_dof_conn=False, rdcs=None, cdcs=None,
                            shape=None):
        """
        Create tangent matrix graph, i.e. preallocate and initialize the
        sparse storage needed for the tangent matrix. Order of DOF
        connectivities is not important.

        Parameters
        ----------
        any_dof_conn : bool
            By default, only volume DOF connectivities are used, with
            the exception of trace surface DOF connectivities. If True,
            any kind of DOF connectivities is allowed.
        rdcs, cdcs : arrays, optional
            Additional row and column DOF connectivities, corresponding
            to the variables used in the equations.
        shape : tuple, optional
            The required shape, if it is different from the shape
            determined by the equations variables. This may be needed if
            additional row and column DOF connectivities are passed in.

        Returns
        -------
        matrix : csr_matrix
            The matrix graph in the form of a CSR matrix with
            preallocated structure and zero data.
        """
        if not self.variables.has_virtuals():
            output('no matrix (no test variables)!')
            return None

        shape = get_default(shape, self.variables.get_matrix_shape())
        output('matrix shape:', shape)
        if nm.prod(shape) == 0:
            output('no matrix (zero size)!')
            return None

        rdcs, cdcs = self.get_graph_conns(any_dof_conn=any_dof_conn,
                                          rdcs=rdcs, cdcs=cdcs)

        if not len(rdcs):
            output('no matrix (empty dof connectivities)!')
            return None

        output('assembling matrix graph...')
        tt = time.clock()

        nnz, prow, icol = create_mesh_graph(shape[0], shape[1],
                                            len(rdcs), rdcs, cdcs)

        output('...done in %.2f s' % (time.clock() - tt))
        output('matrix structural nonzeros: %d (%.2e%% fill)'
               % (nnz, float(nnz) / nm.prod(shape)))
        ## print ret, prow, icol, nnz

        data = nm.zeros((nnz,), dtype=self.variables.dtype)
        matrix = sp.csr_matrix((data, icol, prow), shape)
        ## matrix.save( 'matrix', format = '%d %d %e\n' )
        ## pause()

        return matrix

    ##
    # c: 02.04.2008, r: 02.04.2008
    def init_time(self, ts):
        pass

    ##
    # 08.06.2007, c
    def advance(self, ts):
        for eq in self:
            for term in eq.terms:
                term.advance(ts)

        self.variables.advance(ts)

    ##
    # Interface to self.variables.
    def create_state_vector(self):
        return self.variables.create_state_vector()

    def create_stripped_state_vector(self):
        return self.variables.create_stripped_state_vector()

    def strip_state_vector(self, vec, follow_epbc=False):
        """
        Strip a full vector by removing EBC dofs.

        Notes
        -----
        If 'follow_epbc' is True, values of EPBC master dofs are not
        simply thrown away, but added to the corresponding slave dofs,
        just like when assembling. For vectors with state (unknown)
        variables it should be set to False, for assembled vectors it
        should be set to True.
        """
        return self.variables.strip_state_vector(vec,
                                                 follow_epbc=follow_epbc)

    def make_full_vec(self, svec, force_value=None):
        """
        Make a full DOF vector satisfying E(P)BCs from a reduced DOF
        vector.
        """
        return self.variables.make_full_vec(svec, force_value)

    def set_variables_from_state(self, vec, step=0):
        """
        Set data (vectors of DOF values) of variables.

        Parameters
        ----------
        vec : array
            The state vector.
        step : int
            The time history step, 0 (default) = current.
        """
        self.variables.set_data(vec, step=step)

    def get_state_parts(self, vec=None):
        """
        Return parts of a state vector corresponding to individual state
        variables.

        Parameters
        ----------
        vec : array, optional
            The state vector. If not given, then the data stored in the
            variables are returned instead.

        Returns
        -------
        out : dict
            The dictionary of the state parts.
        """
        return self.variables.get_state_parts(vec)

    def set_data(self, data, step=0, ignore_unknown=False):
        """
        Set data (vectors of DOF values) of variables.

        Parameters
        ----------
        data : dict
            The dictionary of {variable_name : data vector}.
        step : int, optional
            The time history step, 0 (default) = current.
        ignore_unknown : bool, optional
            Ignore unknown variable names if `data` is a dict.
        """
        self.variables.set_data(data, step=step,
                                ignore_unknown=ignore_unknown)

    def apply_ebc(self, vec, force_values=None):
        """
        Apply essential (Dirichlet) boundary conditions to a state vector.
        """
        self.variables.apply_ebc(vec, force_values=force_values)

    def apply_ic(self, vec, force_values=None):
        """
        Apply initial conditions to a state vector.
        """
        self.variables.apply_ic(vec, force_values=force_values)

    def state_to_output(self, vec, fill_value=None, var_info=None,
                        extend=True):
        return self.variables.state_to_output(vec,
                                              fill_value=fill_value,
                                              var_info=var_info,
                                              extend=extend)

    def get_lcbc_operator(self):
        return self.variables.get_lcbc_operator()

    def evaluate(self, mode='eval', dw_mode='vector', term_mode=None,
                 asm_obj=None):
        """
        Parameters
        ----------
        mode : one of 'eval', 'el_avg', 'qp', 'weak'
            The evaluation mode.
        """
        if mode == 'weak':
            out = asm_obj

        else:
            out = {}

        for eq in self:
            eout = eq.evaluate(mode=mode, dw_mode=dw_mode,
                               term_mode=term_mode, asm_obj=asm_obj)
            if mode != 'weak':
                out[eq.name] = eout

        if (len(self) == 1) and (mode != 'weak'):
            out = out.popitem()[1]

        return out

    def eval_residuals(self, state, by_blocks=False, names=None):
        """
        Evaluate (assemble) residual vectors.

        Parameters
        ----------
        state : array
            The vector of DOF values. Note that it is needed only in
            nonlinear terms.
        by_blocks : bool
            If True, return the individual blocks composing the whole
            residual vector. Each equation should then correspond to one
            required block and should be named as `'block_name,
            test_variable_name, unknown_variable_name'`.
        names : list of str, optional
            Optionally, select only blocks with the given `names`, if
            `by_blocks` is True.

        Returns
        -------
        out : array or dict of array
            The assembled residual vector. If `by_blocks` is True, a
            dictionary is returned instead, with keys given by
            `block_name` part of the individual equation names.
        """
        self.set_variables_from_state(state)

        if by_blocks:
            names = get_default(names, self.names)

            out = {}

            get_indx = self.variables.get_indx
            for name in names:
                eq = self[name]
                key, rname, cname = [aux.strip()
                                     for aux in name.split(',')]

                ir = get_indx(rname, stripped=True, allow_dual=True)

                residual = self.create_stripped_state_vector()
                eq.evaluate(mode='weak', dw_mode='vector', asm_obj=residual)

                out[key] = residual[ir]

        else:
            out = self.create_stripped_state_vector()

            self.evaluate(mode='weak', dw_mode='vector', asm_obj=out)

        return out

    def eval_tangent_matrices(self, state, tangent_matrix,
                              by_blocks=False, names=None):
        """
        Evaluate (assemble) tangent matrices.

        Parameters
        ----------
        state : array
            The vector of DOF values. Note that it is needed only in
            nonlinear terms.
        tangent_matrix : csr_matrix
            The preallocated CSR matrix with zero data.
        by_blocks : bool
            If True, return the individual blocks composing the whole
            matrix. Each equation should then correspond to one required
            block and should be named as `'block_name,
            test_variable_name, unknown_variable_name'`.
        names : list of str, optional
            Optionally, select only blocks with the given `names`, if
            `by_blocks` is True.

        Returns
        -------
        out : csr_matrix or dict of csr_matrix
            The assembled matrix. If `by_blocks` is True, a dictionary
            is returned instead, with keys given by `block_name` part of
            the individual equation names.
        """
        self.set_variables_from_state(state)

        if by_blocks:
            names = get_default(names, self.names)

            out = {}

            get_indx = self.variables.get_indx
            for name in names:
                eq = self[name]
                key, rname, cname = [aux.strip()
                                     for aux in eq.name.split(',')]

                ir = get_indx(rname, stripped=True, allow_dual=True)
                ic = get_indx(cname, stripped=True, allow_dual=True)

                tangent_matrix.data[:] = 0.0

                eq.evaluate(mode='weak', dw_mode='matrix',
                            asm_obj=tangent_matrix)

                out[key] = tangent_matrix[ir, ic]

        else:
            tangent_matrix.data[:] = 0.0

            self.evaluate(mode='weak', dw_mode='matrix',
                          asm_obj=tangent_matrix)

            out = tangent_matrix

        return out
class Equations(Container):

    @staticmethod
    def from_conf(conf, variables, regions, materials, integrals,
                  setup=True, caches=None, user=None, cache_override=False,
                  make_virtual=False, verbose=True):

        objs = OneTypeList(Equation)

        conf = copy(conf)

        if caches is None:
            caches = DataCaches()

        ii = 0
        for name, desc in conf.iteritems():
            if verbose:
                output('equation "%s":' % name)
                output(desc)
            eq = Equation.from_desc(name, desc, variables, regions,
                                    materials, integrals,
                                    caches=caches, user=user)
            objs.append(eq)
            ii += 1

        obj = Equations(objs, setup=setup, caches=caches,
                        cache_override=cache_override,
                        make_virtual=make_virtual, verbose=verbose)

        return obj

    def __init__(self, equations, setup=True,
                 caches=None, cache_override=False,
                 make_virtual=False, verbose=True):
        Container.__init__(self, equations)

        self.variables = Variables(self.collect_variables())

        self.caches = get_default(caches, DataCaches())

        self.clear_geometries()

        if setup:
            self.setup(cache_override=cache_override,
                       make_virtual=make_virtual, verbose=verbose)

    def clear_geometries(self):
        self.geometries = {}

    def setup(self, cache_override=False, make_virtual=False, verbose=True):
        self.collect_conn_info()

        # This uses the conn_info created above.
        self.dof_conns = {}
        setup_dof_conns(self.conn_info, dof_conns=self.dof_conns,
                        make_virtual=make_virtual, verbose=verbose)

        self.assign_geometries()

        self.set_cache_mode(cache_override)

    def collect_materials(self):
        """
        Collect materials present in the terms of all equations.
        """
        materials = []
        for eq in self:
            materials.extend(eq.collect_materials())

        # Make the list items unique.
        materials = list(set(materials))

        return materials

    def collect_variables(self):
        """
        Collect variables present in the terms of all equations.
        """
        variables = []
        for eq in self:
            variables.extend(eq.collect_variables())

        # Make the list items unique.
        variables = list(set(variables))

        return variables

    def get_variable(self, name):
        var = self.variables.get(name,
                                 msg_if_none='unknown variable! (%s)' % name)
        return var

    def collect_conn_info(self):
        """
        Collect connectivity information as defined by the equations.
        """
        self.conn_info = {}

        for eq in self:
            eq.collect_conn_info(self.conn_info)

        ## print_structs(self.conn_info)
        ## pause()

        return self.conn_info

    def assign_geometries(self):
        for eq in self:
            eq.assign_geometries(self.geometries)

    def get_variable_names(self):
        """Return the list of names of all variables used in equations."""
        vns = set()
        for eq in self:
            for term in eq.terms:
                vns.update(term.get_variable_names())
        return list(vns)

    ##
    # 27.02.2007, c
    def invalidate_term_caches(self):
        for cache in self.caches.itervalues():
            cache.clear()

    ##
    # c: 07.05.2008, r: 07.05.2008
    def reset_term_caches(self):
        for cache in self.caches.itervalues():
            cache.reset()

    ##
    # 02.03.2007, c
    def set_cache_mode(self, cache_override):
        for cache in self.caches.itervalues():
            cache.set_mode(cache_override)

    def time_update(self, ts, ebcs=None, epbcs=None, lcbcs=None,
                    functions=None):
        self.variables.time_update(ts, functions)

        self.variables.equation_mapping(ebcs, epbcs, ts, functions)

        self.variables.setup_lcbc_operators(lcbcs)

        self.variables.setup_adof_conns()

        for eq in self:
            for term in eq.terms:
                term.time_update(ts)

    def setup_initial_conditions(self, ics, functions):
        self.variables.setup_initial_conditions(ics, functions)

    def get_graph_conns(self, any_dof_conn=False, rdcs=None, cdcs=None):
        """
        Get DOF connectivities needed for creating tangent matrix graph.

        Parameters
        ----------
        any_dof_conn : bool
            By default, only volume DOF connectivities are used, with
            the exception of trace surface DOF connectivities. If True,
            any kind of DOF connectivities is allowed.
        rdcs, cdcs : arrays, optional
            Additional row and column DOF connectivities, corresponding
            to the variables used in the equations.

        Returns
        -------
        rdcs, cdcs : arrays
            The row and column DOF connectivities defining the matrix
            graph blocks.
        """
        if rdcs is None:
            rdcs = []
            cdcs = []

        elif cdcs is None:
            cdcs = copy(rdcs)

        else:
            assert_(len(rdcs) == len(cdcs))
            if rdcs is cdcs: # Make sure the lists are not the same object.
                rdcs = copy(rdcs)

        adcs = self.variables.adof_conns

        # Only volume dof connectivities are used, with the exception of trace
        # surface dof connectivities.
        shared = set()
        for key, ii, info in iter_dict_of_lists(self.conn_info,
                                                return_keys=True):
            rvar, cvar = info.virtual, info.state
            if (rvar is None) or (cvar is None):
                continue

            is_surface = rvar.is_surface or cvar.is_surface

            dct = info.dc_type.type
            if not (dct in ('volume', 'scalar') or is_surface
                    or info.is_trace or any_dof_conn):
                continue

            rreg_name = info.get_region_name(can_trace=False)
            creg_name = info.get_region_name()

            for rig, cig in info.iter_igs():
                rname = rvar.get_primary_name()
                rkey = (rname, rreg_name, dct, rig)
                ckey = (cvar.name, creg_name, dct, cig)

                dc_key = (rkey, ckey)
                ## print dc_key

                if not dc_key in shared:
                    try:
                        rdcs.append(adcs[rkey])
                        cdcs.append(adcs[ckey])
                    except:
                        debug()
                    shared.add(dc_key)

        ## print shared
        for ii in range(len(rdcs)):
            if (rdcs[ii].ndim == 1) and (cdcs[ii].ndim == 2):
                rdcs[ii] = _fix_scalar_dc(rdcs[ii], cdcs[ii])

            elif (cdcs[ii].ndim == 1) and (rdcs[ii].ndim == 2):
                cdcs[ii] = _fix_scalar_dc(cdcs[ii], rdcs[ii])

            elif (cdcs[ii].ndim == 1) and (rdcs[ii].ndim == 1):
                rdcs[ii] = nm.array(rdcs[ii], ndmin=2)
                cdcs[ii] = nm.array(cdcs[ii], ndmin=2)

        return rdcs, cdcs

    def create_matrix_graph(self, any_dof_conn=False, rdcs=None, cdcs=None,
                            shape=None):
        """
        Create tangent matrix graph, i.e. preallocate and initialize the
        sparse storage needed for the tangent matrix. Order of DOF
        connectivities is not important.

        Parameters
        ----------
        any_dof_conn : bool
            By default, only volume DOF connectivities are used, with
            the exception of trace surface DOF connectivities. If True,
            any kind of DOF connectivities is allowed.
        rdcs, cdcs : arrays, optional
            Additional row and column DOF connectivities, corresponding
            to the variables used in the equations.
        shape : tuple, optional
            The required shape, if it is different from the shape
            determined by the equations variables. This may be needed if
            additional row and column DOF connectivities are passed in.

        Returns
        -------
        matrix : csr_matrix
            The matrix graph in the form of a CSR matrix with
            preallocated structure and zero data.
        """
        if not self.variables.has_virtuals():
            output('no matrix (no test variables)!')
            return None

        shape = get_default(shape, self.variables.get_matrix_shape())
        output('matrix shape:', shape)
        if nm.prod(shape) == 0:
            output('no matrix (zero size)!')
            return None

        rdcs, cdcs = self.get_graph_conns(any_dof_conn=any_dof_conn,
                                          rdcs=rdcs, cdcs=cdcs)

        if not len(rdcs):
            output('no matrix (empty dof connectivities)!')
            return None

        output('assembling matrix graph...')
        tt = time.clock()

        ret, prow, icol = raw_graph(int(shape[0]), int(shape[1]),
                                    len(rdcs), rdcs, cdcs)

        output('...done in %.2f s' % (time.clock() - tt))
        nnz = prow[-1]
        output('matrix structural nonzeros: %d (%.2e%% fill)'
               % (nnz, float(nnz) / nm.prod(shape)))
        ## print ret, prow, icol, nnz

        data = nm.zeros((nnz,), dtype=self.variables.dtype)
        matrix = sp.csr_matrix((data, icol, prow), shape)
        ## matrix.save( 'matrix', format = '%d %d %e\n' )
        ## pause()

        return matrix

    ##
    # c: 02.04.2008, r: 02.04.2008
    def init_time(self, ts):
        for cache in self.caches.itervalues():
            cache.init_time(ts)

    ##
    # 08.06.2007, c
    def advance(self, ts):
        for cache in self.caches.itervalues():
            cache.advance(ts.step + 1)

        for eq in self:
            for term in eq.terms:
                term.advance(ts)

        self.variables.advance(ts)

    ##
    # Interface to self.variables.
    def create_state_vector(self):
        return self.variables.create_state_vector()

    def create_stripped_state_vector(self):
        return self.variables.create_stripped_state_vector()

    def strip_state_vector(self, vec, follow_epbc=True):
        """
        Strip a full vector by removing EBC dofs.

        If 'follow_epbc' is True, values of EPBC master dofs are not
        simply thrown away, but added to the corresponding slave dofs,
        just like when assembling.
        """
        return self.variables.strip_state_vector(vec,
                                                 follow_epbc=follow_epbc)

    def make_full_vec(self, svec, var_name=None, force_value=None):
        """
        Make a full vector satisfying E(P)BC from a stripped vector. For
        a selected variable if var_name is set.
        """
        return self.variables.make_full_vec(svec, var_name=var_name,
                                            force_value=force_value)

    def set_variables_from_state(self, vec, step=0):
        """
        Set data (vectors of DOF values) of variables.

        Parameters
        ----------
        vec : array
            The state vector.
        step : int
            The time history step, 0 (default) = current.
        """
        self.variables.set_data(vec, step=step)

    def get_state_parts(self, vec=None):
        """
        Return parts of a state vector corresponding to individual state
        variables.

        Parameters
        ----------
        vec : array, optional
            The state vector. If not given, then the data stored in the
            variables are returned instead.

        Returns
        -------
        out : dict
            The dictionary of the state parts.
        """
        return self.variables.get_state_parts(vec)

    def set_data(self, data, step=0, ignore_unknown=False):
        """
        Set data (vectors of DOF values) of variables.

        Parameters
        ----------
        data : dict
            The dictionary of {variable_name : data vector}.
        step : int, optional
            The time history step, 0 (default) = current.
        ignore_unknown : bool, optional
            Ignore unknown variable names if `data` is a dict.
        """
        self.variables.set_data(data, step=step,
                                ignore_unknown=ignore_unknown)

    def apply_ebc(self, vec, force_values=None):
        """
        Apply essential (Dirichlet) boundary conditions to a state vector.
        """
        self.variables.apply_ebc(vec, force_values=force_values)

    def apply_ic(self, vec, force_values=None):
        """
        Apply initial conditions to a state vector.
        """
        self.variables.apply_ic(vec, force_values=force_values)

    def state_to_output(self, vec, fill_value=None, var_info=None,
                        extend=True):
        return self.variables.state_to_output(vec,
                                              fill_value=fill_value,
                                              var_info=var_info,
                                              extend=extend)

    def get_lcbc_operator(self):
        return self.variables.get_lcbc_operator()

    def evaluate(self, mode='eval', dw_mode='vector', term_mode=None,
                 asm_obj=None):
        """
        Parameters
        ----------
        mode : one of 'eval', 'el_avg', 'qp', 'weak'
            The evaluation mode.
        """
        if mode == 'weak':
            out = asm_obj

        else:
            out = {}

        for eq in self:
            eout = eq.evaluate(mode=mode, dw_mode=dw_mode,
                               term_mode=term_mode, asm_obj=asm_obj)
            if mode != 'weak':
                out[eq.name] = eout

        if (len(self) == 1) and (mode != 'weak'):
            out = out.popitem()[1]

        return out

    def eval_residuals(self, state):
        self.invalidate_term_caches()

        self.set_variables_from_state(state)

        residual = self.create_stripped_state_vector()
        self.evaluate(mode='weak', dw_mode='vector', asm_obj=residual)

        return residual

    def eval_tangent_matrices(self, state, tangent_matrix, by_blocks=False):
        """
        Evaluate (assemble) tangent matrices.

        Parameters
        ----------
        state : array
            The vector of DOF values. Note that it is needed only in
            nonlinear terms.
        tangent_matrix : csr_matrix
            The preallocated CSR matrix with zero data.
        by_blocks : bool
            If True, return the individual blocks composing the whole
            matrix. Each equation should then correspond to one required
            block and should be named as `'block_name,
            test_variable_name, unknown_variable_name'`.

        Returns
        -------
        out : csr_matrix or dict of csr_matrix
            The assembled matrix. If `by_blocks` is True, a dictionary
            is returned instead, with keys given by `block_name` part of
            the individual equation names.
        """
        self.set_variables_from_state(state)

        self.evaluate(mode='weak', dw_mode='matrix', asm_obj=tangent_matrix)

        if by_blocks:
            out = {}

            get_indx = self.variables.get_indx
            for eq in self:
                key, rname, cname = [aux.strip()
                                     for aux in eq.name.split(',')]

                ir = get_indx(rname, stripped=True, allow_dual=True)
                ic = get_indx(cname, stripped=True, allow_dual=True)

                out[key] = tangent_matrix[ir, ic]

        else:
            out = tangent_matrix

        return out
def main():
    parser = OptionParser(usage=usage, version='%prog')
    parser.add_option('-b', '--basis', metavar='name',
                      action='store', dest='basis',
                      default='lagrange', help=help['basis'])
    parser.add_option('-d', '--derivative', metavar='d', type=int,
                      action='store', dest='derivative',
                      default=0, help=help['derivative'])
    parser.add_option('-n', '--max-order', metavar='order', type=int,
                      action='store', dest='max_order',
                      default=2, help=help['max_order'])
    parser.add_option('-g', '--geometry', metavar='name',
                      action='store', dest='geometry',
                      default='2_4', help=help['geometry'])
    parser.add_option('-m', '--mesh', metavar='mesh',
                      action='store', dest='mesh',
                      default=None, help=help['mesh'])
    parser.add_option('', '--permutations', metavar='permutations',
                      action='store', dest='permutations',
                      default=None, help=help['permutations'])
    parser.add_option('', '--dofs', metavar='dofs',
                      action='store', dest='dofs',
                      default=None, help=help['dofs'])
    parser.add_option('-l', '--lin-options', metavar='options',
                      action='store', dest='lin_options',
                      default='min_level=2,max_level=5,eps=1e-3',
                      help=help['lin_options'])
    parser.add_option('', '--plot-dofs',
                      action='store_true', dest='plot_dofs',
                      default=False, help=help['plot_dofs'])
    options, args = parser.parse_args()

    if len(args) == 1:
        output_dir = args[0]
    else:
        parser.print_help()
        return

    output('polynomial space:', options.basis)
    output('max. order:', options.max_order)

    lin = Struct(kind='adaptive', min_level=2, max_level=5, eps=1e-3)
    for opt in options.lin_options.split(','):
        key, val = opt.split('=')
        setattr(lin, key, eval(val))

    if options.mesh is None:
        dim, n_ep = int(options.geometry[0]), int(options.geometry[2])
        output('reference element geometry:')
        output('  dimension: %d, vertices: %d' % (dim, n_ep))

        gel = GeometryElement(options.geometry)
        gps = PolySpace.any_from_args(None, gel, 1,
                                      base=options.basis)
        ps = PolySpace.any_from_args(None, gel, options.max_order,
                                     base=options.basis)

        n_digit, _format = get_print_info(ps.n_nod, fill='0')
        name_template = os.path.join(output_dir, 'bf_%s.vtk' % _format)
        for ip in get_dofs(options.dofs, ps.n_nod):
            output('shape function %d...' % ip)

            def eval_dofs(iels, rx):
                if options.derivative == 0:
                    bf = ps.eval_base(rx).squeeze()
                    rvals = bf[None, :, ip:ip + 1]

                else:
                    bfg = ps.eval_base(rx, diff=True)
                    rvals = bfg[None, ..., ip]

                return rvals

            def eval_coors(iels, rx):
                bf = gps.eval_base(rx).squeeze()
                coors = nm.dot(bf, gel.coors)[None, ...]
                return coors

            (level, coors, conn,
             vdofs, mat_ids) = create_output(eval_dofs, eval_coors,
                                             1, ps,
                                             min_level=lin.min_level,
                                             max_level=lin.max_level,
                                             eps=lin.eps)
            out = {
                'bf': Struct(name='output_data',
                             mode='vertex', data=vdofs,
                             var_name='bf', dofs=None)
            }

            mesh = Mesh.from_data('bf_mesh', coors, None, [conn],
                                  [mat_ids], [options.geometry])
            name = name_template % ip
            mesh.write(name, out=out)

            output('...done (%s)' % name)

    else:
        mesh = Mesh.from_file(options.mesh)
        output('mesh geometry:')
        output('  dimension: %d, vertices: %d, elements: %d'
               % (mesh.dim, mesh.n_nod, mesh.n_el))
        domain = Domain('domain', mesh)

        if options.permutations:
            permutations = [int(ii) for ii in options.permutations.split(',')]
            output('using connectivity permutations:', permutations)
            for group in domain.iter_groups():
                perms = group.gel.get_conn_permutations()[permutations]
                offsets = nm.arange(group.shape.n_el) * group.shape.n_ep
                group.conn[:] = group.conn.take(perms + offsets[:, None])

            domain.setup_facets()

        omega = domain.create_region('Omega', 'all')
        field = Field.from_args('f', nm.float64, shape=1, region=omega,
                                approx_order=options.max_order,
                                poly_space_base=options.basis)
        var = FieldVariable('u', 'unknown', field, 1)

        if options.plot_dofs:
            import sfepy.postprocess.plot_dofs as pd
            group = domain.groups[0]
            ax = pd.plot_mesh(None, mesh.coors, mesh.conns[0],
                              group.gel.edges)
            ax = pd.plot_global_dofs(ax, field.get_coor(),
                                     field.aps[0].econn)
            ax = pd.plot_local_dofs(ax, field.get_coor(),
                                    field.aps[0].econn)
            if options.dofs is not None:
                ax = pd.plot_nodes(ax, field.get_coor(),
                                   field.aps[0].econn,
                                   field.aps[0].interp.poly_spaces['v'].nodes,
                                   get_dofs(options.dofs, var.n_dof))
            pd.plt.show()

        output('dofs: %d' % var.n_dof)

        vec = nm.empty(var.n_dof, dtype=var.dtype)
        n_digit, _format = get_print_info(var.n_dof, fill='0')
        name_template = os.path.join(output_dir, 'dof_%s.vtk' % _format)
        for ip in get_dofs(options.dofs, var.n_dof):
            output('dof %d...' % ip)

            vec.fill(0.0)
            vec[ip] = 1.0

            var.set_data(vec)

            if options.derivative == 0:
                out = var.create_output(vec, linearization=lin)

            else:
                out = create_expression_output('ev_grad.ie.Elements(u)',
                                               'u', 'f', {'f': field}, None,
                                               Variables([var]),
                                               mode='qp', verbose=False,
                                               min_level=lin.min_level,
                                               max_level=lin.max_level,
                                               eps=lin.eps)

            name = name_template % ip
            out['u'].mesh.write(name, out=out)

            output('...done (%s)' % name)