Code example #1
File: test_fem.py  Project: Nasrollah/sfepy
    def test_base_functions_delta(self):
        """
        Test :math:`\delta` property of base functions evaluated in the
        reference element nodes.
        """
        from sfepy.base.base import ordered_iteritems
        from sfepy.discrete.fem.poly_spaces import PolySpace

        ok = True

        for key, gel in ordered_iteritems(self.gels):
            for order in range(11):
                ps = PolySpace.any_from_args('aux', gel, order,
                                             base='lagrange',
                                             force_bubble=False)
                bf = ps.eval_base(ps.node_coors)
                _ok = nm.allclose(nm.eye(ps.n_nod),
                                  bf.squeeze(),
                                  rtol=0.0, atol=(order + 1) * 1e-14)

                self.report('%s order %d (n_nod: %d): %s'
                            % (key, order, ps.n_nod, _ok))

                if not _ok:
                    import pdb; pdb.set_trace()

            ok = ok and _ok

        return ok
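
All of these examples iterate over dictionaries with ordered_iteritems from sfepy.base.base, which yields (key, value) pairs in sorted key order so that iteration is deterministic. A minimal sketch of an equivalent helper (a plain-Python stand-in, not necessarily the actual sfepy implementation):

def ordered_iteritems(adict):
    # Yield (key, value) pairs sorted by key, giving a reproducible
    # iteration order independent of how the dict was built.
    for key in sorted(adict.keys()):
        yield key, adict[key]

For instance, ordered_iteritems({'b': 2, 'a': 1}) yields ('a', 1) before ('b', 2).
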
Code example #2
File: time_poisson_interactive.py  Project: rc/sfepy
        def poststep_fun(ts, vec):
            _poststep_fun(ts, vec)

            # Probe the solution.
            dvel_qp = ev('ev_diffusion_velocity.%d.Omega(m.diffusivity, T)'
                         % order, copy_materials=False, mode='qp')
            project_by_component(dvel, dvel_qp, component, order,
                                 nls_options=nls_options)

            all_results = []
            for ii, probe in enumerate(probes):
                fig, results = probe_results(ii, T, dvel, probe, labels[ii])

                all_results.append(results)

            plt.tight_layout()
            fig.savefig('time_poisson_interactive_probe_%s.png'
                        % (suffix % ts.step), bbox_inches='tight')

            for ii, results in enumerate(all_results):
                output('probe %d (%s):' % (ii, probes[ii].name))
                output.level += 2
                for key, res in ordered_iteritems(results):
                    output(key + ':')
                    val = res[1]
                    output('  min: %+.2e, mean: %+.2e, max: %+.2e'
                           % (val.min(), val.mean(), val.max()))
                output.level -= 2
Code example #3
    def plot_data(self, igs):
        send = self.plot_pipe.send

        ii = 0
        for ig, names in ordered_iteritems(self.data_names):
            if ig in igs:
                send(['ig', ig])
                send(['clear'])
                for ip, name in enumerate(names):
                    send(['ip', ip])
                    key = name_to_key(name, ii)
                    try:
                        send(['plot',
                              nm.array(self.x_values[ig]),
                              nm.array(self.data[key]),
                              self.plot_kwargs[ig][ip]])
                    except:
                        msg = "send failed! (%s, %s, %s)!" \
                              % (ii, name, self.data[key])
                        raise IOError(msg)
                    ii += 1

            else:
                ii += len(names)

        if self.show_legends:
            send(['legends'])

        send(['continue'])
Code example #4
        def poststep_fun(ts, vec):
            _poststep_fun(ts, vec)

            # Probe the solution.
            dvel_qp = ev('ev_diffusion_velocity.%d.Omega(m.diffusivity, T)' %
                         order,
                         copy_materials=False,
                         mode='qp')
            project_by_component(dvel,
                                 dvel_qp,
                                 component,
                                 order,
                                 nls_options=nls_options)

            all_results = []
            for ii, probe in enumerate(probes):
                fig, results = probe_results(ii, T, dvel, probe, labels[ii])

                all_results.append(results)

            plt.tight_layout()
            fig.savefig('time_poisson_interactive_probe_%s.png' %
                        (suffix % ts.step),
                        bbox_inches='tight')

            for ii, results in enumerate(all_results):
                output('probe %d (%s):' % (ii, probes[ii].name))
                output.level += 2
                for key, res in ordered_iteritems(results):
                    output(key + ':')
                    val = res[1]
                    output('  min: %+.2e, mean: %+.2e, max: %+.2e' %
                           (val.min(), val.mean(), val.max()))
                output.level -= 2
Code example #5
File: show_terms_use.py  Project: ZJLi2013/sfepy
def main():
    parser = OptionParser(usage=usage, version='%prog')
    parser.add_option('-c', '--counts',
                      action='store_true', dest='counts',
                      default=False, help=helps['counts'])
    parser.add_option('-u', '--unused',
                      action='store_true', dest='unused',
                      default=False, help=helps['unused'])
    options, args = parser.parse_args()

    if len(args) > 0:
        pdf_dir = os.path.realpath(args[0])

    else:
        parser.print_help(),
        return

    required, other = get_standard_keywords()

    terms_use = dict_from_keys_init(term_table.keys(), set)

    for filename in locate_files('*.py', pdf_dir):
        base = filename.replace(pdf_dir, '').lstrip(os.path.sep)
        output('trying "%s"...' % base)

        try:
            conf = ProblemConf.from_file(filename, required, other,
                                         verbose=False)

        except:
            output('...failed')
            continue

        use = conf.options.get('use_equations', 'equations')
        eqs_conf = getattr(conf, use)
        for key, eq_conf in eqs_conf.iteritems():
            term_descs = parse_definition(eq_conf)
            for td in term_descs:
                terms_use[td.name].add(base)

        output('...ok')
    output('...done')

    if options.unused:
        output('unused terms:')

        unused = [name for name in terms_use.keys()
                  if len(terms_use[name]) == 0]
        for name in sorted(unused):
            output('  ' + name)

        output('total: %d' % len(unused))

    else:
        output('terms use:')
        for name, ex_names in ordered_iteritems(terms_use):
            output('%s: %d' % (name, len(ex_names)))
            if not options.counts:
                for ex_name in sorted(ex_names):
                    output('  ' + ex_name)
Code example #6
File: test_fem.py  Project: rosendo100/sfepy
    def test_base_functions_values(self):
        """
        Compare base function values and their gradients with correct
        data. Also test that sum of values over all element nodes gives one.
        """
        from sfepy.base.base import ordered_iteritems
        from sfepy.discrete.fem.poly_spaces import PolySpace

        ok = True

        for key, val in ordered_iteritems(test_bases):
            gel = self.gels[key[:3]]
            diff = key[-4:] == "grad"
            order = int(key[5])
            force_bubble = key[6:7] == "B"

            ps = PolySpace.any_from_args("aux", gel, order, base="lagrange", force_bubble=force_bubble)
            dim = ps.geometry.dim
            coors = nm.r_[ps.geometry.coors, [[0.2] * dim]]

            bf = ps.eval_base(coors, diff=diff)
            _ok = nm.allclose(val, bf, rtol=0.0, atol=1e-14)
            ## if not _ok:
            ##     nm.set_printoptions(threshold=1000000, linewidth=65)
            ##     print bf.__repr__()

            if not diff:
                _ok = _ok and nm.allclose(bf.sum(axis=2), 1.0, rtol=0.0, atol=1e-14)

            self.report("%s: %s" % (key, _ok))

            ok = ok and _ok

        return ok
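
The unit-sum check in this test corresponds to the partition-of-unity property of Lagrange bases, :math:`\sum_i \phi_i(x) = 1` at every evaluation point; the bf.sum(axis=2) call sums the returned values over the last (node) axis before comparing with 1.0.
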
Code example #7
File: coefficients.py  Project: Nasrollah/sfepy
    def _save_dict_latex(self, adict, fd, names):
        fd.write( r'\begin{itemize}' )
        fd.write( '\n' )
        for key, val in ordered_iteritems(adict):
            if key.startswith('_a_'): continue

            try:
                lname = names[key]
            except:
                lname = self._escape_latex(key)
            fd.write( '\item %s:' % lname )
            fd.write( '\n' )

            if isinstance(val, dict):
                self._save_dict_latex(val, fd, names)

            elif isinstance(val, basestr):
                fd.write(self._escape_latex(val) + '\n')

            elif val.ndim == 0:
                fd.write('$' + self._format(val) + '$\n')

            elif val.ndim == 1:
                self._write1d(fd, val)

            elif val.ndim == 2:
                self._write2d(fd, val)

        fd.write( r'\end{itemize}' )
        fd.write( '\n\n' )
Code example #8
    def test_base_functions_delta(self):
        """
        Test :math:`\delta` property of base functions evaluated in the
        reference element nodes.
        """
        from sfepy.base.base import ordered_iteritems
        from sfepy.fem.poly_spaces import PolySpace

        ok = True

        for key, gel in ordered_iteritems(self.gels):
            for order in range(11):
                ps = PolySpace.any_from_args('aux',
                                             gel,
                                             order,
                                             base='lagrange',
                                             force_bubble=False)
                bf = ps.eval_base(ps.node_coors)
                _ok = nm.allclose(nm.eye(ps.n_nod),
                                  bf.squeeze(),
                                  rtol=0.0,
                                  atol=(order + 1) * 1e-14)

                self.report('%s order %d (n_nod: %d): %s' %
                            (key, order, ps.n_nod, _ok))

                if not _ok:
                    import pdb
                    pdb.set_trace()

            ok = ok and _ok

        return ok
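
The :math:`\delta` property checked by both versions of this test is :math:`\phi_i(\xi_j) = \delta_{ij}`: each Lagrange base function equals one at its own reference node and zero at all the others, so the matrix of base function values evaluated at ps.node_coors must match the identity nm.eye(ps.n_nod) up to round-off.
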
Code example #9
    def _save_dict_latex(self, adict, fd, names):
        fd.write(r'\begin{itemize}')
        fd.write('\n')
        for key, val in ordered_iteritems(adict):
            if key.startswith('_a_'): continue

            try:
                lname = names[key]
            except:
                lname = self._escape_latex(key)
            fd.write('\item %s:' % lname)
            fd.write('\n')

            if isinstance(val, dict):
                self._save_dict_latex(val, fd, names)

            elif isinstance(val, basestr):
                fd.write(self._escape_latex(val) + '\n')

            elif val.ndim == 0:
                fd.write('$' + self._format(val) + '$\n')

            elif val.ndim == 1:
                self._write1d(fd, val)

            elif val.ndim == 2:
                self._write2d(fd, val)

        fd.write(r'\end{itemize}')
        fd.write('\n\n')
Code example #10
File: log.py  Project: mikegraham/sfepy
    def plot_data(self, igs):
        send = self.plot_pipe.send

        ii = 0
        for ig, names in ordered_iteritems(self.data_names):
            if ig in igs:
                send(['ig', ig])
                send(['clear'])
                for name in names:
                    key = name_to_key(name, ii)
                    try:
                        send(['plot',
                              nm.array(self.x_values[ig]),
                              nm.array(self.data[key])])
                    except:
                        msg = "send failed! (%s, %s, %s)!" \
                              % (ii, name, self.data[key])
                        raise IOError(msg)
                    ii += 1

            else:
                ii += len(names)

        send(['legends'])
        send(['continue'])
Code example #11
    def test_weight_consistency(self):
        """
        Test if integral of 1 (= sum of weights) gives the domain volume.
        """
        from sfepy.fem.quadratures import quadrature_tables

        ok = True
        for geometry, qps in ordered_iteritems(quadrature_tables):
            self.report('geometry:', geometry)
            for order, qp in ordered_iteritems(qps):
                _ok = nm.allclose(qp.weights.sum(), qp.volume,
                                  rtol=0.0, atol=1e-15)
                self.report('order %d: %s' % (order, _ok))

                ok = ok and _ok

        return ok
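
These weight-consistency tests rely on the basic quadrature identity :math:`\int_{\hat{K}} f(\xi)\,d\xi \approx \sum_q w_q\, f(\xi_q)`: taking :math:`f \equiv 1` gives :math:`\sum_q w_q = |\hat{K}|`, the reference element volume, which is the qp.volume value the summed weights are compared against.
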
Code example #12
File: test_quadratures.py  Project: rc/sfepy
    def test_weight_consistency(self):
        """
        Test if integral of 1 (= sum of weights) gives the domain volume.
        """
        from sfepy.discrete.quadratures import quadrature_tables

        ok = True
        for geometry, qps in ordered_iteritems(quadrature_tables):
            self.report("geometry:", geometry)
            for order, qp in ordered_iteritems(qps):
                diff = nm.abs(qp.weights.sum() - qp.volume)
                _ok = diff < 1e-14
                self.report("order %d: %s (%.2e)" % (order, _ok, diff))

                ok = ok and _ok

        return ok
Code example #13
    def test_weight_consistency(self):
        """
        Test if integral of 1 (= sum of weights) gives the domain volume.
        """
        from sfepy.discrete.quadratures import quadrature_tables

        ok = True
        for geometry, qps in ordered_iteritems(quadrature_tables):
            self.report('geometry:', geometry)
            for order, qp in ordered_iteritems(qps):
                diff = nm.abs(qp.weights.sum() - qp.volume)
                _ok = diff < 1e-14
                self.report('order %d: %s (%.2e)' % (order, _ok, diff))

                ok = ok and _ok

        return ok
Code example #14
File: plot_parallel_dofs.py  Project: Gkdnz/sfepy
def plot_partitioning(axs, field, cell_tasks, gfd, output_dir, size):
    """
    Plot the partitioning of the domain and field DOFs.
    """
    mesh = field.domain.mesh

    ax = pc.plot_wireframe(axs[0], mesh.cmesh)
    coors = field.get_coor()
    econn = field.econn
    ax = pd.plot_global_dofs(ax, coors, econn)
    ax.set_title('global DOFs')
    ax.figure.savefig(os.path.join(output_dir, 'global_dofs.png'),
                      bbox_inches='tight')

    ax = pc.plot_wireframe(axs[1], mesh.cmesh)
    fig = ax.figure
    coors = field.get_coor()
    econn = field.econn

    id_map = gfd.id_map

    colors = nm.zeros((field.n_nod, 4))
    for ir, dof_map in ordered_iteritems(gfd.dof_maps):
        aux = id_map[dof_map[0]]
        colors[aux] = [0, 0, float(ir + 1) / size, 0.6]
        for aux in dof_map[1]:
            colors[id_map[aux]] = [0, 0, float(ir + 1) / size, 0.9]

    from sfepy.discrete.fem.utils import prepare_translate
    aux = prepare_translate(id_map[econn], econn)
    ax = label_dofs(ax, coors[aux], id_map, colors)

    mark_subdomains(ax, mesh.cmesh, cell_tasks, size, 0, 0.7)

    ax.set_title('subdomains of tasks and PETSc DOFs')

    fig.savefig(os.path.join(output_dir, 'petsc_dofs.png'),
                bbox_inches='tight')

    ax.set_title('')

    axis = ax.axis()
    for ir, ocells in enumerate(gfd.overlap_cells):
        aux = nm.zeros_like(cell_tasks)
        aux[ocells] = 10
        aux[gfd.cell_parts[ir]] = 1

        ax = fig.add_axes(ax.get_position(), frameon=False, label='aux')
        mark_subdomains(ax, mesh.cmesh, aux, 11, 1, 0.3, True)
        ax.axis(axis)
        ax.set_title('overlap cells on task %d' % ir)
        fig.savefig(os.path.join(output_dir,
                                 'petsc_overlaps_%02d.png' % ir),
                    bbox_inches='tight')
        fig.delaxes(ax)
Code example #15
File: gen_gallery.py  Project: sdurve/sfepy
def generate_gallery_html(output_filename, gallery_dir,
                          rst_dir, thumbnails_dir, dir_map, link_prefix=''):
    """
    Generate the gallery html file with thumbnail images and links to
    examples.

    Parameters
    ----------
    output_filename : str
        The output html file name.
    gallery_dir : str
        The top level directory of gallery files.
    rst_dir : str
        The full path to rst files of examples within `gallery_dir`.
    thumbnails_dir : str
        The full path to thumbnail images within `gallery_dir`.
    dir_map : dict
        The directory mapping returned by `generate_rst_files()`
    link_prefix : str, optional
        The prefix to prepend to links to individual pages of examples.
    """
    output('generating %s...' % output_filename)

    lines = []
    for dirname, filenames in ordered_iteritems(dir_map):
        full_dirname = os.path.join(rst_dir, dirname)

        for ex_filename, rst_filename in filenames:
            full_rst_filename = os.path.join(full_dirname, rst_filename)

            ebase = full_rst_filename.replace(rst_dir, '')[1:]
            ebase = edit_filename(ebase, new_ext='.py')

            link_base = full_rst_filename.replace(gallery_dir, '')[1:]
            link = os.path.join(link_prefix,
                                os.path.splitext(link_base)[0] + '.html')

            _get_fig_filenames(ebase, thumbnails_dir).next()
            for thumbnail_filename in _get_fig_filenames(ebase,
                                                         thumbnails_dir):
                if not os.path.isfile(thumbnail_filename):
                    # Skip examples with no image (= failed examples).
                    continue

                thumbnail_name = thumbnail_filename.replace(gallery_dir,
                                                            '')[1:]
                line = _link_template % (link, thumbnail_name,
                                         os.path.splitext(ebase)[0])
                lines.append(line)

    fd = open(output_filename, 'w')
    fd.write(_gallery_template % (link_prefix, '\n'.join(lines)))
    fd.close()

    output('...done')
Code example #16
File: log.py  Project: mikegraham/sfepy
    def iter_names(self, igs=None):
        if igs is None:
            igs = nm.arange(self.n_gr)

        ii = iseq = 0
        for ig, names in ordered_iteritems(self.data_names):
            for name in names:
                if ig in igs:
                    yield ig, ii, iseq, name
                    iseq += 1
                ii += 1
Code example #17
def iter_names(data_names, igs=None):
    if igs is None:
        igs = nm.arange(len(data_names))

    ii = iseq = 0
    for ig, names in ordered_iteritems(data_names):
        for ip, name in enumerate(names):
            if ig in igs:
                yield ig, ip, ii, iseq, name
                iseq += 1
            ii += 1
Code example #18
def plot_partitioning(axs, field, cell_tasks, gfd, output_dir, size):
    """
    Plot the partitioning of the domain and field DOFs.
    """
    mesh = field.domain.mesh

    ax = pc.plot_wireframe(axs[0], mesh.cmesh)
    coors = field.get_coor()
    econn = field.econn
    ax = pd.plot_global_dofs(ax, coors, econn)
    ax.set_title('global DOFs')
    ax.figure.savefig(os.path.join(output_dir, 'global_dofs.png'),
                      bbox_inches='tight')

    ax = pc.plot_wireframe(axs[1], mesh.cmesh)
    fig = ax.figure
    coors = field.get_coor()
    econn = field.econn

    id_map = gfd.id_map

    colors = nm.zeros((field.n_nod, 4))
    for ir, dof_map in ordered_iteritems(gfd.dof_maps):
        aux = id_map[dof_map[0]]
        colors[aux] = [0, 0, float(ir + 1) / size, 0.6]
        for aux in dof_map[1]:
            colors[id_map[aux]] = [0, 0, float(ir + 1) / size, 0.9]

    from sfepy.discrete.fem.utils import prepare_translate
    aux = prepare_translate(id_map[econn], econn)
    ax = label_dofs(ax, coors[aux], id_map, colors)

    mark_subdomains(ax, mesh.cmesh, cell_tasks, size, 0, 0.7)

    ax.set_title('subdomains of tasks and PETSc DOFs')

    fig.savefig(os.path.join(output_dir, 'petsc_dofs.png'),
                bbox_inches='tight')

    ax.set_title('')

    axis = ax.axis()
    for ir, ocells in enumerate(gfd.overlap_cells):
        aux = nm.zeros_like(cell_tasks)
        aux[ocells] = 10
        aux[gfd.cell_parts[ir]] = 1

        ax = fig.add_axes(ax.get_position(), frameon=False, label='aux')
        mark_subdomains(ax, mesh.cmesh, aux, 11, 1, 0.3, True)
        ax.axis(axis)
        ax.set_title('overlap cells on task %d' % ir)
        fig.savefig(os.path.join(output_dir, 'petsc_overlaps_%02d.png' % ir),
                    bbox_inches='tight')
        fig.delaxes(ax)
Code example #19
    def iter_names(self, igs=None):
        if igs is None:
            igs = nm.arange(self.n_gr)

        ii = iseq = 0
        for ig, names in ordered_iteritems(self.data_names):
            for name in names:
                if ig in igs:
                    yield ig, ii, iseq, name
                    iseq += 1
                ii += 1
Code example #20
def save_raw_bg_logs(filename, logs):
    """
    Save raw band gaps `logs` into the `filename` file.
    """
    out = {}

    iranges = nm.cumsum([0] + [len(ii) for ii in logs.freqs])
    out['iranges'] = iranges
    for key, log in ordered_iteritems(logs.to_dict()):
        out[key] = nm.concatenate(log, axis=0)

    nm.savez(filename, **out)
Code example #21
File: band_gaps_app.py  Project: lokik/sfepy
def save_raw_bg_logs(filename, logs):
    """
    Save raw band gaps `logs` into the `filename` file.
    """
    out = {}

    iranges = nm.cumsum([0] + [len(ii) for ii in logs.freqs])
    out['iranges'] = iranges
    for key, log in ordered_iteritems(logs.to_dict()):
        out[key] = nm.concatenate(log, axis=0)

    with open(filename, 'w') as fd:
        nm.savez(fd, **out)
Code example #22
File: test_term_call_modes.py  Project: zitkat/sfepy
    def test_term_call_modes(self):
        from sfepy.terms import term_table
        ok = True

        failed = []
        for domain in self.domains:
            self.report('domain: %s' % domain.name)

            domain_geometry = list(domain.geom_els.values())[0].name
            if domain.shape.dim != domain.shape.tdim:
                domain_geometry = '%d_%s' % (domain.shape.dim, domain_geometry)

            for _, term_cls in ordered_iteritems(term_table):
                if (domain_geometry not in term_cls.geometries) \
                   or ("dg" in term_cls.name) \
                   or (term_cls.name == "dw_ns_dot_grad_s"):
                    continue

                if term_cls.integration == 'by_region':
                    rnames = ['Omega', 'Gamma']
                else:
                    vint = ('volume', 'point', 'custom')
                    rnames = ['Omega'] if term_cls.integration in vint\
                        else ['Gamma']

                for rname in rnames:
                    self.report('<-- %s.%s ...' % (term_cls.name, rname))

                    if rname == 'Gamma' and domain.mesh.dim == 1:
                        self.report('--> 1D Gamma region: not tested!')

                    elif term_cls.arg_shapes:
                        try:
                            _ok = self._test_single_term(
                                term_cls, domain, rname)

                        except:
                            _ok = False

                        if not _ok:
                            failed.append((domain.name, term_cls.name))

                        ok = ok and _ok
                        self.report('--> ok: %s' % _ok)

                    else:
                        self.report('--> not tested!')

        self.report('failed:', failed)

        return ok
Code example #23
    def test_quadratures(self):
        """
        Test if the quadratures have orders they claim to have, using
        symbolic integration by sympy.
        """
        from sfepy.discrete.quadratures import quadrature_tables, tp_geometries
        from sfepy.discrete.integrals import Integral

        if sm is None:
            self.report('cannot import sympy, skipping')
            return True

        quad = Integral('test_integral')

        ok = True
        failed = []
        for geometry, qps in ordered_iteritems(quadrature_tables):
            self.report('geometry:', geometry)

            if geometry == '0_1':
                continue

            elif geometry in tp_geometries:
                iter_qp = range(1, 11)

            elif geometry == '2_3':
                iter_qp = range(1, 25)

            elif geometry == '3_4':
                iter_qp = range(1, 12)

            else:
                iter_qp = sorted(qps.keys())

            for order in iter_qp:
                self.report('order:', order)

                aux = self._test_quadratures(quad, geometry, order)
                _ok, integral, val = aux

                if not _ok:
                    failed.append((geometry, order, float(integral), val))

                ok = ok and _ok

        if not ok:
            self.report('failed:')
            for aux in failed:
                self.report(aux, '%.1e' % abs(aux[2] - aux[3]))

        return ok
Code example #24
File: test_quadratures.py  Project: rc/sfepy
    def test_quadratures(self):
        """
        Test if the quadratures have orders they claim to have, using
        symbolic integration by sympy.
        """
        from sfepy.discrete.quadratures import quadrature_tables, tp_geometries
        from sfepy.discrete.integrals import Integral

        if sm is None:
            self.report("cannot import sympy, skipping")
            return True

        quad = Integral("test_integral")

        ok = True
        failed = []
        for geometry, qps in ordered_iteritems(quadrature_tables):
            self.report("geometry:", geometry)

            if geometry in tp_geometries:
                iter_qp = range(1, 11)

            elif geometry == "2_3":
                iter_qp = range(1, 25)

            elif geometry == "3_4":
                iter_qp = range(1, 12)

            else:
                iter_qp = sorted(qps.keys())

            for order in iter_qp:
                self.report("order:", order)

                aux = self._test_quadratures(quad, geometry, order)
                _ok, integral, val = aux

                if not _ok:
                    failed.append((geometry, order, float(integral), val))

                ok = ok and _ok

        if not ok:
            self.report("failed:")
            for aux in failed:
                self.report(aux, "%.1e" % abs(aux[2] - aux[3]))

        return ok
Code example #25
    def test_quadratures(self):
        """
        Test if the quadratures have orders they claim to have, using
        symbolic integration by sympy.
        """
        from sfepy.fem.quadratures import quadrature_tables, tp_geometries
        from sfepy.fem.integrals import Integral

        if sm is None:
            self.report('cannot import sympy, skipping')
            return True

        quad = Integral('test_integral')

        ok = True
        failed = []
        for geometry, qps in ordered_iteritems(quadrature_tables):
            self.report('geometry:', geometry)

            if geometry in tp_geometries:
                iter_qp = xrange(1, 11)

            elif geometry == '2_3':
                iter_qp = xrange(1, 25)

            elif geometry == '3_4':
                iter_qp = xrange(1, 12)

            else:
                iter_qp = sorted(qps.keys())

            for order in iter_qp:
                self.report('order:', order)

                aux = self._test_quadratures(quad, geometry, order)
                _ok, integral, val = aux

                if not _ok:
                    failed.append((geometry, order, float(integral), val))

                ok = ok and _ok

        if not ok:
            self.report('failed:')
            for aux in failed:
                self.report(aux, '%.1e' % abs(aux[2] - aux[3]))

        return ok
Code example #26
File: log.py  Project: xiaoyao79/sfepy
def write_log(output, log, info):
    xlabels, ylabels, yscales, names, plot_kwargs = zip(*info.values())
    _write_header(output, xlabels, ylabels, yscales, names, plot_kwargs)

    for ii, (xlabel, ylabel, yscale, names, plot_kwargs) \
        in ordered_iteritems(info):
        for ip, name in enumerate(names):
            xs, ys, vlines = log[name]

            for ir, x in enumerate(xs):
                output('{}: {}: {:.16e}'.format(name, x, ys[ir]))

                if x in vlines:
                    output(name + ': -----')

    output('# ended: %s' % time.asctime())
Code example #27
File: log.py  Project: rc/sfepy
def write_log(output, log, info):
    xlabels, ylabels, yscales, names, plot_kwargs = zip(*info.values())
    _write_header(output, xlabels, ylabels, yscales, names, plot_kwargs)

    for ii, (xlabel, ylabel, yscale, names, plot_kwargs) \
        in ordered_iteritems(info):
        for ip, name in enumerate(names):
            xs, ys, vlines = log[name]

            for ir, x in enumerate(xs):
                output('{}: {}: {:.16e}'.format(name, x, ys[ir]))

                if x in vlines:
                    output(name + ': -----')

    output('# ended: %s' % time.asctime())
Code example #28
File: coefficients.py  Project: renatocoutinho/sfepy
 def _save_dict( self, adict, fd, names, format ):
     for key, val in ordered_iteritems(adict):
         try:
             lname = names[key]
         except:
             lname = key
         fd.write( '%s:\n' % lname )
         if isinstance( val, dict ):
             self._save_dict( val, fd, names, format )
             fd.write( '\n' )
         elif isinstance(val, basestr):
             fd.write( val + '\n' )
         elif isinstance( val, float ):
             fd.write( '%e\n' % val )
         elif val.ndim == 0:
             fd.write( format % val )
             fd.write( '\n' )
         elif val.ndim == 1:
             for ic in xrange( val.shape[0] ):
                 fd.write( format % val[ic] )
                 if ic < (val.shape[0] - 1):
                     fd.write( ', ' )
                 else:
                     fd.write( '\n' )
         elif val.ndim == 2:
             for ir in xrange( val.shape[0] ):
                 for ic in xrange( val.shape[1] ):
                     fd.write( format % val[ir,ic] )
                     if ic < (val.shape[1] - 1):
                         fd.write( ', ' )
                     elif ir < (val.shape[0] - 1):
                         fd.write( ';\n' )
             fd.write( '\n' )
         elif val.ndim == 3:
             for ii in xrange( val.shape[0] ):
                 fd.write( '  step %d:\n' % ii )
                 for ir in xrange( val.shape[1] ):
                     for ic in xrange( val.shape[2] ):
                         fd.write( '  ' + format % val[ii,ir,ic] )
                         if ic < (val.shape[2] - 1):
                             fd.write( ', ' )
                         elif ir < (val.shape[1] - 1):
                             fd.write( ';\n' )
                 fd.write( '\n' )
             fd.write( '\n' )
         fd.write( '\n' )
Code example #29
    def _save_dict_latex(self, adict, fd, names, idx=None):
        fd.write(r'\begin{itemize}')
        fd.write('\n')
        for key, val in ordered_iteritems(adict):
            if key.startswith('_a_'): continue

            try:
                lname = names[key]
            except:
                lname = self._escape_latex(key)
            fd.write('\item %s:' % lname)
            fd.write('\n')

            if isinstance(val, list):
                if idx is not None:
                    val = val[idx]
                else:
                    raise NotImplementedError("'idx' must be set in the case "
                                              "of multi-coefficients!")

            if isinstance(val, dict):
                self._save_dict_latex(val, fd, names)

            elif isinstance(val, basestr):
                fd.write(self._escape_latex(val) + '\n')

            elif isinstance(val, float):
                fd.write('$' + self._format(val) + '$\n')

            elif isinstance(val, nm.ndarray):
                if val.ndim == 0:
                    fd.write('$' + self._format(val) + '$\n')

                elif val.ndim == 1:
                    self._write1d(fd, val)

                elif val.ndim == 2:
                    self._write2d(fd, val)

            else:
                fd.write('%s' % val)

        fd.write(r'\end{itemize}')
        fd.write('\n\n')
Code example #30
File: coefficients.py  Project: clazaro/sfepy
    def _save_dict_latex(self, adict, fd, names, idx=None):
        fd.write( r'\begin{itemize}' )
        fd.write( '\n' )
        for key, val in ordered_iteritems(adict):
            if key.startswith('_a_'): continue

            try:
                lname = names[key]
            except:
                lname = self._escape_latex(key)
            fd.write( '\item %s:' % lname )
            fd.write( '\n' )

            if isinstance(val, list):
                if idx is not None:
                    val = val[idx]
                else:
                    raise NotImplementedError("'idx' must be set in the case "
                                              "of multi-coefficients!")

            if isinstance(val, dict):
                self._save_dict_latex(val, fd, names)

            elif isinstance(val, basestr):
                fd.write(self._escape_latex(val) + '\n')

            elif isinstance(val, float):
                fd.write('$' + self._format(val) + '$\n')

            elif isinstance(val, nm.ndarray):
                if val.ndim == 0:
                    fd.write('$' + self._format(val) + '$\n')

                elif val.ndim == 1:
                    self._write1d(fd, val)

                elif val.ndim == 2:
                    self._write2d(fd, val)

            else:
                fd.write('%s' % val)

        fd.write( r'\end{itemize}' )
        fd.write( '\n\n' )
Code example #31
def write_log(output, log, info):
    xlabels, ylabels, yscales, names, plot_kwargs = zip(*info.values())
    _write_header(output, xlabels, ylabels, yscales, names, plot_kwargs)

    offset = 0
    for ig, (xlabel, ylabel, yscale, names, plot_kwargs) \
        in ordered_iteritems(info):
        for ip, name in enumerate(names):
            xs, ys, vlines = log[ip + offset]

            for ir, x in enumerate(xs):
                output('{}: {}: {:.16e}'.format(ip + offset, x, ys[ir]))

                if x in vlines:
                    output('%d: -----' % (ip + offset))

        offset += len(names)

    output('# ended: %s' % time.asctime())
Code example #32
File: test_term_call_modes.py  Project: clazaro/sfepy
    def test_term_call_modes(self):
        from sfepy.terms import term_table
        ok = True

        failed = []
        for domain in self.domains:
            self.report('domain: %s' % domain.name)

            domain_geometry = list(domain.geom_els.values())[0].name
            if domain.shape.dim != domain.shape.tdim:
                domain_geometry = '%d_%s' % (domain.shape.dim, domain_geometry)

            for _, term_cls in ordered_iteritems(term_table):
                if not domain_geometry in term_cls.geometries:
                    continue

                vint = ('volume', 'point', 'custom')
                rname = 'Omega' if term_cls.integration in vint else 'Gamma'

                self.report('<-- %s ...' % term_cls.name)

                if rname == 'Gamma' and domain.mesh.dim == 1:
                    self.report('--> 1D Gamma region: not tested!')

                elif term_cls.arg_shapes:
                    try:
                        _ok = self._test_single_term(term_cls, domain, rname)

                    except:
                        _ok = False

                    if not _ok:
                        failed.append((domain.name, term_cls.name))

                    ok = ok and _ok
                    self.report('--> ok: %s' % _ok)

                else:
                    self.report('--> not tested!')

        self.report('failed:', failed)

        return ok
Code example #33
    def test_term_call_modes(self):
        from sfepy.terms import term_table
        ok = True

        failed = []
        for domain in self.domains:
            self.report('domain: %s' % domain.name)

            for _, term_cls in ordered_iteritems(term_table):
                if not domain.groups[0].gel.name in term_cls.geometries:
                    continue

                rname = 'Omega' \
                        if term_cls.integration in ('volume', 'point') \
                        else 'Gamma'

                self.report('<-- %s ...' % term_cls.name)

                if rname == 'Gamma' and domain.mesh.dim == 1:
                    self.report('--> 1D Gamma region: not tested!')

                elif term_cls.arg_shapes:
                    try:
                        _ok = self._test_single_term(term_cls, domain, rname)

                    except:
                        _ok = False

                    if not _ok:
                        failed.append((domain.name, term_cls.name))

                    ok = ok and _ok
                    self.report('--> ok: %s' % _ok)

                else:
                    self.report('--> not tested!')

        self.report('failed:', failed)

        return ok
Code example #34
    def test_term_call_modes(self):
        from sfepy.terms import term_table
        ok = True

        failed = []
        for domain in self.domains:
            self.report('domain: %s' % domain.name)

            for _, term_cls in ordered_iteritems(term_table):
                if not domain.groups[0].gel.name in term_cls.geometries:
                    continue

                rname = 'Omega' \
                        if term_cls.integration in ('volume', 'point') \
                        else 'Gamma'

                self.report('<-- %s ...' % term_cls.name)

                if rname == 'Gamma' and domain.mesh.dim == 1:
                    self.report('--> 1D Gamma region: not tested!')

                elif term_cls.arg_shapes:
                    try:
                        _ok = self._test_single_term(term_cls, domain, rname)

                    except:
                        _ok = False

                    if not _ok:
                        failed.append((domain.name, term_cls.name))

                    ok = ok and _ok
                    self.report('--> ok: %s' % _ok)

                else:
                    self.report('--> not tested!')

        self.report('failed:', failed)

        return ok
Code example #35
    def test_base_functions_values(self):
        """
        Compare base function values and their gradients with correct
        data. Also test that sum of values over all element nodes gives one.
        """
        from sfepy.base.base import ordered_iteritems
        from sfepy.fem.poly_spaces import PolySpace

        ok = True

        for key, val in ordered_iteritems(test_bases):
            gel = self.gels[key[:3]]
            diff = key[-4:] == 'grad'
            order = int(key[5])
            force_bubble = key[6:7] == 'B'

            ps = PolySpace.any_from_args('aux',
                                         gel,
                                         order,
                                         base='lagrange',
                                         force_bubble=force_bubble)
            dim = ps.geometry.dim
            coors = nm.r_[ps.geometry.coors, [[0.2] * dim]]

            bf = ps.eval_base(coors, diff=diff)
            _ok = nm.allclose(val, bf, rtol=0.0, atol=1e-14)
            ## if not _ok:
            ##     nm.set_printoptions(threshold=1000000, linewidth=65)
            ##     print bf.__repr__()

            if not diff:
                _ok = _ok and nm.allclose(
                    bf.sum(axis=2), 1.0, rtol=0.0, atol=1e-14)

            self.report('%s: %s' % (key, _ok))

            ok = ok and _ok

        return ok
Code example #36
File: gen_gallery.py  Project: muratchelik/sfepy
def generate_gallery(examples_dir, output_filename, doc_dir,
                     rst_dir, thumbnails_dir, dir_map, n_col=3):
    """
    Generate the gallery rst file with thumbnail images and links to
    examples.

    Parameters
    ----------
    output_filename : str
        The output rst file name.
    doc_dir : str
        The top level directory of gallery files.
    rst_dir : str
        The full path to rst files of examples within `doc_dir`.
    thumbnails_dir : str
        The full path to thumbnail images within `doc_dir`.
    dir_map : dict
        The directory mapping returned by `generate_rst_files()`
    n_col : int
        The number of columns in the gallery table.
    """
    output('generating %s...' % output_filename)

    lines = [_gallery_head]

    for dirname, filenames in ordered_iteritems(dir_map):
        title= ['   %s' % dirname.title().replace('_', ' '),
                '   ' + len(dirname) * '^' + '',
                _gallery_table]

        llines = []
        icol = 0
        for ex_filename, rst_filename in filenames:
            ebase = ex_filename.replace(examples_dir, '')[1:]
            link = os.path.splitext(rst_filename)[0]

            thumbnail_filename = next(_get_fig_filenames(ebase,
                                                         thumbnails_dir))
            if not os.path.isfile(thumbnail_filename):
                # Skip examples with no image (= failed examples).
                output('warning: figure "%s" not found' % thumbnail_filename)
                continue

            thumbnail_name = thumbnail_filename.replace(doc_dir, '..')
            path_to_file = os.path.join(examples_dir, ebase)
            docstring = get_default(import_file(path_to_file).__doc__,
                                    'missing description!')
            docstring = docstring.replace('e.g.', 'eg:')
            docstring = docstring.split('.')
            label = docstring[0].strip()
            label = label.replace('\n', ' ')
            label = label.replace('  ', ' ')

            llines.append(_rst_item % (' ' if icol else '*', thumbnail_name,
                                       link + '.html', label, link))
            icol = (icol + 1) % n_col

        if icol > 0:
            for j in range(icol, n_col):
                llines.append(_rst_empty_item)

        if len(llines) > 0:
            lines += title + llines
        else:
            output('warning: no figures in "%s"' % dirname)

    fd = open(output_filename, 'wt')
    fd.write('\n'.join(lines))
    fd.close()

    output('...done')
Code example #37
File: gen_gallery.py  Project: frankipod/sfepy
def generate_gallery_html(examples_dir, output_filename, gallery_dir,
                          rst_dir, thumbnails_dir, dir_map, link_prefix):
    """
    Generate the gallery html file with thumbnail images and links to
    examples.

    Parameters
    ----------
    output_filename : str
        The output html file name.
    gallery_dir : str
        The top level directory of gallery files.
    rst_dir : str
        The full path to rst files of examples within `gallery_dir`.
    thumbnails_dir : str
        The full path to thumbnail images within `gallery_dir`.
    dir_map : dict
        The directory mapping returned by `generate_rst_files()`
    link_prefix : str, optional
        The prefix to prepend to links to individual pages of examples.
    """
    output('generating %s...' % output_filename)

    with open(_gallery_template_file, 'r') as fd:
        gallery_template = fd.read()

    div_lines=[]
    sidebar = []
    for dirname, filenames in ordered_iteritems(dir_map):
        full_dirname = os.path.join(rst_dir, dirname)
        dirnamenew = dirname.replace("_"," ")
        sidebarline = _side_links % (dirname, dirnamenew.title())
        lines = []
        for ex_filename, rst_filename in filenames:
            full_rst_filename = os.path.join(full_dirname, rst_filename)

            ebase = full_rst_filename.replace(rst_dir, '')[1:]
            ebase = edit_filename(ebase, new_ext='.py')

            link_base = full_rst_filename.replace(gallery_dir, '')[1:]
            link = os.path.join(link_prefix,
                                os.path.splitext(link_base)[0] + '.html')

            _get_fig_filenames(ebase, thumbnails_dir).next()
            for thumbnail_filename in _get_fig_filenames(ebase,
                                                         thumbnails_dir):
                if not os.path.isfile(thumbnail_filename):
                    # Skip examples with no image (= failed examples).
                    continue

                thumbnail_name = thumbnail_filename.replace(gallery_dir,
                                                            '')[1:]
                path_to_file = os.path.join(examples_dir,ebase)
                docstring = get_default(import_file(path_to_file).__doc__,
                                        'missing description!')
                docstring = docstring.replace('e.g.', 'eg:')
                docstring = docstring.split('.')
                line = _link_template % (link,os.path.splitext(ebase)[0],
                                         thumbnail_name,link,docstring[0]+'.')
                lines.append(line)

        if(len(lines)!=0):
            div_lines.append(_div_line % (dirname, dirnamenew.title(),
                                          dirname, '\n'.join(lines)))
            sidebar.append(sidebarline)

    fd = open(output_filename, 'w')
    fd.write(gallery_template % ((link_prefix,) * 7
                                 + ('\n'.join(sidebar), '\n'.join(div_lines))))
    fd.close()

    output('...done')
Code example #38
File: gen_gallery.py  Project: frankipod/sfepy
def generate_rst_files(rst_dir, examples_dir, images_dir):
    """
    Generate Sphinx rst files for examples in `examples_dir` with images
    in `images_dir` and put them into `rst_dir`.

    Returns
    -------
    dir_map : dict
        The directory mapping of examples and corresponding rst files.
    """
    ensure_path(rst_dir + os.path.sep)

    output('generating rst files...')

    dir_map = {}
    for ex_filename in locate_files('*.py', examples_dir):
        if _omit(ex_filename): continue

        ebase = ex_filename.replace(examples_dir, '')[1:]
        base_dir = os.path.dirname(ebase)

        rst_filename = os.path.basename(ex_filename).replace('.py', '.rst')

        dir_map.setdefault(base_dir, []).append((ex_filename, rst_filename))

    for dirname, filenames in dir_map.iteritems():
        filenames = sorted(filenames, cmp=lambda a, b: cmp(a[1], b[1]))
        dir_map[dirname ] = filenames

    # Main index.
    mfd = open(os.path.join(rst_dir, 'index.rst'), 'w')
    mfd.write(_index % ('sfepy', 'Examples', '=' * 8))

    for dirname, filenames in ordered_iteritems(dir_map):
        full_dirname = os.path.join(rst_dir, dirname)
        ensure_path(full_dirname + os.path.sep)

        # Subdirectory index.
        ifd = open(os.path.join(full_dirname, 'index.rst'), 'w')
        ifd.write(_index % (dirname, dirname, '=' * len(dirname)))

        for ex_filename, rst_filename in filenames:
            full_rst_filename = os.path.join(full_dirname, rst_filename)
            output('"%s"' % full_rst_filename.replace(rst_dir, '')[1:])
            rst_filename_ns = rst_filename.replace('.rst', '')
            ebase = ex_filename.replace(examples_dir, '')[1:]

            rst_ex_filename = _make_sphinx_path(ex_filename)
            docstring = get_default(import_file(ex_filename).__doc__,
                                    'missing description!')

            ifd.write('    %s\n' % rst_filename_ns)
            fig_include = ''
            fig_base = _get_fig_filenames(ebase, images_dir).next()
            for fig_filename in _get_fig_filenames(ebase, images_dir):
                rst_fig_filename = _make_sphinx_path(fig_filename)

                if os.path.exists(fig_filename):
                    fig_include += _image % rst_fig_filename + '\n'

            # Example rst file.
            fd = open(full_rst_filename, 'w')
            fd.write(_include % (fig_base, ebase, '=' * len(ebase),
                                 docstring,
                                 fig_include,
                                 rst_ex_filename, rst_ex_filename))
            fd.close()

        ifd.close()

        mfd.write('    %s/index\n' % dirname)

    mfd.close()

    output('...done')

    return dir_map
Code example #39
def main():
    from sfepy import data_dir

    parser = OptionParser(usage=usage, version='%prog')
    parser.add_option('--diffusivity',
                      metavar='float',
                      type=float,
                      action='store',
                      dest='diffusivity',
                      default=1e-5,
                      help=helps['diffusivity'])
    parser.add_option('--ic-max',
                      metavar='float',
                      type=float,
                      action='store',
                      dest='ic_max',
                      default=2.0,
                      help=helps['ic_max'])
    parser.add_option('--order',
                      metavar='int',
                      type=int,
                      action='store',
                      dest='order',
                      default=2,
                      help=helps['order'])
    parser.add_option('-r',
                      '--refine',
                      metavar='int',
                      type=int,
                      action='store',
                      dest='refine',
                      default=0,
                      help=helps['refine'])
    parser.add_option('-p',
                      '--probe',
                      action="store_true",
                      dest='probe',
                      default=False,
                      help=helps['probe'])
    parser.add_option('-s',
                      '--show',
                      action="store_true",
                      dest='show',
                      default=False,
                      help=helps['show'])
    options, args = parser.parse_args()

    assert_((0 < options.order),
            'temperature approximation order must be at least 1!')

    output('using values:')
    output('  diffusivity:', options.diffusivity)
    output('  max. IC value:', options.ic_max)
    output('uniform mesh refinement level:', options.refine)

    mesh = Mesh.from_file(data_dir + '/meshes/3d/cylinder.mesh')
    domain = FEDomain('domain', mesh)

    if options.refine > 0:
        for ii in xrange(options.refine):
            output('refine %d...' % ii)
            domain = domain.refine()
            output('... %d nodes %d elements' %
                   (domain.shape.n_nod, domain.shape.n_el))

    omega = domain.create_region('Omega', 'all')
    left = domain.create_region('Left', 'vertices in x < 0.00001', 'facet')
    right = domain.create_region('Right', 'vertices in x > 0.099999', 'facet')

    field = Field.from_args('fu',
                            nm.float64,
                            'scalar',
                            omega,
                            approx_order=options.order)

    T = FieldVariable('T', 'unknown', field, history=1)
    s = FieldVariable('s', 'test', field, primary_var_name='T')

    m = Material('m', diffusivity=options.diffusivity * nm.eye(3))

    integral = Integral('i', order=2 * options.order)

    t1 = Term.new('dw_diffusion(m.diffusivity, s, T)',
                  integral,
                  omega,
                  m=m,
                  s=s,
                  T=T)
    t2 = Term.new('dw_volume_dot(s, dT/dt)', integral, omega, s=s, T=T)
    eq = Equation('balance', t1 + t2)
    eqs = Equations([eq])

    # Boundary conditions.
    ebc1 = EssentialBC('T1', left, {'T.0': 2.0})
    ebc2 = EssentialBC('T2', right, {'T.0': -2.0})

    # Initial conditions.
    def get_ic(coors, ic):
        x, y, z = coors.T
        return 2 - 40.0 * x + options.ic_max * nm.sin(4 * nm.pi * x / 0.1)

    ic_fun = Function('ic_fun', get_ic)
    ic = InitialCondition('ic', omega, {'T.0': ic_fun})

    ls = ScipyDirect({})

    nls_status = IndexedStruct()
    nls = Newton({'is_linear': True}, lin_solver=ls, status=nls_status)

    pb = Problem('heat', equations=eqs, nls=nls, ls=ls)
    pb.set_bcs(ebcs=Conditions([ebc1, ebc2]))
    pb.set_ics(Conditions([ic]))

    tss = SimpleTimeSteppingSolver({
        't0': 0.0,
        't1': 100.0,
        'n_step': 11
    },
                                   problem=pb)
    tss.init_time()

    if options.probe:
        # Prepare probe data.
        probes, labels = gen_lines(pb)

        ev = pb.evaluate
        order = 2 * (options.order - 1)

        gfield = Field.from_args('gu',
                                 nm.float64,
                                 'vector',
                                 omega,
                                 approx_order=options.order - 1)
        dvel = FieldVariable('dvel',
                             'parameter',
                             gfield,
                             primary_var_name='(set-to-None)')
        cfield = Field.from_args('gu',
                                 nm.float64,
                                 'scalar',
                                 omega,
                                 approx_order=options.order - 1)
        component = FieldVariable('component',
                                  'parameter',
                                  cfield,
                                  primary_var_name='(set-to-None)')

        nls_options = {'eps_a': 1e-16, 'i_max': 1}

        if options.show:
            plt.ion()

    # Solve the problem using the time stepping solver.
    suffix = tss.ts.suffix
    for step, time, state in tss():
        if options.probe:
            # Probe the solution.
            dvel_qp = ev('ev_diffusion_velocity.%d.Omega(m.diffusivity, T)' %
                         order,
                         copy_materials=False,
                         mode='qp')
            project_by_component(dvel,
                                 dvel_qp,
                                 component,
                                 order,
                                 nls_options=nls_options)

            all_results = []
            for ii, probe in enumerate(probes):
                fig, results = probe_results(ii, T, dvel, probe, labels[ii])

                all_results.append(results)

            plt.tight_layout()
            fig.savefig('time_poisson_interactive_probe_%s.png' %
                        (suffix % step),
                        bbox_inches='tight')

            if options.show:
                plt.draw()

            for ii, results in enumerate(all_results):
                output('probe %d (%s):' % (ii, probes[ii].name))
                output.level += 2
                for key, res in ordered_iteritems(results):
                    output(key + ':')
                    val = res[1]
                    output('  min: %+.2e, mean: %+.2e, max: %+.2e' %
                           (val.min(), val.mean(), val.max()))
                output.level -= 2
Code example #40
File: linear_elastic.py  Project: adantra/Arma_paper
def main():
    from sfepy import data_dir

    parser = OptionParser(usage=usage, version='%prog')
    parser.add_option('-s', '--show',
                      action="store_true", dest='show',
                      default=False, help=help['show'])
    options, args = parser.parse_args()
    options_probe = True
    folder = str(uuid.uuid4())
    os.mkdir(folder)
    os.chdir(folder)
    
    file = open('README.txt', 'w')
    file.write('DIMENSIONS\n')
    file.write('Lx = '+str(dims[0])+' Ly = '+str(dims[1])+' Lz = '+str(dims[2])+'\n')
    file.write('DISCRETIZATION (NX, NY, NZ)\n')
    file.write(str(NX)+'  '+str(NY)+'  '+str(NZ)+'\n')
    file.write('MATERIALS\n')
    file.write(str(E_f)+' '+str(nu_f)+' '+str(E_m)+' '+str(nu_m)+'\n')
    
    #mesh = Mesh.from_file(data_dir + '/meshes/2d/rectangle_tri.mesh')
    
    mesh = mesh_generators.gen_block_mesh(dims,shape,centre,name='block')
    domain = FEDomain('domain', mesh)

    min_x, max_x = domain.get_mesh_bounding_box()[:,0]
    min_y, max_y = domain.get_mesh_bounding_box()[:,1]
    min_z, max_z = domain.get_mesh_bounding_box()[:,2]
    eps = 1e-8 * (max_x - min_x)
    print min_x, max_x
    print min_y, max_y
    print min_z, max_z
    R1 = domain.create_region('Ym',
                                  'vertices in z < %.10f' % (max_z/2))
    R2 = domain.create_region('Yf',
                                  'vertices in z >= %.10f' % (min_z/2))
    omega = domain.create_region('Omega', 'all')
    gamma1 = domain.create_region('Left',
                                  'vertices in x < %.10f' % (min_x + eps), 
                                  'facet')
    gamma2 = domain.create_region('Right',
                                  'vertices in x > %.10f' % (max_x - eps),
                                  'facet')
    gamma3 = domain.create_region('Front',
                                  'vertices in y < %.10f' % (min_y + eps),
                                  'facet')
    gamma4 = domain.create_region('Back',
                                  'vertices in y > %.10f' % (max_y - eps),
                                  'facet')
    gamma5 = domain.create_region('Bottom',
                                  'vertices in z < %.10f' % (min_z + eps),
                                  'facet')
    gamma6 = domain.create_region('Top',
                                  'vertices in z > %.10f' % (max_z - eps),
                                  'facet')



    field = Field.from_args('fu', nm.float64, 'vector', omega, approx_order=2)

    u = FieldVariable('u', 'unknown', field)
    v = FieldVariable('v', 'test', field, primary_var_name='u')
    mu=1.1
    lam=1.0
    m = Material('m', lam=lam, mu=mu)
    f = Material('f', val=[[0.0], [0.0],[0.0]])
    #mu,lam=m.get_constants_mu_lam()
    #print mu.lam 
    D = stiffness_from_lame(3,lam, mu)    
    mat = Material('Mat', D=D)

    #D = stiffness_from_youngpoisson(2, options.young, options.poisson)
    get_mat = Function('get_mat1',get_mat1)
    #get_mat1=Function('get_mat', (lambda ts, coors, mode=None, problem=None, **kwargs:
    #                get_mat(coors, mode, problem)))
    #mat = Material('Mat', function=Function('get_mat1',get_mat1))
    #mat = Material('Mat', 'get_mat')
    integral = Integral('i', order=3)

    t1 = Term.new('dw_lin_elastic(Mat.D, v, u)',
         integral, omega, Mat=mat, v=v, u=u)
    t2 = Term.new('dw_volume_lvf(f.val, v)', integral, omega, f=f, v=v)
    eq = Equation('balance', t1 + t2)
    eqs = Equations([eq])

    fix_u = EssentialBC('fix_u', gamma1, {'u.all' : 0.0})
    left_bc  = EssentialBC('Left',  gamma1, {'u.0' : 0.0})
    right_bc = EssentialBC('Right', gamma2, {'u.0' : 0.0})
    back_bc = EssentialBC('Front', gamma3, {'u.1' : 0.0})
    front_bc = EssentialBC('Back', gamma4, {'u.1' : 0.0})
    bottom_bc = EssentialBC('Bottom', gamma5, {'u.all' : 0.0})
    top_bc = EssentialBC('Top', gamma6, {'u.2' : 0.2})

    bc=[left_bc,right_bc,back_bc,front_bc,bottom_bc,top_bc]
    #bc=[bottom_bc,top_bc]

    bc_fun = Function('shift_u_fun', shift_u_fun, extra_args={'shift' : 0.01})
    shift_u = EssentialBC('shift_u', gamma2, {'u.0' : bc_fun})
    #get_mat = Function('get_mat1',get_mat1)
    #mat = Material('Mat', function=Function('get_mat1',get_mat1))
    #ls = ScipyDirect({'method':'umfpack'})

    ##############################
    #  ##### SOLVER SECTION  #####
    ##############################

    # GET MATRIX FOR PRECONDITIONER #

    # Alternative iterative solvers, kept for reference:
    #ls = ScipyIterative({'method':'bicgstab','i_max':5000,'eps_r':1e-10})
    #ls = ScipyIterative({})
    #ls = PyAMGSolver({'i_max':5000,'eps_r':1e-10})
    #conf = Struct(method='cg', precond='gamg', sub_precond=None, i_max=10000,
    #              eps_a=1e-50, eps_r=1e-5, eps_d=1e4, verbose=True)
    #ls = PETScKrylovSolver({'method' : 'cg', 'precond' : 'icc',
    #                        'eps_r' : 1e-10, 'i_max' : 5000})
    #conf = Struct(method='cg', precond='icc', eps_r=1e-10, i_max=5000)

    conf = Struct(method='bcgsl', precond='jacobi', sub_precond=None,
                  i_max=10000, eps_a=1e-50, eps_r=1e-10, eps_d=1e4,
                  verbose=True)
    ls = PETScKrylovSolver(conf)

    # Record the linear solver configuration in the README.
    #if hasattr(ls.name, 'ls.scipy_iterative'):
    file.write(str(ls.name)+' '+str(ls.conf.method)+' '+str(ls.conf.precond)
               +' '+str(ls.conf.eps_r)+' '+str(ls.conf.i_max)+'\n')
    #else:
    #    file.write(str(ls.name)+' '+str(ls.conf.method)+'\n')

    # Other configurations that were tried, kept for reference:
    #conf = Struct(method='bcgsl', precond='jacobi', sub_precond=None,
    #              i_max=10000, eps_a=1e-50, eps_r=1e-8, eps_d=1e4,
    #              verbose=True)
    #ls = PETScKrylovSolver(conf)
    #ls = ScipyIterative({'method':'bicgstab','i_max':100,'eps_r':1e-10})

    nls_status = IndexedStruct()
    nls = Newton({'i_max':1,'eps_a':1e-10}, lin_solver=ls, status=nls_status)

    pb = Problem('elasticity', equations=eqs, nls=nls, ls=ls)
    


    dd=pb.get_materials()['Mat']
    dd.set_function(get_mat1)
    
    
    pb.save_regions_as_groups('regions')

    #pb.time_update(ebcs=Conditions([fix_u, shift_u]))

    pb.time_update(ebcs=Conditions(bc))
    pb.save_regions_as_groups('regions')

    #ls = ScipyIterative({'method':'bicgstab','i_max':100,'eps_r':1e-10})
    #A = pb.mtx_a
    #M = spilu(A, fill_factor=1)
    #conf = Struct(solvers='ScipyIterative', method='bcgsl', sub_precond=None,
    #              i_max=1000, eps_r=1e-8)
    #pb.set_conf_solvers(conf)
    vec = pb.solve()
    print nls_status
    file.write('TIME TO SOLVE\n')
    file.write(str(nls.status.time_stats['solve'])+'\n')
    file.write('TIME TO CREATE MATRIX\n')
    file.write(str(nls.status.time_stats['matrix'])+'\n')
    #out = post_process(out, pb, state, extend=False)
    ev = pb.evaluate
    out = vec.create_output_dict()
    strain = ev('ev_cauchy_strain.3.Omega(u)', mode='el_avg')
    stress = ev('ev_cauchy_stress.3.Omega(Mat.D, u)', mode='el_avg',
                copy_materials=False)

    out['cauchy_strain'] = Struct(name='output_data', mode='cell',
                                  data=strain, dofs=None)
    out['cauchy_stress'] = Struct(name='output_data', mode='cell',
                                  data=stress, dofs=None)


    # Postprocess the solution.
    #out = vec.create_output_dict()
    #out = stress_strain(out, pb, vec,lam,mu, extend=True)
    #pb.save_state('its2D_interactive.vtk', out=out)
    #print 'I am here'
    pb.save_state('strain.vtk', out=out)
    #pb.save_state('disp.vtk', out=vec)
    #print 'now I am here'
    #out = stress_strain(out, pb, vec, extend=True)
    #pb.save_state('out.vtk', out=out)
    print nls_status
    
    order = 3
    strain_qp = ev('ev_cauchy_strain.%d.Omega(u)' % order, mode='qp')
    stress_qp = ev('ev_cauchy_stress.%d.Omega(Mat.D, u)' % order,
                       mode='qp', copy_materials=False)

    file.close()
    options_probe=False
    if options_probe:
        # Probe the solution.
        probes, labels = gen_lines(pb)
        nls_options = {'eps_a':1e-8,'i_max':1}
        ls = ScipyDirect({})
        ls2 = ScipyIterative({'method':'bicgstab','i_max':5000,'eps_r':1e-20})
        order = 5
        sfield = Field.from_args('sym_tensor', nm.float64, (3,), omega,
                                approx_order=order-1)
        stress = FieldVariable('stress', 'parameter', sfield,
                               primary_var_name='(set-to-None)')
        strain = FieldVariable('strain', 'parameter', sfield,
                               primary_var_name='(set-to-None)')

        cfield = Field.from_args('component', nm.float64, 1, omega,
                                 approx_order=order-1)
        component = FieldVariable('component', 'parameter', cfield,
                                  primary_var_name='(set-to-None)')

        ev = pb.evaluate
        order = 2*(order - 1) #2 * (2- 1)
        print "before strain_qp"
        strain_qp = ev('ev_cauchy_strain.%d.Omega(u)' % order, mode='qp')
        stress_qp = ev('ev_cauchy_stress.%d.Omega(Mat.D, u)' % order,
                       mode='qp', copy_materials=False)
        print "before projections"
        print stress
        project_by_component(strain, strain_qp, component, order,ls2,nls_options)
        #print 'strain done'
        project_by_component(stress, stress_qp, component, order,ls2,nls_options)

        print "after projections"
        
        all_results = []
        for ii, probe in enumerate(probes):
            fig, results = probe_results2(u, strain, stress, probe, labels[ii])

            fig.savefig('test_probe_%d.png' % ii)
            all_results.append(results)

        for ii, results in enumerate(all_results):
            output('probe %d:' % ii)
            output.level += 2
            for key, res in ordered_iteritems(results):
                output(key + ':')
                val = res[1]
                output('  min: %+.2e, mean: %+.2e, max: %+.2e'
                       % (val.min(), val.mean(), val.max()))
            output.level -= 2
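
Note: a sketch, with plain numpy only, of the isotropic stiffness matrix in Voigt notation that stiffness_from_lame(3, lam, mu) is used to build above; this is an illustration, not the sfepy implementation:

import numpy as nm

def stiffness_from_lame_sketch(dim, lam, mu):
    # Isotropic elastic stiffness in Voigt notation: lam fills the
    # normal-strain block, 2*mu is added on its diagonal, and mu sits on
    # the shear diagonal.
    sym = (dim + 1) * dim // 2
    D = nm.zeros((sym, sym), dtype=nm.float64)
    D[:dim, :dim] = lam
    D[nm.arange(dim), nm.arange(dim)] += 2.0 * mu
    D[nm.arange(dim, sym), nm.arange(dim, sym)] = mu
    return D

print(stiffness_from_lame_sketch(3, lam=1.0, mu=1.1))
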
Code example #41
    def _save_dict(self, adict, fd, names, format):
        for key, val in ordered_iteritems(adict):
            try:
                lname = names[key]
            except:
                lname = key
            fd.write('%s:\n' % lname)

            if hasattr(val, 'to_file_txt'):
                if val.to_file_txt is not None:
                    val.to_file_txt(fd, format, val)

                else:
                    fd.write('--\n')

            elif isinstance(val, dict):
                self._save_dict(val, fd, names, format)
                fd.write('\n')

            elif isinstance(val, list):
                if isinstance(val[0], basestr):
                    fd.write('\n'.join(val) + '\n')

            elif isinstance(val, basestr):
                fd.write(val + '\n')

            elif isinstance(val, float):
                fd.write('%e\n' % val)

            elif isinstance(val, nm.ndarray):
                if val.ndim == 0:
                    fd.write(format % val)
                    fd.write('\n')

                elif val.ndim == 1:
                    for ic in range(val.shape[0]):
                        fd.write(format % val[ic])
                        if ic < (val.shape[0] - 1):
                            fd.write(', ')
                        else:
                            fd.write('\n')

                elif val.ndim == 2:
                    for ir in range(val.shape[0]):
                        for ic in range(val.shape[1]):
                            fd.write(format % val[ir, ic])
                            if ic < (val.shape[1] - 1):
                                fd.write(', ')
                            elif ir < (val.shape[0] - 1):
                                fd.write(';\n')
                    fd.write('\n')

                elif val.ndim == 3:
                    for ii in range(val.shape[0]):
                        fd.write('  step %d:\n' % ii)
                        for ir in range(val.shape[1]):
                            for ic in range(val.shape[2]):
                                fd.write('  ' + format % val[ii, ir, ic])
                                if ic < (val.shape[2] - 1):
                                    fd.write(', ')
                                elif ir < (val.shape[1] - 1):
                                    fd.write(';\n')
                        fd.write('\n')
                    fd.write('\n')

            else:
                fd.write('--\n')

            fd.write('\n')
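
Note: the examples in this listing rely on ordered_iteritems() from sfepy.base.base to visit dictionary items with their keys in sorted order, so that output such as the text dump above is reproducible. A minimal sketch of that behaviour:

def ordered_iteritems_sketch(adict):
    # Yield (key, value) pairs with the keys sorted, so the output does not
    # depend on the dictionary's insertion or hash order.
    for key in sorted(adict.keys()):
        yield key, adict[key]

for key, val in ordered_iteritems_sketch({'b' : 2, 'a' : 1}):
    print('%s: %s' % (key, val))
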
Code example #42
def create_task_dof_maps(field, cell_tasks, inter_facets, is_overlap=True,
                         use_expand_dofs=False, save_inter_regions=False,
                         output_dir=None):
    """
    For each task list its inner and interface DOFs of the given field and
    create PETSc numbering that is consecutive in each subdomain.

    For each task, the DOF map has the following structure::

      [inner,
       [own_inter1, own_inter2, ...],
       [overlap_cells1, overlap_cells2, ...],
       n_task_total, task_offset]

    The overlapping cells are defined so that the system matrix corresponding
    to each task can be assembled independently, see [1]. TODO: Some "corner"
    cells may be added even if not needed - filter them out by using the PETSc
    DOFs range.

    When debugging domain partitioning problems, it is advisable to set
    `save_inter_regions` to True to save the task interfaces as meshes as well
    as vertex-based markers - to be used only with moderate problems and small
    numbers of tasks.

    [1] J. Sistek and F. Cirak. Parallel iterative solution of the
    incompressible Navier-Stokes equations with application to rotating
    wings. Submitted for publication, 2015
    """
    domain = field.domain
    cmesh = domain.cmesh

    if use_expand_dofs:
        id_map = nm.zeros(field.n_nod * field.n_components, dtype=nm.uint32)

    else:
        id_map = nm.zeros(field.n_nod, dtype=nm.uint32)

    def _get_dofs_region(field, region):
        dofs = field.get_dofs_in_region(region)
        if use_expand_dofs:
            dofs = expand_dofs(dofs, field.n_components)
        return dofs

    def _get_dofs_conn(field, conn):
        dofs = nm.unique(conn)
        if use_expand_dofs:
            dofs = expand_dofs(dofs, field.n_components)
        return dofs

    dof_maps = {}
    count = 0
    inter_count = 0
    ocs = nm.zeros(0, dtype=nm.int32)
    cell_parts = []
    for ir, ntasks in ordered_iteritems(inter_facets):
        cells = nm.where(cell_tasks == ir)[0].astype(nm.int32)
        cell_parts.append(cells)

        cregion = Region.from_cells(cells, domain, name='task_%d' % ir)
        domain.regions.append(cregion)
        dofs = _get_dofs_region(field, cregion)

        rdof_map = dof_maps.setdefault(ir, [None, [], [], 0, 0])
        inter_dofs = []
        for ic, facets in ordered_iteritems(ntasks):
            cdof_map = dof_maps.setdefault(ic, [None, [], [], 0, 0])

            name = 'inter_%d_%d' % (ir, ic)
            ii = ir

            region = Region.from_facets(facets, domain, name,
                                        parent=cregion.name)
            region.update_shape()

            if save_inter_regions:
                output_dir = output_dir if output_dir is not None else '.'
                filename = os.path.join(output_dir, '%s.mesh' % name)

                aux = domain.mesh.from_region(region, domain.mesh,
                                              is_surface=True)
                aux.write(filename, io='auto')

                mask = nm.zeros((domain.mesh.n_nod, 1), dtype=nm.float64)
                mask[region.vertices] = 1
                out = {name : Struct(name=name, mode='vertex', data=mask)}
                filename = os.path.join(output_dir, '%s.h5' % name)
                domain.mesh.write(filename, out=out, io='auto')

            inter_dofs.append(_get_dofs_region(field, region))

            sd = FESurface('surface_data_%s' % region.name, region,
                           field.efaces, field.econn, field.region)
            econn = sd.get_connectivity()
            n_facet = econn.shape[0]

            ii2 = max(int(n_facet / 2), 1)

            dr = _get_dofs_conn(field, econn[:ii2])
            ii = nm.where((id_map[dr] == 0))[0]
            n_new = len(ii)
            if n_new:
                rdof_map[1].append(dr[ii])
                rdof_map[3] += n_new
                id_map[dr[ii]] = 1
                inter_count += n_new
                count += n_new

                if is_overlap:
                    ovs = cmesh.get_incident(0, region.facets[:ii2],
                                             cmesh.tdim - 1)
                    ocs = cmesh.get_incident(cmesh.tdim, ovs, 0)
                    rdof_map[2].append(ocs.astype(nm.int32))

            dc = _get_dofs_conn(field, econn[ii2:])
            ii = nm.where((id_map[dc] == 0))[0]
            n_new = len(ii)
            if n_new:
                cdof_map[1].append(dc[ii])
                cdof_map[3] += n_new
                id_map[dc[ii]] = 1
                inter_count += n_new
                count += n_new

                if is_overlap:
                    ovs = cmesh.get_incident(0, region.facets[ii2:],
                                             cmesh.tdim - 1)
                    ocs = cmesh.get_incident(cmesh.tdim, ovs, 0)
                    cdof_map[2].append(ocs.astype(nm.int32))

        domain.regions.pop() # Remove the cell region.

        inner_dofs = nm.setdiff1d(dofs, nm.concatenate(inter_dofs))
        n_inner = len(inner_dofs)
        rdof_map[3] += n_inner
        assert_(nm.all(id_map[inner_dofs] == 0))
        id_map[inner_dofs] = 1
        count += n_inner

        rdof_map[0] = inner_dofs

    offset = 0
    overlap_cells = []
    for ir, dof_map in ordered_iteritems(dof_maps):
        n_owned = dof_map[3]

        i0 = len(dof_map[0])
        id_map[dof_map[0]] = nm.arange(offset, offset + i0, dtype=nm.uint32)
        for aux in dof_map[1]:
            i1 = len(aux)
            id_map[aux] = nm.arange(offset + i0, offset + i0 + i1,
                                    dtype=nm.uint32)
            i0 += i1

        if len(dof_map[2]):
            ocs = nm.unique(nm.concatenate(dof_map[2]))

        else:
            ocs = nm.zeros(0, dtype=nm.int32)

        overlap_cells.append(ocs)

        assert_(i0 == n_owned)

        dof_map[4] = offset
        offset += n_owned

    if not len(dof_maps):
        dofs = _get_dofs_region(field, field.region)
        dof_maps[0] = [dofs, [], [], len(dofs), 0]
        id_map[:] = nm.arange(len(dofs), dtype=nm.uint32)

    if not len(cell_parts):
        cell_parts.append(nm.arange(len(cell_tasks), dtype=nm.int32))

    if not len(overlap_cells):
        overlap_cells.append(nm.zeros(0, dtype=nm.int32))

    return dof_maps, id_map, cell_parts, overlap_cells
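
Note: an illustrative sketch (made-up numbers) of the per-task DOF map layout documented in the docstring above:

import numpy as nm

dof_map = [nm.array([0, 1, 2]),                  # inner DOFs owned by the task
           [nm.array([3, 4]), nm.array([7])],    # owned interface DOFs, per interface
           [nm.array([10, 11], dtype=nm.int32)], # overlapping cells, per interface
           6,                                    # n_task_total: number of owned DOFs
           0]                                    # task_offset in the global numbering
n_owned = len(dof_map[0]) + sum(len(ii) for ii in dof_map[1])
assert n_owned == dof_map[3]
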
Code example #43
File: gen_gallery.py  Project: mfkiwl/sfepy
def generate_gallery_html(examples_dir, output_filename, gallery_dir,
                          rst_dir, thumbnails_dir, dir_map, link_prefix):
    """
    Generate the gallery html file with thumbnail images and links to
    examples.

    Parameters
    ----------
    examples_dir : str
        The directory with the example source files.
    output_filename : str
        The output html file name.
    gallery_dir : str
        The top level directory of gallery files.
    rst_dir : str
        The full path to rst files of examples within `gallery_dir`.
    thumbnails_dir : str
        The full path to thumbnail images within `gallery_dir`.
    dir_map : dict
        The directory mapping returned by `generate_rst_files()`
    link_prefix : str, optional
        The prefix to prepend to links to individual pages of examples.
    """
    output('generating %s...' % output_filename)

    with open(_gallery_template_file, 'r') as fd:
        gallery_template = fd.read()

    div_lines=[]
    sidebar = []
    for dirname, filenames in ordered_iteritems(dir_map):
        full_dirname = os.path.join(rst_dir, dirname)
        dirnamenew = dirname.replace("_"," ")
        sidebarline = _side_links % (dirname, dirnamenew.title())
        lines = []
        for ex_filename, rst_filename in filenames:
            full_rst_filename = os.path.join(full_dirname, rst_filename)

            ebase = full_rst_filename.replace(rst_dir, '')[1:]
            ebase = edit_filename(ebase, new_ext='.py')

            link_base = full_rst_filename.replace(gallery_dir, '')[1:]
            link = os.path.join(link_prefix,
                                os.path.splitext(link_base)[0] + '.html')

            _get_fig_filenames(ebase, thumbnails_dir).next()
            for thumbnail_filename in _get_fig_filenames(ebase,
                                                         thumbnails_dir):
                if not os.path.isfile(thumbnail_filename):
                    # Skip examples with no image (= failed examples).
                    continue

                thumbnail_name = thumbnail_filename.replace(gallery_dir,
                                                            '')[1:]
                path_to_file = os.path.join(examples_dir,ebase)
                docstring = get_default(import_file(path_to_file).__doc__,
                                        'missing description!')
                docstring = docstring.replace('e.g.', 'eg:')
                docstring = docstring.split('.')
                line = _link_template % (link,os.path.splitext(ebase)[0],
                                         thumbnail_name,link,docstring[0]+'.')
                lines.append(line)

        if(len(lines)!=0):
            div_lines.append(_div_line % (dirname, dirnamenew.title(),
                                          dirname, '\n'.join(lines)))
            sidebar.append(sidebarline)

    fd = open(output_filename, 'w')
    fd.write(gallery_template % ((link_prefix,) * 7
                                 + ('\n'.join(sidebar), '\n'.join(div_lines))))
    fd.close()

    output('...done')
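
Note: a small sketch of how the example links above are assembled; the directory, prefix and file name below are hypothetical:

import os

gallery_dir = '/doc/gallery'                     # hypothetical
link_prefix = 'http://docs.example.org/gallery'  # hypothetical
full_rst_filename = '/doc/gallery/examples/diffusion/poisson.rst'

link_base = full_rst_filename.replace(gallery_dir, '')[1:]
link = os.path.join(link_prefix, os.path.splitext(link_base)[0] + '.html')
print(link)  # .../examples/diffusion/poisson.html on a POSIX system
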
Code example #44
File: gen_gallery.py  Project: mfkiwl/sfepy
def generate_rst_files(rst_dir, examples_dir, images_dir):
    """
    Generate Sphinx rst files for examples in `examples_dir` with images
    in `images_dir` and put them into `rst_dir`.

    Returns
    -------
    dir_map : dict
        The directory mapping of examples and corresponding rst files.
    """
    ensure_path(rst_dir + os.path.sep)

    output('generating rst files...')

    dir_map = {}
    for ex_filename in locate_files('*.py', examples_dir):
        if _omit(ex_filename): continue

        ebase = ex_filename.replace(examples_dir, '')[1:]
        base_dir = os.path.dirname(ebase)

        rst_filename = os.path.basename(ex_filename).replace('.py', '.rst')

        dir_map.setdefault(base_dir, []).append((ex_filename, rst_filename))

    for dirname, filenames in dir_map.iteritems():
        filenames = sorted(filenames, cmp=lambda a, b: cmp(a[1], b[1]))
        dir_map[dirname ] = filenames

    # Main index.
    mfd = open(os.path.join(rst_dir, 'index.rst'), 'w')
    mfd.write(_index % ('sfepy', 'SfePy autogenerated gallery', '=' * 27))

    for dirname, filenames in ordered_iteritems(dir_map):
        full_dirname = os.path.join(rst_dir, dirname)
        ensure_path(full_dirname + os.path.sep)

        # Subdirectory index.
        ifd = open(os.path.join(full_dirname, 'index.rst'), 'w')
        ifd.write(_index % (dirname, dirname, '=' * len(dirname)))

        for ex_filename, rst_filename in filenames:
            full_rst_filename = os.path.join(full_dirname, rst_filename)
            output('"%s"' % full_rst_filename.replace(rst_dir, '')[1:])
            rst_filename_ns = rst_filename.replace('.rst', '')
            ebase = ex_filename.replace(examples_dir, '')[1:]

            rst_ex_filename = _make_sphinx_path(ex_filename)
            docstring = get_default(import_file(ex_filename).__doc__,
                                    'missing description!')

            ifd.write('    %s\n' % rst_filename_ns)
            fig_include = ''
            fig_base = _get_fig_filenames(ebase, images_dir).next()
            for fig_filename in _get_fig_filenames(ebase, images_dir):
                rst_fig_filename = _make_sphinx_path(fig_filename)

                if os.path.exists(fig_filename):
                    fig_include += _image % rst_fig_filename + '\n'

            # Example rst file.
            fd = open(full_rst_filename, 'w')
            fd.write(_include % (fig_base, ebase, '=' * len(ebase),
                                 docstring,
                                 fig_include,
                                 rst_ex_filename, rst_ex_filename))
            fd.close()

        ifd.close()

        mfd.write('    %s/index\n' % dirname)

    mfd.close()

    output('...done')

    return dir_map
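
Note: a sketch of the shape of the returned dir_map (file names made up): example files grouped by subdirectory, each paired with the name of its generated rst file:

dir_map = {
    'diffusion' : [('examples/diffusion/poisson.py', 'poisson.rst')],
    'linear_elasticity' : [('examples/linear_elasticity/its2D_1.py',
                            'its2D_1.rst')],
}
for dirname, filenames in sorted(dir_map.items()):
    print('%s: %s' % (dirname, [rst for _, rst in filenames]))
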
Code example #45
File: sfepy_example1.py  Project: nikolajkiel/nikolaj
def main():
    from sfepy import data_dir

    parser = OptionParser(usage=usage, version='%prog')
    parser.add_option('--diffusivity', metavar='float', type=float,
                      action='store', dest='diffusivity',
                      default=1e-5, help=helps['diffusivity'])
    parser.add_option('--ic-max', metavar='float', type=float,
                      action='store', dest='ic_max',
                      default=2.0, help=helps['ic_max'])
    parser.add_option('--order', metavar='int', type=int,
                      action='store', dest='order',
                      default=2, help=helps['order'])
    parser.add_option('-r', '--refine', metavar='int', type=int,
                      action='store', dest='refine',
                      default=0, help=helps['refine'])
    parser.add_option('-p', '--probe',
                      action="store_true", dest='probe',
                      default=False, help=helps['probe'])
    parser.add_option('-s', '--show',
                      action="store_true", dest='show',
                      default=False, help=helps['show'])
    options, args = parser.parse_args()

    assert_((0 < options.order),
            'temperature approximation order must be at least 1!')

    output('using values:')
    output('  diffusivity:', options.diffusivity)
    output('  max. IC value:', options.ic_max)
    output('uniform mesh refinement level:', options.refine)

    mesh = Mesh.from_file(data_dir + '/meshes/3d/cylinder.mesh')
    domain = FEDomain('domain', mesh)

    if options.refine > 0:
        for ii in range(options.refine):
            output('refine %d...' % ii)
            domain = domain.refine()
            output('... %d nodes %d elements'
                   % (domain.shape.n_nod, domain.shape.n_el))

    omega = domain.create_region('Omega', 'all')
    left = domain.create_region('Left',
                                'vertices in x < 0.00001', 'facet')
    right = domain.create_region('Right',
                                 'vertices in x > 0.099999', 'facet')

    field = Field.from_args('fu', nm.float64, 'scalar', omega,
                            approx_order=options.order)

    T = FieldVariable('T', 'unknown', field, history=1)
    s = FieldVariable('s', 'test', field, primary_var_name='T')

    m = Material('m', diffusivity=options.diffusivity * nm.eye(3))

    integral = Integral('i', order=2*options.order)

    t1 = Term.new('dw_diffusion(m.diffusivity, s, T)',
                  integral, omega, m=m, s=s, T=T)
    t2 = Term.new('dw_volume_dot(s, dT/dt)',
                  integral, omega, s=s, T=T)
    eq = Equation('balance', t1 + t2)
    eqs = Equations([eq])

    # Boundary conditions.
    ebc1 = EssentialBC('T1', left, {'T.0' : 2.0})
    ebc2 = EssentialBC('T2', right, {'T.0' : -2.0})

    # Initial conditions.
    def get_ic(coors, ic):
        x, y, z = coors.T
        return 2 - 40.0 * x + options.ic_max * nm.sin(4 * nm.pi * x / 0.1)
    ic_fun = Function('ic_fun', get_ic)
    ic = InitialCondition('ic', omega, {'T.0' : ic_fun})

    ls = ScipyDirect({})

    nls_status = IndexedStruct()
    nls = Newton({'is_linear' : True}, lin_solver=ls, status=nls_status)

    pb = Problem('heat', equations=eqs, nls=nls, ls=ls)
    pb.set_bcs(ebcs=Conditions([ebc1, ebc2]))
    pb.set_ics(Conditions([ic]))

    tss = SimpleTimeSteppingSolver({'t0' : 0.0, 't1' : 100.0, 'n_step' : 11},
                                   problem=pb)
    tss.init_time()

    if options.probe:
        # Prepare probe data.
        probes, labels = gen_lines(pb)

        ev = pb.evaluate
        order = 2 * (options.order - 1)

        gfield = Field.from_args('gu', nm.float64, 'vector', omega,
                                approx_order=options.order - 1)
        dvel = FieldVariable('dvel', 'parameter', gfield,
                             primary_var_name='(set-to-None)')
        cfield = Field.from_args('gu', nm.float64, 'scalar', omega,
                                approx_order=options.order - 1)
        component = FieldVariable('component', 'parameter', cfield,
                                  primary_var_name='(set-to-None)')

        nls_options = {'eps_a' : 1e-16, 'i_max' : 1}

        if options.show:
            plt.ion()

    # Solve the problem using the time stepping solver.
    suffix = tss.ts.suffix
    for step, time, state in tss():
        if options.probe:
            # Probe the solution.
            dvel_qp = ev('ev_diffusion_velocity.%d.Omega(m.diffusivity, T)'
                         % order, copy_materials=False, mode='qp')
            project_by_component(dvel, dvel_qp, component, order,
                                 nls_options=nls_options)

            all_results = []
            for ii, probe in enumerate(probes):
                fig, results = probe_results(ii, T, dvel, probe, labels[ii])

                all_results.append(results)

            plt.tight_layout()
            fig.savefig('time_poisson_interactive_probe_%s.png'
                        % (suffix % step), bbox_inches='tight')

            if options.show:
                plt.draw()

            for ii, results in enumerate(all_results):
                output('probe %d (%s):' % (ii, probes[ii].name))
                output.level += 2
                for key, res in ordered_iteritems(results):
                    output(key + ':')
                    val = res[1]
                    output('  min: %+.2e, mean: %+.2e, max: %+.2e'
                           % (val.min(), val.mean(), val.max()))
                output.level -= 2
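
Note: a standalone sketch of the initial condition used above; it matches the essential boundary values T = 2 at x = 0 and T = -2 at x = 0.1, since the sine term vanishes at both ends:

import numpy as nm

ic_max = 2.0
x = nm.linspace(0.0, 0.1, 5)
T0 = 2 - 40.0 * x + ic_max * nm.sin(4 * nm.pi * x / 0.1)
print(T0)  # starts at 2.0 at x = 0 and ends (numerically) at -2.0 at x = 0.1
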
Code example #46
def verify_task_dof_maps(dof_maps,
                         id_map,
                         field,
                         use_expand_dofs=False,
                         verbose=False):
    """
    Verify the counts and values of DOFs in `dof_maps` and `id_map`
    corresponding to `field`.

    Returns the vector with a task number for each DOF.
    """
    timer = Timer(start=True)
    if verbose:
        output('verifying...')
        output('total number of DOFs:', field.n_nod)
        output('number of tasks:', len(dof_maps))

    count = count2 = 0
    dofs = []
    if use_expand_dofs:
        vec = nm.empty(field.n_nod * field.n_components, dtype=nm.float64)

    else:
        vec = nm.empty(field.n_nod, dtype=nm.float64)
    for ir, dof_map in ordered_iteritems(dof_maps):
        n_owned = dof_map[3]
        offset = dof_map[4]
        o2 = offset + n_owned

        if verbose:
            output('task %d: %d owned on offset %d' % (ir, n_owned, offset))

        if not use_expand_dofs:
            aux = dof_map[0]
            assert_(nm.all((id_map[aux] >= offset) & (id_map[aux] < o2)))

        count2 += dof_map[3]

        count += len(dof_map[0])

        dofs.append(dof_map[0])
        vec[dof_map[0]] = ir
        for aux in dof_map[1]:
            if not use_expand_dofs:
                assert_(nm.all((id_map[aux] >= offset) & (id_map[aux] < o2)))

            count += len(aux)
            dofs.append(aux)
            vec[aux] = ir

    dofs = nm.concatenate(dofs)

    n_dof = vec.shape[0]

    assert_(n_dof == len(dofs))
    if not use_expand_dofs:
        assert_(nm.all(nm.sort(dofs) == nm.sort(id_map)))

    dofs = nm.unique(dofs)
    assert_(n_dof == len(dofs))

    assert_(n_dof == dofs[-1] + 1)
    assert_(n_dof == count)
    assert_(n_dof == count2)
    assert_(n_dof == len(id_map))
    assert_(n_dof == len(nm.unique(id_map)))

    output('...done in', timer.stop(), verbose=verbose)

    return vec
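
Note: a tiny sketch of the returned vector: it stores the owning task number for every DOF (made-up partitioning of six DOFs over two tasks):

import numpy as nm

task_of_dof = nm.array([0.0, 0.0, 0.0, 1.0, 1.0, 1.0])
for ir in range(2):
    print('task %d owns DOFs %s' % (ir, nm.where(task_of_dof == ir)[0]))
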
Code example #47
def create_task_dof_maps(field,
                         cell_tasks,
                         inter_facets,
                         is_overlap=True,
                         use_expand_dofs=False,
                         save_inter_regions=False,
                         output_dir=None):
    """
    For each task list its inner and interface DOFs of the given field and
    create PETSc numbering that is consecutive in each subdomain.

    For each task, the DOF map has the following structure::

      [inner,
       [own_inter1, own_inter2, ...],
       [overlap_cells1, overlap_cells2, ...],
       n_task_total, task_offset]

    The overlapping cells are defined so that the system matrix corresponding
    to each task can be assembled independently, see [1]. TODO: Some "corner"
    cells may be added even if not needed - filter them out by using the PETSc
    DOFs range.

    When debugging domain partitioning problems, it is advisable to set
    `save_inter_regions` to True to save the task interfaces as meshes as well
    as vertex-based markers - to be used only with moderate problems and small
    numbers of tasks.

    [1] J. Sistek and F. Cirak. Parallel iterative solution of the
    incompressible Navier-Stokes equations with application to rotating
    wings. Submitted for publication, 2015
    """
    domain = field.domain
    cmesh = domain.cmesh

    if use_expand_dofs:
        id_map = nm.zeros(field.n_nod * field.n_components, dtype=nm.uint32)

    else:
        id_map = nm.zeros(field.n_nod, dtype=nm.uint32)

    def _get_dofs_region(field, region):
        dofs = field.get_dofs_in_region(region)
        if use_expand_dofs:
            dofs = expand_dofs(dofs, field.n_components)
        return dofs

    def _get_dofs_conn(field, conn):
        dofs = nm.unique(conn)
        if use_expand_dofs:
            dofs = expand_dofs(dofs, field.n_components)
        return dofs

    dof_maps = {}
    count = 0
    inter_count = 0
    ocs = nm.zeros(0, dtype=nm.int32)
    cell_parts = []
    for ir, ntasks in ordered_iteritems(inter_facets):
        cells = nm.where(cell_tasks == ir)[0].astype(nm.int32)
        cell_parts.append(cells)

        cregion = Region.from_cells(cells, domain, name='task_%d' % ir)
        domain.regions.append(cregion)
        dofs = _get_dofs_region(field, cregion)

        rdof_map = dof_maps.setdefault(ir, [None, [], [], 0, 0])
        inter_dofs = []
        for ic, facets in ordered_iteritems(ntasks):
            cdof_map = dof_maps.setdefault(ic, [None, [], [], 0, 0])

            name = 'inter_%d_%d' % (ir, ic)
            ii = ir

            region = Region.from_facets(facets,
                                        domain,
                                        name,
                                        parent=cregion.name)
            region.update_shape()

            if save_inter_regions:
                output_dir = output_dir if output_dir is not None else '.'
                filename = os.path.join(output_dir, '%s.mesh' % name)

                aux = domain.mesh.from_region(region,
                                              domain.mesh,
                                              is_surface=True)
                aux.write(filename, io='auto')

                mask = nm.zeros((domain.mesh.n_nod, 1), dtype=nm.float64)
                mask[region.vertices] = 1
                out = {name: Struct(name=name, mode='vertex', data=mask)}
                filename = os.path.join(output_dir, '%s.h5' % name)
                domain.mesh.write(filename, out=out, io='auto')

            inter_dofs.append(_get_dofs_region(field, region))

            sd = FESurface('surface_data_%s' % region.name, region,
                           field.efaces, field.econn, field.region)
            econn = sd.get_connectivity()
            n_facet = econn.shape[0]

            ii2 = max(int(n_facet / 2), 1)

            dr = _get_dofs_conn(field, econn[:ii2])
            ii = nm.where((id_map[dr] == 0))[0]
            n_new = len(ii)
            if n_new:
                rdof_map[1].append(dr[ii])
                rdof_map[3] += n_new
                id_map[dr[ii]] = 1
                inter_count += n_new
                count += n_new

                if is_overlap:
                    ovs = cmesh.get_incident(0, region.facets[:ii2],
                                             cmesh.tdim - 1)
                    ocs = cmesh.get_incident(cmesh.tdim, ovs, 0)
                    rdof_map[2].append(ocs.astype(nm.int32))

            dc = _get_dofs_conn(field, econn[ii2:])
            ii = nm.where((id_map[dc] == 0))[0]
            n_new = len(ii)
            if n_new:
                cdof_map[1].append(dc[ii])
                cdof_map[3] += n_new
                id_map[dc[ii]] = 1
                inter_count += n_new
                count += n_new

                if is_overlap:
                    ovs = cmesh.get_incident(0, region.facets[ii2:],
                                             cmesh.tdim - 1)
                    ocs = cmesh.get_incident(cmesh.tdim, ovs, 0)
                    cdof_map[2].append(ocs.astype(nm.int32))

        domain.regions.pop()  # Remove the cell region.

        inner_dofs = nm.setdiff1d(dofs, nm.concatenate(inter_dofs))
        n_inner = len(inner_dofs)
        rdof_map[3] += n_inner
        assert_(nm.all(id_map[inner_dofs] == 0))
        id_map[inner_dofs] = 1
        count += n_inner

        rdof_map[0] = inner_dofs

    offset = 0
    overlap_cells = []
    for ir, dof_map in ordered_iteritems(dof_maps):
        n_owned = dof_map[3]

        i0 = len(dof_map[0])
        id_map[dof_map[0]] = nm.arange(offset, offset + i0, dtype=nm.uint32)
        for aux in dof_map[1]:
            i1 = len(aux)
            id_map[aux] = nm.arange(offset + i0,
                                    offset + i0 + i1,
                                    dtype=nm.uint32)
            i0 += i1

        if len(dof_map[2]):
            ocs = nm.unique(nm.concatenate(dof_map[2]))

        else:
            ocs = nm.zeros(0, dtype=nm.int32)

        overlap_cells.append(ocs)

        assert_(i0 == n_owned)

        dof_map[4] = offset
        offset += n_owned

    if not len(dof_maps):
        dofs = _get_dofs_region(field, field.region)
        dof_maps[0] = [dofs, [], [], len(dofs), 0]
        id_map[:] = nm.arange(len(dofs), dtype=nm.uint32)

    if not len(cell_parts):
        cell_parts.append(nm.arange(len(cell_tasks), dtype=nm.int32))

    if not len(overlap_cells):
        overlap_cells.append(nm.zeros(0, dtype=nm.int32))

    return dof_maps, id_map, cell_parts, overlap_cells
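
Note: a standalone sketch (made-up DOF sets) of the consecutive per-task numbering that id_map holds at the end of the function above: task 0 gets global numbers [0, n0), task 1 the next block, and so on:

import numpy as nm

owned = {0 : nm.array([0, 2, 4]), 1 : nm.array([1, 3, 5])}
id_map = nm.zeros(6, dtype=nm.uint32)
offset = 0
for ir in sorted(owned.keys()):
    dofs = owned[ir]
    id_map[dofs] = nm.arange(offset, offset + len(dofs), dtype=nm.uint32)
    offset += len(dofs)
print(id_map)  # [0 3 1 4 2 5]
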
Code example #48
File: show_terms_use.py  Project: uberstig/sfepy
def main():
    parser = OptionParser(usage=usage, version='%prog')
    parser.add_option('-c',
                      '--counts',
                      action='store_true',
                      dest='counts',
                      default=False,
                      help=helps['counts'])
    parser.add_option('-u',
                      '--unused',
                      action='store_true',
                      dest='unused',
                      default=False,
                      help=helps['unused'])
    options, args = parser.parse_args()

    if len(args) > 0:
        pdf_dir = os.path.realpath(args[0])

    else:
        parser.print_help()
        return

    required, other = get_standard_keywords()

    terms_use = dict_from_keys_init(term_table.keys(), set)

    for filename in locate_files('*.py', pdf_dir):
        base = filename.replace(pdf_dir, '').lstrip(os.path.sep)
        output('trying "%s"...' % base)

        try:
            conf = ProblemConf.from_file(filename,
                                         required,
                                         other,
                                         verbose=False)

        except:
            output('...failed')
            continue

        use = conf.options.get('use_equations', 'equations')
        eqs_conf = getattr(conf, use)
        for key, eq_conf in eqs_conf.iteritems():
            term_descs = parse_definition(eq_conf)
            for td in term_descs:
                terms_use[td.name].add(base)

        output('...ok')
    output('...done')

    if options.unused:
        output('unused terms:')

        unused = [
            name for name in terms_use.keys() if len(terms_use[name]) == 0
        ]
        for name in sorted(unused):
            output('  ' + name)

        output('total: %d' % len(unused))

    else:
        output('terms use:')
        for name, ex_names in ordered_iteritems(terms_use):
            output('%s: %d' % (name, len(ex_names)))
            if not options.counts:
                for ex_name in sorted(ex_names):
                    output('  ' + ex_name)
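
Note: a sketch of the terms_use bookkeeping above, with made-up term and file names; dict_from_keys_init() is assumed to map every key to a fresh instance of the given class:

def dict_from_keys_init_sketch(keys, seq_class):
    # Assumed behaviour of dict_from_keys_init(): one fresh container per key.
    return dict((key, seq_class()) for key in keys)

terms_use = dict_from_keys_init_sketch(['dw_laplace', 'dw_lin_elastic'], set)
terms_use['dw_laplace'].add('diffusion/poisson.py')
for name in sorted(terms_use.keys()):
    print('%s: %d' % (name, len(terms_use[name])))
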
Code example #49
File: gen_gallery.py  Project: muratchelik/sfepy
def generate_rst_files(rst_dir, examples_dir, images_dir):
    """
    Generate Sphinx rst files for examples in `examples_dir` with images
    in `images_dir` and put them into `rst_dir`.

    Returns
    -------
    dir_map : dict
        The directory mapping of examples and corresponding rst files.
    """
    ensure_path(rst_dir + os.path.sep)

    output('generating rst files...')

    dir_map = {}
    for ex_filename in locate_files('*.py', examples_dir):
        if _omit(ex_filename): continue

        ebase = ex_filename.replace(examples_dir, '')[1:]
        base_dir = os.path.dirname(ebase)
        rst_filename = ebase2fbase(ebase) + '.rst'
        dir_map.setdefault(base_dir, []).append((ex_filename, rst_filename))

    for dirname, filenames in six.iteritems(dir_map):
        filenames = sorted(filenames, key=lambda a: a[1])
        dir_map[dirname] = filenames

    # Main index.
    mfd = open(os.path.join(rst_dir, 'index.rst'), 'w')
    mfd.write(_index % ('examples', 'Examples', '=' * 8))

    for dirname, filenames in ordered_iteritems(dir_map):
        # Subdirectory index.
        ifd = open(os.path.join(rst_dir, '%s-index.rst' % dirname), 'w')
        ifd.write(_index % (dirname + '-examples',
                            dirname, '=' * len(dirname)))

        for ex_filename, rst_filename in filenames:
            full_rst_filename = os.path.join(rst_dir, rst_filename)
            output('"%s"' % rst_filename)
            ebase = ex_filename.replace(examples_dir, '')[1:]

            rst_base = os.path.splitext(rst_filename)[0]

            rst_ex_filename = _make_sphinx_path(ex_filename)
            docstring = get_default(import_file(ex_filename).__doc__,
                                    'missing description!')

            ifd.write('   %s <%s>\n' % (os.path.basename(ebase), rst_base))
            fig_include = ''
            for fig_filename in _get_fig_filenames(ebase, images_dir):
                rst_fig_filename = _make_sphinx_path(fig_filename)
                if os.path.exists(fig_filename):
                    fig_include += _image % rst_fig_filename + '\n'
                else:
                    output('   warning: figure "%s" not found' % fig_filename)


            # Example rst file.
            fd = open(full_rst_filename, 'w')
            fd.write(_include % (rst_base, ebase, '=' * len(ebase),
                                 docstring,
                                 fig_include,
                                 rst_ex_filename, rst_ex_filename))
            fd.close()

        ifd.close()

        mfd.write('   %s-index\n' % dirname)

    mfd.close()

    output('...done')

    return dir_map
Code example #50
def verify_task_dof_maps(dof_maps, id_map, field, use_expand_dofs=False,
                         verbose=False):
    """
    Verify the counts and values of DOFs in `dof_maps` and `id_map`
    corresponding to `field`.

    Returns the vector with a task number for each DOF.
    """
    tt = time.clock()
    if verbose:
        output('verifying...')
        output('total number of DOFs:', field.n_nod)
        output('number of tasks:', len(dof_maps))

    count = count2 = 0
    dofs = []
    if use_expand_dofs:
        vec = nm.empty(field.n_nod * field.n_components, dtype=nm.float64)

    else:
        vec = nm.empty(field.n_nod, dtype=nm.float64)
    for ir, dof_map in ordered_iteritems(dof_maps):
        n_owned = dof_map[3]
        offset = dof_map[4]
        o2 = offset + n_owned

        if verbose:
            output('task %d: %d owned on offset %d' % (ir, n_owned, offset))

        if not use_expand_dofs:
            aux = dof_map[0]
            assert_(nm.all((id_map[aux] >= offset) & (id_map[aux] < o2)))

        count2 += dof_map[3]

        count += len(dof_map[0])

        dofs.append(dof_map[0])
        vec[dof_map[0]] = ir
        for aux in dof_map[1]:
            if not use_expand_dofs:
                assert_(nm.all((id_map[aux] >= offset) & (id_map[aux] < o2)))

            count += len(aux)
            dofs.append(aux)
            vec[aux] = ir

    dofs = nm.concatenate(dofs)

    n_dof = vec.shape[0]

    assert_(n_dof == len(dofs))
    if not use_expand_dofs:
        assert_(nm.all(nm.sort(dofs) == nm.sort(id_map)))

    dofs = nm.unique(dofs)
    assert_(n_dof == len(dofs))

    assert_(n_dof == dofs[-1] + 1)
    assert_(n_dof == count)
    assert_(n_dof == count2)
    assert_(n_dof == len(id_map))
    assert_(n_dof == len(nm.unique(id_map)))

    output('...done in', time.clock() - tt, verbose=verbose)

    return vec
Code example #51
    def _save_dict(self, adict, fd, names, format):
        toremove = []
        adict_complex = {}
        for key, val in ordered_iteritems(adict):
            if hasattr(val, 'dtype') and \
                    nm.issubdtype(val.dtype, nm.complexfloating):
                adict_complex[key + '_real'] = val.real
                adict_complex[key + '_imag'] = val.imag
                toremove.append(key)
        for key in toremove:
            del (adict[key])

        adict.update(adict_complex)

        for key, val in ordered_iteritems(adict):
            try:
                lname = names[key]
            except:
                lname = key
            fd.write('%s:\n' % lname)

            if hasattr(val, 'to_file_txt'):
                if val.to_file_txt is not None:
                    val.to_file_txt(fd, format, val)

                else:
                    fd.write('--\n')

            elif isinstance(val, dict):
                self._save_dict(val, fd, names, format)
                fd.write('\n')

            elif isinstance(val, list):
                if isinstance(val[0], basestr):
                    fd.write('\n'.join(val) + '\n')

            elif isinstance(val, basestr):
                fd.write(val + '\n')

            elif isinstance(val, float):
                fd.write('%e\n' % val)

            elif isinstance(val, nm.ndarray):
                if val.ndim == 0:
                    fd.write(format % val)
                    fd.write('\n')

                elif val.ndim == 1:
                    for ic in range(val.shape[0]):
                        fd.write(format % val[ic])
                        if ic < (val.shape[0] - 1):
                            fd.write(', ')
                        else:
                            fd.write('\n')

                elif val.ndim == 2:
                    for ir in range(val.shape[0]):
                        for ic in range(val.shape[1]):
                            fd.write(format % val[ir, ic])
                            if ic < (val.shape[1] - 1):
                                fd.write(', ')
                            elif ir < (val.shape[0] - 1):
                                fd.write(';\n')
                    fd.write('\n')

                elif val.ndim == 3:
                    for ii in range(val.shape[0]):
                        fd.write('  step %d:\n' % ii)
                        for ir in range(val.shape[1]):
                            for ic in range(val.shape[2]):
                                fd.write('  ' + format % val[ii, ir, ic])
                                if ic < (val.shape[2] - 1):
                                    fd.write(', ')
                                elif ir < (val.shape[1] - 1):
                                    fd.write(';\n')
                        fd.write('\n')
                    fd.write('\n')

            else:
                fd.write('--\n')

            fd.write('\n')
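
Note: a standalone sketch of the complex-valued handling above: a complex array is replaced by two real arrays with '_real' and '_imag' suffixes before writing:

import numpy as nm

adict = {'c' : nm.array([1.0 + 2.0j, 3.0 - 1.0j])}
out = {}
for key, val in sorted(adict.items()):
    if hasattr(val, 'dtype') and nm.issubdtype(val.dtype, nm.complexfloating):
        out[key + '_real'] = val.real
        out[key + '_imag'] = val.imag
    else:
        out[key] = val
print(sorted(out.keys()))  # ['c_imag', 'c_real']
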
Code example #52
def plot_log(axs,
             log,
             info,
             xticks=None,
             yticks=None,
             xnbins=None,
             ynbins=None,
             groups=None,
             show_legends=True,
             swap_axes=False):
    """
    Plot log data returned by :func:`read_log()` into a specified figure.

    Parameters
    ----------
    axs : sequence of matplotlib.axes.Axes
        The list of axes for the log data plots.
    log : dict
        The log with data names as keys and ``(xs, ys, vlines)`` as values.
    info : dict
        The log plot configuration with subplot numbers as keys.
    xticks : list of arrays, optional
        The list of x-axis ticks (array or None) for each subplot.
    yticks : list of arrays, optional
        The list of y-axis ticks (array or None) for each subplot.
    xnbins : list, optional
        The list of x-axis number of bins (int or None) for each subplot.
    ynbins : list, optional
        The list of y-axis number of bins (int or None) for each subplot.
    groups : list, optional
        The list of data groups subplots. If not given, all groups are plotted.
    show_legends : bool
        If True, show legends in plots.
    swap_axes : bool
        If True, swap the axes of the plots.
    """
    import matplotlib.pyplot as plt

    if axs is None:
        fig = plt.figure()

    else:
        fig = None

    if groups is None:
        n_gr = len(info)
        groups = nm.arange(n_gr)

    else:
        n_gr = len(groups)

    n_col = min(5.0, nm.fix(nm.sqrt(n_gr)))
    if int(n_col) == 0:
        n_row = 0

    else:
        n_row = int(nm.ceil(n_gr / n_col))
        n_col = int(n_col)

    if xticks is None:
        xticks = [None] * n_gr

    if yticks is None:
        yticks = [None] * n_gr

    if xnbins is None:
        xnbins = [None] * n_gr

    if ynbins is None:
        ynbins = [None] * n_gr

    isub = offset = 0
    for ig, (xlabel, ylabel, yscale, names, plot_kwargs) \
        in ordered_iteritems(info):
        if ig not in groups: continue

        if axs is None:
            ax = fig.add_subplot(n_row, n_col, isub + 1)

        else:
            ax = axs[ig]

        if not swap_axes:
            xnb, ynb = xnbins[isub], ynbins[isub]
            xti, yti = xticks[isub], yticks[isub]
            ax.set_yscale(yscale)
            for ip, name in enumerate(names):
                xs, ys, vlines = log[ip + offset]
                draw_data(ax, xs, ys, name, plot_kwargs[ip])

                for x in vlines:
                    ax.axvline(x, color='k', alpha=0.3)

        else:
            xlabel, ylabel = ylabel, xlabel
            xti, yti = yticks[isub], xticks[isub]
            xnb, ynb = ynbins[isub], xnbins[isub]
            ax.set_xscale(yscale)

            for ip, name in enumerate(names):
                xs, ys, vlines = log[ip + offset]
                draw_data(ax, xs, ys, name, plot_kwargs[ip], swap_axes=True)

                for x in vlines:
                    ax.axhline(x, color='k', alpha=0.3)

        offset += len(names)

        if xti is not None:
            ax.set_xticks(xti)

        if yti is not None:
            ax.set_yticks(yti)

        if xnb is not None:
            ax.locator_params(tight=True, axis='x', nbins=xnb)

        if ynb is not None:
            ax.locator_params(tight=True, axis='y', nbins=ynb)

        if xlabel:
            ax.set_xlabel(xlabel)

        if ylabel:
            ax.set_ylabel(ylabel)

        if show_legends:
            ax.legend(loc='best')

        isub += 1

    plt.tight_layout(pad=0.5)
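
Note: a standalone sketch of the subplot grid sizing used above: at most five columns, with the number of rows derived from the group count:

import numpy as nm

def grid_shape(n_gr):
    # Mirrors the grid computation above: n_col = min(5, fix(sqrt(n_gr))),
    # n_row = ceil(n_gr / n_col).
    n_col = min(5.0, nm.fix(nm.sqrt(n_gr)))
    if int(n_col) == 0:
        return 0, 0
    return int(nm.ceil(n_gr / n_col)), int(n_col)

for n_gr in (1, 4, 7, 30):
    print('%d -> %s' % (n_gr, grid_shape(n_gr)))
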
Code example #53
    def _save_dict(self, adict, fd, names, format):
        for key, val in ordered_iteritems(adict):
            try:
                lname = names[key]
            except:
                lname = key
            fd.write('%s:\n' % lname)

            if hasattr(val, 'to_file_txt'):
                if val.to_file_txt is not None:
                    val.to_file_txt(fd, format, val)

                else:
                    fd.write('--\n')

            elif isinstance(val, dict):
                self._save_dict(val, fd, names, format)
                fd.write('\n')

            elif isinstance(val, list):
                if isinstance(val[0], basestr):
                    fd.write('\n'.join(val) + '\n')

            elif isinstance(val, basestr):
                fd.write(val + '\n')

            elif isinstance(val, float):
                fd.write('%e\n' % val)

            elif isinstance(val, nm.ndarray):
                if val.ndim == 0:
                    fd.write(format % val)
                    fd.write('\n')

                elif val.ndim == 1:
                    for ic in range(val.shape[0]):
                        fd.write(format % val[ic])
                        if ic < (val.shape[0] - 1):
                            fd.write(', ')
                        else:
                            fd.write('\n')

                elif val.ndim == 2:
                    for ir in range(val.shape[0]):
                        for ic in range(val.shape[1]):
                            fd.write(format % val[ir,ic])
                            if ic < (val.shape[1] - 1):
                                fd.write(', ')
                            elif ir < (val.shape[0] - 1):
                                fd.write(';\n')
                    fd.write('\n')

                elif val.ndim == 3:
                    for ii in range(val.shape[0]):
                        fd.write('  step %d:\n' % ii)
                        for ir in range(val.shape[1]):
                            for ic in range(val.shape[2]):
                                fd.write('  ' + format % val[ii,ir,ic])
                                if ic < (val.shape[2] - 1):
                                    fd.write(', ')
                                elif ir < (val.shape[1] - 1):
                                    fd.write(';\n')
                        fd.write('\n')
                    fd.write('\n')

            else:
                fd.write('--\n')

            fd.write('\n')
Code example #54
    def test_quadratures(self):
        """
        Test if the quadratures have orders they claim to have, using
        symbolic integration by sympy.
        """
        from sfepy.fem.quadratures import quadrature_tables
        from sfepy.fem.integrals import Integral

        if sm is None:
            self.report('cannot import sympy, skipping')
            return True

        quad = Integral('test_integral')

        ok = True
        failed = []
        for geometry, qps in ordered_iteritems(quadrature_tables):
            self.report('geometry:', geometry)

            for order, qp in ordered_iteritems(qps):
                self.report('order:', order)

                dim = int(geometry[0])
                n_v = int(geometry[2])
                is_simplex = n_v == (dim + 1)

                xs, poly, limits, integral = get_poly(order, dim,
                                                      is_simplex=is_simplex)

                self.report('  polynomial:', poly)
                self.report('  limits:', limits)
                self.report('  integral:', integral)

                def fun(coors):
                    vals = nm.empty(coors.shape[0], dtype=nm.float64)

                    subs = {}
                    for ir, cc in enumerate(coors):
                        for ic, x in enumerate(xs):
                            subs[x] = coors[ir,ic]
                        vals[ir] = float(poly.subs(subs))

                    return vals
                    
                val = quad.integrate(fun, order=order, geometry=geometry)
                _ok = nm.allclose(val, float(integral), rtol=0.0, atol=1e-15)

                self.report('  sym. == num.: %f == %f -> %s' %
                            (float(integral), val, _ok))

                if not _ok:
                    failed.append((geometry, order, float(integral), val))

                ok = ok and _ok

        if not ok:
            self.report('failed:')
            for aux in failed:
                self.report(aux)

        return ok
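
Note: the same idea sketched standalone on the 1D reference interval [-1, 1] using numpy's Gauss-Legendre rule: a rule exact up to the requested order must reproduce the analytic integral of x**order:

import numpy as nm

order = 4
# A Gauss-Legendre rule with n points integrates polynomials up to degree
# 2*n - 1 exactly, so n = order // 2 + 1 points suffice.
points, weights = nm.polynomial.legendre.leggauss(order // 2 + 1)
num = nm.sum(weights * points**order)
exact = (1.0 - (-1.0)**(order + 1)) / (order + 1)
print(nm.allclose(num, exact, rtol=0.0, atol=1e-14))  # True
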
Code example #55
File: its2D_interactive.py  Project: uberstig/sfepy
def main():
    from sfepy import data_dir

    parser = OptionParser(usage=usage, version='%prog')
    parser.add_option('--young',
                      metavar='float',
                      type=float,
                      action='store',
                      dest='young',
                      default=2000.0,
                      help=helps['young'])
    parser.add_option('--poisson',
                      metavar='float',
                      type=float,
                      action='store',
                      dest='poisson',
                      default=0.4,
                      help=helps['poisson'])
    parser.add_option('--load',
                      metavar='float',
                      type=float,
                      action='store',
                      dest='load',
                      default=-1000.0,
                      help=helps['load'])
    parser.add_option('--order',
                      metavar='int',
                      type=int,
                      action='store',
                      dest='order',
                      default=1,
                      help=helps['order'])
    parser.add_option('-r',
                      '--refine',
                      metavar='int',
                      type=int,
                      action='store',
                      dest='refine',
                      default=0,
                      help=helps['refine'])
    parser.add_option('-s',
                      '--show',
                      action="store_true",
                      dest='show',
                      default=False,
                      help=helps['show'])
    parser.add_option('-p',
                      '--probe',
                      action="store_true",
                      dest='probe',
                      default=False,
                      help=helps['probe'])
    options, args = parser.parse_args()

    assert_((0.0 < options.poisson < 0.5),
            "Poisson's ratio must be in ]0, 0.5[!")
    assert_((0 < options.order),
            'displacement approximation order must be at least 1!')

    output('using values:')
    output("  Young's modulus:", options.young)
    output("  Poisson's ratio:", options.poisson)
    output('  vertical load:', options.load)
    output('uniform mesh refinement level:', options.refine)

    # Build the problem definition.
    mesh = Mesh.from_file(data_dir + '/meshes/2d/its2D.mesh')
    domain = FEDomain('domain', mesh)

    if options.refine > 0:
        for ii in xrange(options.refine):
            output('refine %d...' % ii)
            domain = domain.refine()
            output('... %d nodes %d elements' %
                   (domain.shape.n_nod, domain.shape.n_el))

    omega = domain.create_region('Omega', 'all')
    left = domain.create_region('Left', 'vertices in x < 0.001', 'facet')
    bottom = domain.create_region('Bottom', 'vertices in y < 0.001', 'facet')
    top = domain.create_region('Top', 'vertex 2', 'vertex')

    field = Field.from_args('fu',
                            nm.float64,
                            'vector',
                            omega,
                            approx_order=options.order)

    u = FieldVariable('u', 'unknown', field)
    v = FieldVariable('v', 'test', field, primary_var_name='u')

    D = stiffness_from_youngpoisson(2, options.young, options.poisson)

    asphalt = Material('Asphalt', D=D)
    load = Material('Load', values={'.val': [0.0, options.load]})

    integral = Integral('i', order=2 * options.order)
    integral0 = Integral('i', order=0)

    t1 = Term.new('dw_lin_elastic(Asphalt.D, v, u)',
                  integral,
                  omega,
                  Asphalt=asphalt,
                  v=v,
                  u=u)
    t2 = Term.new('dw_point_load(Load.val, v)', integral0, top, Load=load, v=v)
    eq = Equation('balance', t1 - t2)
    eqs = Equations([eq])

    xsym = EssentialBC('XSym', bottom, {'u.1': 0.0})
    ysym = EssentialBC('YSym', left, {'u.0': 0.0})

    ls = ScipyDirect({})

    nls_status = IndexedStruct()
    nls = Newton({}, lin_solver=ls, status=nls_status)

    pb = Problem('elasticity', equations=eqs, nls=nls, ls=ls)

    pb.time_update(ebcs=Conditions([xsym, ysym]))

    # Solve the problem.
    state = pb.solve()
    output(nls_status)

    # Postprocess the solution.
    out = state.create_output_dict()
    out = stress_strain(out, pb, state, extend=True)
    pb.save_state('its2D_interactive.vtk', out=out)

    gdata = geometry_data['2_3']
    nc = len(gdata.coors)

    integral_vn = Integral('ivn',
                           coors=gdata.coors,
                           weights=[gdata.volume / nc] * nc)

    nodal_stress(out, pb, state, integrals=Integrals([integral_vn]))

    if options.probe:
        # Probe the solution.
        probes, labels = gen_lines(pb)

        sfield = Field.from_args('sym_tensor',
                                 nm.float64,
                                 3,
                                 omega,
                                 approx_order=options.order - 1)
        stress = FieldVariable('stress',
                               'parameter',
                               sfield,
                               primary_var_name='(set-to-None)')
        strain = FieldVariable('strain',
                               'parameter',
                               sfield,
                               primary_var_name='(set-to-None)')

        cfield = Field.from_args('component',
                                 nm.float64,
                                 1,
                                 omega,
                                 approx_order=options.order - 1)
        component = FieldVariable('component',
                                  'parameter',
                                  cfield,
                                  primary_var_name='(set-to-None)')

        ev = pb.evaluate
        order = 2 * (options.order - 1)
        strain_qp = ev('ev_cauchy_strain.%d.Omega(u)' % order, mode='qp')
        stress_qp = ev('ev_cauchy_stress.%d.Omega(Asphalt.D, u)' % order,
                       mode='qp',
                       copy_materials=False)

        project_by_component(strain, strain_qp, component, order)
        project_by_component(stress, stress_qp, component, order)

        all_results = []
        for ii, probe in enumerate(probes):
            fig, results = probe_results(u, strain, stress, probe, labels[ii])

            fig.savefig('its2D_interactive_probe_%d.png' % ii)
            all_results.append(results)

        for ii, results in enumerate(all_results):
            output('probe %d:' % ii)
            output.level += 2
            for key, res in ordered_iteritems(results):
                output(key + ':')
                val = res[1]
                output('  min: %+.2e, mean: %+.2e, max: %+.2e' %
                       (val.min(), val.mean(), val.max()))
            output.level -= 2

    if options.show:
        # Show the solution. If the approximation order is greater than 1, the
        # extra DOFs are simply thrown away.
        from sfepy.postprocess.viewer import Viewer

        view = Viewer('its2D_interactive.vtk')
        view(vector_mode='warp_norm',
             rel_scaling=1,
             is_scalar_bar=True,
             is_wireframe=True)
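
Both its2D_interactive examples report the probe results through ordered_iteritems(). A minimal sketch of that reporting pattern in isolation, assuming ordered_iteritems() simply yields the (key, value) pairs of a dict sorted by key; the result values below are made up for illustration:

import numpy as nm
from sfepy.base.base import ordered_iteritems

results = {
    'u' : (None, nm.array([0.0, 1.5e-3, -2.0e-3])),
    'cauchy_strain' : (None, nm.array([1.0e-4, 3.0e-4])),
    'cauchy_stress' : (None, nm.array([-1.2e+2, 4.5e+2])),
}

for key, res in ordered_iteritems(results):
    val = res[1]
    print('%s: min: %+.2e, mean: %+.2e, max: %+.2e'
          % (key, val.min(), val.mean(), val.max()))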
Code example #56
File: gen_gallery.py  Project: vondrejc/sfepy
def generate_rst_files(rst_dir, examples_dir, images_dir):
    """
    Generate Sphinx rst files for examples in `examples_dir` with images
    in `images_dir` and put them into `rst_dir`.

    Returns
    -------
    dir_map : dict
        The directory mapping of examples and corresponding rst files.
    """
    ensure_path(rst_dir + os.path.sep)

    output("generating rst files...")

    dir_map = {}
    for ex_filename in locate_files("*.py", examples_dir):
        if _omit(ex_filename):
            continue

        ebase = ex_filename.replace(examples_dir, "")[1:]
        base_dir = os.path.dirname(ebase)

        rst_filename = os.path.basename(ex_filename).replace(".py", ".rst")

        dir_map.setdefault(base_dir, []).append((ex_filename, rst_filename))

    for dirname, filenames in dir_map.iteritems():
        filenames = sorted(filenames, cmp=lambda a, b: cmp(a[1], b[1]))
        dir_map[dirname] = filenames

    # Main index.
    mfd = open(os.path.join(rst_dir, "index.rst"), "w")
    mfd.write(_index % ("sfepy", "SfePy autogenerated gallery", "=" * 27))

    for dirname, filenames in ordered_iteritems(dir_map):
        full_dirname = os.path.join(rst_dir, dirname)
        ensure_path(full_dirname + os.path.sep)

        # Subdirectory index.
        ifd = open(os.path.join(full_dirname, "index.rst"), "w")
        ifd.write(_index % (dirname, dirname, "=" * len(dirname)))

        for ex_filename, rst_filename in filenames:
            full_rst_filename = os.path.join(full_dirname, rst_filename)
            output('"%s"' % full_rst_filename.replace(rst_dir, "")[1:])
            rst_filename_ns = rst_filename.replace(".rst", "")
            ebase = ex_filename.replace(examples_dir, "")[1:]

            rst_ex_filename = _make_sphinx_path(ex_filename)
            docstring = get_default(import_file(ex_filename).__doc__, "missing description!")

            ifd.write("    %s\n" % rst_filename_ns)
            fig_include = ""
            fig_base = _get_fig_filenames(ebase, images_dir).next()
            for fig_filename in _get_fig_filenames(ebase, images_dir):
                rst_fig_filename = _make_sphinx_path(fig_filename)

                if os.path.exists(fig_filename):
                    fig_include += _image % rst_fig_filename + "\n"

            # Example rst file.
            fd = open(full_rst_filename, "w")
            fd.write(
                _include % (fig_base, ebase, "=" * len(ebase), docstring, fig_include, rst_ex_filename, rst_ex_filename)
            )
            fd.close()

        ifd.close()

        mfd.write("    %s/index\n" % dirname)

    mfd.close()

    output("...done")

    return dir_map
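
Note that generate_rst_files() relies on several Python 2-only constructs (dict.iteritems(), the cmp argument of sorted(), and generator .next()). A rough, hypothetical Python 3 equivalent of the filename-sorting step would be:

# Hypothetical Python 3 port of the sorting loop above; dict.iteritems()
# and sorted(cmp=...) no longer exist in Python 3.
for dirname, filenames in dir_map.items():
    dir_map[dirname] = sorted(filenames, key=lambda item: item[1])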
Code example #57
File: its2D_interactive.py  Project: Nasrollah/sfepy
def main():
    from sfepy import data_dir

    parser = OptionParser(usage=usage, version='%prog')
    parser.add_option('--young', metavar='float', type=float,
                      action='store', dest='young',
                      default=2000.0, help=helps['young'])
    parser.add_option('--poisson', metavar='float', type=float,
                      action='store', dest='poisson',
                      default=0.4, help=helps['poisson'])
    parser.add_option('--load', metavar='float', type=float,
                      action='store', dest='load',
                      default=-1000.0, help=helps['load'])
    parser.add_option('--order', metavar='int', type=int,
                      action='store', dest='order',
                      default=1, help=helps['order'])
    parser.add_option('-r', '--refine', metavar='int', type=int,
                      action='store', dest='refine',
                      default=0, help=helps['refine'])
    parser.add_option('-s', '--show',
                      action="store_true", dest='show',
                      default=False, help=helps['show'])
    parser.add_option('-p', '--probe',
                      action="store_true", dest='probe',
                      default=False, help=helps['probe'])
    options, args = parser.parse_args()

    assert_((0.0 < options.poisson < 0.5),
            "Poisson's ratio must be in ]0, 0.5[!")
    assert_((0 < options.order),
            'displacement approximation order must be at least 1!')

    output('using values:')
    output("  Young's modulus:", options.young)
    output("  Poisson's ratio:", options.poisson)
    output('  vertical load:', options.load)
    output('uniform mesh refinement level:', options.refine)

    # Build the problem definition.
    mesh = Mesh.from_file(data_dir + '/meshes/2d/its2D.mesh')
    domain = FEDomain('domain', mesh)

    if options.refine > 0:
        for ii in range(options.refine):
            output('refine %d...' % ii)
            domain = domain.refine()
            output('... %d nodes %d elements'
                   % (domain.shape.n_nod, domain.shape.n_el))

    omega = domain.create_region('Omega', 'all')
    left = domain.create_region('Left',
                                'vertices in x < 0.001', 'facet')
    bottom = domain.create_region('Bottom',
                                  'vertices in y < 0.001', 'facet')
    top = domain.create_region('Top', 'vertex 2', 'vertex')

    field = Field.from_args('fu', nm.float64, 'vector', omega,
                            approx_order=options.order)

    u = FieldVariable('u', 'unknown', field)
    v = FieldVariable('v', 'test', field, primary_var_name='u')

    D = stiffness_from_youngpoisson(2, options.young, options.poisson)

    asphalt = Material('Asphalt', D=D)
    load = Material('Load', values={'.val' : [0.0, options.load]})

    integral = Integral('i', order=2*options.order)
    integral0 = Integral('i', order=0)

    t1 = Term.new('dw_lin_elastic(Asphalt.D, v, u)',
                  integral, omega, Asphalt=asphalt, v=v, u=u)
    t2 = Term.new('dw_point_load(Load.val, v)',
                  integral0, top, Load=load, v=v)
    eq = Equation('balance', t1 - t2)
    eqs = Equations([eq])

    xsym = EssentialBC('XSym', bottom, {'u.1' : 0.0})
    ysym = EssentialBC('YSym', left, {'u.0' : 0.0})

    ls = ScipyDirect({})

    nls_status = IndexedStruct()
    nls = Newton({}, lin_solver=ls, status=nls_status)

    pb = Problem('elasticity', equations=eqs, nls=nls, ls=ls)

    pb.time_update(ebcs=Conditions([xsym, ysym]))

    # Solve the problem.
    state = pb.solve()
    output(nls_status)

    # Postprocess the solution.
    out = state.create_output_dict()
    out = stress_strain(out, pb, state, extend=True)
    pb.save_state('its2D_interactive.vtk', out=out)

    gdata = geometry_data['2_3']
    nc = len(gdata.coors)

    integral_vn = Integral('ivn', coors=gdata.coors,
                          weights=[gdata.volume / nc] * nc)

    nodal_stress(out, pb, state, integrals=Integrals([integral_vn]))

    if options.probe:
        # Probe the solution.
        probes, labels = gen_lines(pb)

        sfield = Field.from_args('sym_tensor', nm.float64, 3, omega,
                                approx_order=options.order - 1)
        stress = FieldVariable('stress', 'parameter', sfield,
                               primary_var_name='(set-to-None)')
        strain = FieldVariable('strain', 'parameter', sfield,
                               primary_var_name='(set-to-None)')

        cfield = Field.from_args('component', nm.float64, 1, omega,
                                 approx_order=options.order - 1)
        component = FieldVariable('component', 'parameter', cfield,
                                  primary_var_name='(set-to-None)')

        ev = pb.evaluate
        order = 2 * (options.order - 1)
        strain_qp = ev('ev_cauchy_strain.%d.Omega(u)' % order, mode='qp')
        stress_qp = ev('ev_cauchy_stress.%d.Omega(Asphalt.D, u)' % order,
                       mode='qp', copy_materials=False)

        project_by_component(strain, strain_qp, component, order)
        project_by_component(stress, stress_qp, component, order)

        all_results = []
        for ii, probe in enumerate(probes):
            fig, results = probe_results(u, strain, stress, probe, labels[ii])

            fig.savefig('its2D_interactive_probe_%d.png' % ii)
            all_results.append(results)

        for ii, results in enumerate(all_results):
            output('probe %d:' % ii)
            output.level += 2
            for key, res in ordered_iteritems(results):
                output(key + ':')
                val = res[1]
                output('  min: %+.2e, mean: %+.2e, max: %+.2e'
                       % (val.min(), val.mean(), val.max()))
            output.level -= 2

    if options.show:
        # Show the solution. If the approximation order is greater than 1, the
        # extra DOFs are simply thrown away.
        from sfepy.postprocess.viewer import Viewer

        view = Viewer('its2D_interactive.vtk')
        view(vector_mode='warp_norm', rel_scaling=1,
             is_scalar_bar=True, is_wireframe=True)