Example #1
0
def inject_viscid_styles(show_warning=True):
    """Register Viscid's bundled *.mplstyle sheets with matplotlib.

    Every ``*.mplstyle`` file in the ``styles`` directory next to this
    module is parsed and injected into ``matplotlib.style.library`` under
    its basename. For matplotlib older than 1.5.0, each sheet is first
    rewritten (via a temporary file) to the legacy prop_cycle syntax.

    Args:
        show_warning (bool): if True, log a debug message when matplotlib
            is missing the required style machinery.
    """
    try:
        from matplotlib import rc_params_from_file, style

        styl_dir = os.path.realpath(os.path.dirname(__file__))
        styl_dir = os.path.abspath(os.path.join(styl_dir, "styles"))
        style_sheets = glob(os.path.join(styl_dir, "*.mplstyle"))

        if LooseVersion(matplotlib.__version__) < LooseVersion("1.5.0"):
            # mkstemp() returns (fd, path); close the fd immediately,
            # otherwise one file descriptor leaks per call (the path is
            # re-opened below with plain open())
            tmpfd, tmpfname = tempfile.mkstemp()
            os.close(tmpfd)
        else:
            tmpfname = None

        try:
            for styl_fname in style_sheets:
                styl_name = os.path.splitext(os.path.basename(styl_fname))[0]

                if tmpfname:
                    # hack the cycler stuff back to the pre-1.5.0 syntax
                    with open(styl_fname, 'r') as fin:
                        with open(tmpfname, 'w') as fout:
                            fout.write(_cycler2prop_cycle(fin.read()))
                    styl_fname = tmpfname
                params = rc_params_from_file(styl_fname,
                                             use_default_template=False)
                style.library[styl_name] = params
        finally:
            # remove the temp file even if a style sheet fails to parse
            if tmpfname:
                os.unlink(tmpfname)
        style.reload_library()

    except ImportError:
        if show_warning:
            logger.debug("Upgrade to matplotlib >= 1.5.0 to use style sheets")
Example #2
0
def inject_viscid_styles(show_warning=True):
    """Load the *.mplstyle sheets shipped with Viscid into matplotlib.

    Sheets are found in the ``styles`` directory beside this module and
    registered in ``matplotlib.style.library`` keyed by basename. On
    matplotlib < 1.5.0 the cycler syntax is converted to the legacy form
    through a temporary file before parsing.

    Args:
        show_warning (bool): log a debug message if matplotlib lacks the
            style-sheet machinery (i.e. the import below fails).
    """
    try:
        from matplotlib import rc_params_from_file, style

        styl_dir = os.path.realpath(os.path.dirname(__file__))
        styl_dir = os.path.abspath(os.path.join(styl_dir, "styles"))
        style_sheets = glob(os.path.join(styl_dir, "*.mplstyle"))

        if LooseVersion(matplotlib.__version__) < LooseVersion("1.5.0"):
            # BUG FIX: mkstemp() returns (fd, path); the fd must be
            # closed here or it leaks one descriptor per call
            tmpfd, tmpfname = tempfile.mkstemp()
            os.close(tmpfd)
        else:
            tmpfname = None

        try:
            for styl_fname in style_sheets:
                styl_name = os.path.splitext(os.path.basename(styl_fname))[0]

                if tmpfname:
                    # hack the cycler stuff back to the pre-1.5.0 syntax
                    with open(styl_fname, 'r') as fin:
                        with open(tmpfname, 'w') as fout:
                            fout.write(_cycler2prop_cycle(fin.read()))
                    styl_fname = tmpfname
                params = rc_params_from_file(styl_fname, use_default_template=False)
                style.library[styl_name] = params
        finally:
            # clean up the temp file even when a sheet raises mid-loop
            if tmpfname:
                os.unlink(tmpfname)
        style.reload_library()

    except ImportError:
        if show_warning:
            logger.debug("Upgrade to matplotlib >= 1.5.0 to use style sheets")
Example #3
0
# If we have lxml, use it, else use a modification of the Python std lib xml

from viscid import logger

# When True, skip lxml entirely and always use the patched stdlib
# ElementTree fallback (the `raise ImportError` below forces the except
# branch to run even when lxml is installed).
force_native_xml = True

try:
    if force_native_xml:
        # deliberately jump to the stdlib fallback in the except clause
        raise ImportError

    from lxml import etree
    logger.debug("Using lxml library")

    def parse(fname, **kwargs):
        # Thin wrapper so callers get the same `parse` API regardless of
        # which XML backend was selected at import time.
        return etree.parse(fname, **kwargs)

    def xinclude(tree, base_url=None, **kwargs):
        """Summary

        Args:
            tree (Tree): The object returned by parse
            base_url (str): Not used
            **kwargs: passed to tree.xinclude()
        """
        # TODO: ignore if an xincluded xdmf file doesn't exist?
        if base_url:
            # lxml resolves hrefs on its own, so a caller-supplied
            # base_url cannot be honored by this backend
            logger.warning("lxml will ignore base_url: %s", base_url)
        return tree.xinclude(**kwargs)

except ImportError:
    from xml.etree import ElementTree
Example #4
0
File: vlab.py  Project: jobejen/Viscid
def _follow_fluid_step(i, dt, grid, root_seeds, plot_function, stream_opts,
                       speed_scale):
    """Advect seed points through one timestep of the fluid velocity.

    Traces streamlines of the "v" field from the current seeds, hands
    them to ``plot_function``, then marches each seed along its flow line
    until the integrated travel time reaches ``|dt|``.

    Args:
        i (int): timestep index; used for logging and passed through to
            ``plot_function``
        dt (float): timestep length; its sign selects the trace direction
        grid: grid object providing the "v" velocity field and ``time``
        root_seeds: seed object with a ``genr_points()`` method
        plot_function (callable): called as
            ``plot_function(i, grid, v, flow_lines, root_seeds)``
        stream_opts (dict): keyword args for ``calc_streamlines``; must
            contain "ds0" (streamline step size)
        speed_scale (float): multiplier applied to interpolated speed

    Returns:
        ndarray: 3xM array of new seed positions; seeds whose flow line
        ran out before ``|dt|`` elapsed are dropped.
    """
    direction = int(dt / np.abs(dt))
    if direction >= 0:
        sl_direction = streamline.DIR_FORWARD
    else:
        sl_direction = streamline.DIR_BACKWARD

    logger.info("working on timestep {0} {1}".format(i, grid.time))
    v = grid["v"]
    logger.debug("finished reading V field")

    logger.debug("calculating new streamline positions")
    flow_lines = calc_streamlines(v, root_seeds,
                                  output=viscid.OUTPUT_STREAMLINES,
                                  stream_dir=sl_direction,
                                  **stream_opts)[0]

    logger.debug("done with that, now i'm plotting...")
    plot_function(i, grid, v, flow_lines, root_seeds)

    ############################################################
    # now recalculate the seed positions for the next timestep
    logger.debug("finding new seed positions...")
    root_pts = root_seeds.genr_points()
    valid_pt_inds = []
    # NOTE: dedicated loop variable so the timestep index `i` (a
    # parameter, used in the logging/plot calls above) is not clobbered
    for ipt in range(root_pts.shape[1]):
        valid_pt = True

        # interpolate velocity onto the flow line, and get speed too
        v_interp = cycalc.interp_trilin(v, seed.Point(flow_lines[ipt]))
        speed = np.sqrt(np.sum(v_interp * v_interp, axis=1))

        # this is a very sloppy way to integrate velocity:
        # keep marching along the flow line until we get to the next
        # timestep
        t = 0.0
        ind = 0
        if direction < 0:
            # flip backward traces so marching always moves away from
            # the seed point
            flow_lines[ipt] = flow_lines[ipt][:, ::-1]
            speed = speed[::-1]

        while t < np.abs(dt):
            ind += 1
            if ind >= len(speed):
                # set valid_pt to True if you want to keep that point for
                # future time steps, but most likely if we're here, the seed
                # has gone out of our region of interest
                ind = len(speed) - 1
                valid_pt = False
                logger.info("OOPS: ran out of streamline, increase "
                            "max_length when tracing flow lines if this "
                            "is unexpected")
                break
            t += stream_opts["ds0"] / (speed_scale * speed[ind])

        root_pts[:, ipt] = flow_lines[ipt][:, ind]
        if valid_pt:
            valid_pt_inds.append(ipt)

    # remove seeds that have flown out of our region of interest
    # (aka, beyond the flow line we drew)
    root_pts = root_pts[:, valid_pt_inds]

    logger.debug("ok, done with all that :)")
    return root_pts
Example #5
0
def _follow_fluid_step(i, dt, grid, root_seeds, plot_function, stream_opts,
                       speed_scale):
    """Move fluid-following seeds forward (or backward) by one timestep.

    Streamlines of the velocity field are traced from the current seed
    positions and plotted; each seed is then walked along its flow line
    until the accumulated travel time reaches ``|dt|``.

    Args:
        i (int): timestep index for logging and for ``plot_function``
        dt (float): timestep; sign picks forward/backward tracing
        grid: provides the "v" velocity field and a ``time`` attribute
        root_seeds: seed object exposing ``genr_points()``
        plot_function (callable): invoked as
            ``plot_function(i, grid, v, flow_lines, root_seeds)``
        stream_opts (dict): forwarded to ``calc_streamlines``; must
            include "ds0"
        speed_scale (float): scale factor on the interpolated speed

    Returns:
        ndarray: 3xM array of updated seed positions with out-of-range
        seeds removed.
    """
    direction = int(dt / np.abs(dt))
    if direction >= 0:
        sl_direction = streamline.DIR_FORWARD
    else:
        sl_direction = streamline.DIR_BACKWARD

    logger.info("working on timestep {0} {1}".format(i, grid.time))
    v = grid["v"]
    logger.debug("finished reading V field")

    logger.debug("calculating new streamline positions")
    flow_lines = calc_streamlines(v,
                                  root_seeds,
                                  output=viscid.OUTPUT_STREAMLINES,
                                  stream_dir=sl_direction,
                                  **stream_opts)[0]

    logger.debug("done with that, now i'm plotting...")
    plot_function(i, grid, v, flow_lines, root_seeds)

    ############################################################
    # now recalculate the seed positions for the next timestep
    logger.debug("finding new seed positions...")
    root_pts = root_seeds.genr_points()
    valid_pt_inds = []
    # BUG FIX: the original reused `i` here, clobbering the timestep
    # index parameter; use a separate name for the per-seed index
    for seed_idx in range(root_pts.shape[1]):
        valid_pt = True

        # interpolate velocity onto the flow line, and get speed too
        v_interp = cycalc.interp_trilin(v, seed.Point(flow_lines[seed_idx]))
        speed = np.sqrt(np.sum(v_interp * v_interp, axis=1))

        # crude velocity integration: keep marching along the flow line
        # until we get to the next timestep
        t = 0.0
        ind = 0
        if direction < 0:
            # reverse backward traces so the march always proceeds away
            # from the seed
            flow_lines[seed_idx] = flow_lines[seed_idx][:, ::-1]
            speed = speed[::-1]

        while t < np.abs(dt):
            ind += 1
            if ind >= len(speed):
                # set valid_pt to True if you want to keep that point for
                # future time steps, but most likely if we're here, the seed
                # has gone out of our region of interest
                ind = len(speed) - 1
                valid_pt = False
                logger.info("OOPS: ran out of streamline, increase "
                            "max_length when tracing flow lines if this "
                            "is unexpected")
                break
            t += stream_opts["ds0"] / (speed_scale * speed[ind])

        root_pts[:, seed_idx] = flow_lines[seed_idx][:, ind]
        if valid_pt:
            valid_pt_inds.append(seed_idx)

    # remove seeds that have flown out of our region of interest
    # (aka, beyond the flow line we drew)
    root_pts = root_pts[:, valid_pt_inds]

    logger.debug("ok, done with all that :)")
    return root_pts
Example #6
0
def include(elem, loader=None, base_url="./", _parent_hrefs=None):
    """Expand xi:include directives under *elem*, in place.

    base_url is just a file path.

    Args:
        elem: ElementTree element whose children are scanned for
            xi:include tags; modified in place
        loader (callable): loader(href, parse, elem[, encoding]); returns
            the included node/text, or None when an xml include is
            missing; defaults to _xdmf_default_loader
        base_url (str): path against which relative hrefs are resolved
        _parent_hrefs (set): internal; hrefs already on the current
            include chain, used to detect recursive includes

    Raises:
        FatalIncludeError: on a recursive xml include, a failed text
            include, or an unknown parse type
    """
    if loader is None:
        loader = _xdmf_default_loader

    # TODO: for some nested includes, urljoin is adding an extra / which
    # means this way of detecting infinite recursion doesn't work
    if _parent_hrefs is None:
        _parent_hrefs = set()

    # look for xinclude elements
    i = 0
    while i < len(elem):
        e = elem[i]
        if e.tag == XINCLUDE_INCLUDE:
            # process xinclude directive
            href = e.get("href")
            href = urljoin(base_url, href)
            parse = e.get("parse", "xml")
            pointer = e.get("xpointer", None)

            if parse == "xml":
                if href in _parent_hrefs:
                    raise FatalIncludeError(
                        "recursive include of {0} detected".format(href)
                        )
                _parent_hrefs.add(href)
                node = loader(href, parse, e)

                if node is None:
                    # missing file: silently drop the include element and
                    # keep going (i stays put because the list shrank)
                    logger.debug("XInclude: File '{0}' not found".format(href))
                    del elem[i]
                    continue

                # trying to use our limited xpointer / xpath support?
                if pointer is not None:
                    # really poor man's way of working around the fact that
                    # default etree can't do absolute xpaths
                    if pointer.startswith("xpointer("):
                        # strip the "xpointer(" prefix and trailing ")"
                        pointer = pointer[9:-1]
                    # rewrite paths rooted at the included document's own
                    # tag into relative ./ or .// paths etree can handle
                    if pointer.startswith(node.tag):
                        pointer = "./" + "/".join(pointer.split("/")[1:])
                    if pointer.startswith("/" + node.tag):
                        pointer = "./" + "/".join(pointer.split("/")[2:])
                    if pointer.startswith("//" + node.tag):
                        pointer = ".//" + "/".join(pointer.split("/")[3:])
                    # NOTE(review): node.find(pointer) may return None if
                    # the pointer matches nothing; copy.copy(None) would
                    # then feed None into the recursion below -- confirm
                    # callers never hit this
                    node = copy.copy(node.find(pointer))
                else:
                    node = copy.copy(node)

                # recursively look for xincludes in the included element
                include(node, loader, href, _parent_hrefs)

                # preserve any tail text that followed the include element
                if e.tail:
                    node.tail = (node.tail or "") + e.tail
                elem[i] = node

            elif parse == "text":
                text = loader(href, parse, e, e.get("encoding"))
                if text is None:
                    raise FatalIncludeError(
                        "cannot load %r as %r" % (href, parse)
                        )
                if i:
                    # append the text to the previous sibling's tail
                    node = elem[i-1]
                    node.tail = (node.tail or "") + text
                else:
                    # no previous sibling: prepend to the parent's text
                    elem.text = (elem.text or "") + text + (e.tail or "")
                del elem[i]
                continue
            else:
                raise FatalIncludeError(
                    "unknown parse type in xi:include tag (%r)" % parse
                )
        else:
            # not an include element; recurse into the child
            include(e, loader, base_url, _parent_hrefs)
        i = i + 1
Example #7
0
    def read_field(self,
                   comp,
                   slicex=slice(None),
                   slicey=slice(None),
                   slicez=slice(None),
                   dtype=np.float32):
        """Read one field component from a binary dump file.

        Parses the fixed binary header (presumably a VPIC-style field
        dump, cf. the dump.cc:field_dump reference below -- TODO confirm),
        validates its magic values, then seeks directly to component
        ``comp`` and reads it as a Fortran-ordered 3D array with one
        ghost layer stripped from each side.

        Args:
            comp (int): zero-based index of the field component to read
            slicex: slice applied along the x axis of the result
            slicey: slice applied along the y axis of the result
            slicez: slice applied along the z axis of the result
            dtype: numpy dtype used for the raw read (default float32)

        Returns:
            ndarray: the sliced, ghost-stripped component data

        Raises:
            AssertionError: if any header sanity check fails
        """
        with self as _:
            f_in = self._file
            # header part 1: five signed bytes describing primitive sizes
            fmt_1 = struct.Struct('=5b')
            CHAR_BITS, sz_short_int, sz_int, sz_float, \
                sz_double = fmt_1.unpack(f_in.read(fmt_1.size))
            #logger.debug(CHAR_BITS, sz_short_int, sz_int, sz_float, sz_double)
            assert CHAR_BITS == 8 and sz_short_int == 2
            assert sz_int == 4 and sz_float == 4 and sz_double == 8

            # header part 2: magic numbers verify endianness / layout
            fmt_2 = struct.Struct('=HIfd')
            cafe, deadbeef, float_1, double_1 = fmt_2.unpack(
                f_in.read(fmt_2.size))
            #logger.debug("{:x} {:x}".format(cafe, deadbeef))
            assert cafe == 0xcafe and deadbeef == 0xdeadbeef
            assert float_1 == 1 and double_1 == 1

            # header part 3: run metadata (grid shape, spacing, origin...)
            fmt_3 = struct.Struct('=6i10f')
            version, dump_type, step, nx_out, ny_out, nz_out, \
                dt, dx, dy, dz, x0, y0, z0, \
                cvac, eps0, damp = fmt_3.unpack(f_in.read(fmt_3.size))
            logger.debug('version {0} dump_type {1}'.format(
                version, dump_type))
            logger.debug('step {0} n_out {1}'.format(step,
                                                     (nx_out, ny_out, nz_out)))
            logger.debug('dt {0} dx {1} x0 {2}'.format(dt, (dx, dy, dz),
                                                       (x0, y0, z0)))
            logger.debug('cvac {0} eps0 {1} damp {2}'.format(cvac, eps0, damp))

            # header part 4: per-rank layout including ghost-padded sizes
            fmt_4 = struct.Struct('=3i1f5i')
            rank, nproc, sp_id, q_m, sz_data, ndim, ghost_size_x, \
               ghost_size_y, ghost_size_z = fmt_4.unpack(f_in.read(fmt_4.size))
            logger.debug("rank {0} nproc {1}".format(rank, nproc))
            logger.debug("sp_id {0} q_m {1}".format(sp_id, q_m))
            logger.debug("sz_data {0} ndim {1} ghost_size {2}"
                         "".format(sz_data, ndim,
                                   (ghost_size_x, ghost_size_y, ghost_size_z)))

            dim = (ghost_size_x, ghost_size_y, ghost_size_z)
            # The material data are originally short ints
            # but are written as uint_32_t (of size same
            # as float32; see dump.cc:field_dump); this is
            # perhaps for convnience.
            # TODO: slice when reading?
            # header total: 5 + 18 ('=HIfd') + 64 ('=6i10f') + 36
            # ('=3i1f5i') = 123 bytes, matching the structs read above
            HEADER_SIZE = 123
            RECORD_SIZE = 4
            assert f_in.tell() == HEADER_SIZE

            # jump straight to the requested component (components are
            # stored contiguously, one full ghost-padded grid each)
            f_in.seek(HEADER_SIZE + RECORD_SIZE * comp * np.prod(dim))
            data = np.fromfile(f_in, count=np.prod(dim), dtype=dtype)

            # Fortran order: x varies fastest on disk
            data = data.reshape(*dim, order='F')
            # strip one boundary (ghost) layer from each side
            data = data[1:-1, 1:-1, 1:-1]
            # NOTE(review): slices are applied as [slicez, slicey, slicex]
            # against an array shaped (ghost_size_x, ghost_size_y,
            # ghost_size_z) -- confirm this axis ordering is intended
            data_sliced = data[slicez, slicey, slicex]

            return data_sliced
Example #8
0
# If we have lxml, use it, else use a modification of the Python std lib xml

from viscid import logger

# When True, skip lxml and always use the patched stdlib ElementTree
# fallback (the `raise ImportError` below forces the except branch).
force_native_xml = True

try:
    if force_native_xml:
        # deliberately fall through to the stdlib backend below
        raise ImportError

    from lxml import etree
    logger.debug("Using lxml library")

    def parse(fname, **kwargs):
        # Thin wrapper so callers get the same `parse` API regardless of
        # which XML backend was selected at import time.
        return etree.parse(fname, **kwargs)

    def xinclude(tree, base_url=None, **kwargs):
        """Summary

        Args:
            tree (Tree): The object returned by parse
            base_url (str): Not used
            **kwargs: passed to tree.xinclude()
        """
        # TODO: ignore if an xincluded xdmf file doesn't exist?
        if base_url:
            # BUG FIX: Logger.warn is a deprecated alias (removed in
            # Python 3.13); use Logger.warning like the rest of the code
            logger.warning("lxml will ignore base_url: %s", base_url)
        return tree.xinclude(**kwargs)

except ImportError:
    from xml.etree import ElementTree