Example #1
    def _apply_env_config(self):
        """
        This method checks the environment variables for any OKTA
        configuration parameters and applies them if available.
        """
        # Flatten current config and join with underscores
        # (for environment variable format)
        flattened_config = FlatDict(self._config, delimiter='_')
        flattened_keys = flattened_config.keys()

        # Create empty result config and populate
        updated_config = FlatDict({}, delimiter='_')

        # Go through the keys and search for them in the environment vars
        # using the format described in the README
        for key in flattened_keys:
            env_key = ConfigSetter._OKTA + "_" + key.upper()
            env_value = os.environ.get(env_key, None)

            if env_value is not None:
                # If value is found, add to config
                if "scopes" in env_key.lower():
                    updated_config[key] = env_value.split(',')
                else:
                    updated_config[key] = env_value
        # apply to current configuration
        self._apply_config(updated_config.as_dict())
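
A minimal standalone sketch of the same pattern shown above (not the SDK code itself): flatten a nested config with FlatDict and an underscore delimiter, look each flattened key up in the environment under a prefixed, upper-cased name, and fold the hits back into a nested dict with as_dict(). The APP prefix and the sample config are assumptions made for illustration.

import os
from flatdict import FlatDict

config = {'client': {'orgUrl': '', 'token': ''},
          'testing': {'disableHttpsCheck': False}}

flat = FlatDict(config, delimiter='_')
overrides = FlatDict({}, delimiter='_')
for key in flat.keys():
    env_key = 'APP_' + key.upper()       # e.g. APP_CLIENT_ORGURL
    env_value = os.environ.get(env_key)
    if env_value is not None:
        overrides[key] = env_value       # delimited assignment re-creates the nesting

print(overrides.as_dict())               # only the settings found in the environment
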
Example #2
    def save(cls, network, phases=[], filename=''):
        r"""
        Write Network to a Mat file for exporting to Matlab.

        Parameters
        ----------
        network : OpenPNM Network Object

        filename : string
            Desired file name, defaults to network name if not given

        phases : list of phase objects ([])
            Phases that have properties we want to write to file

        """
        project, network, phases = cls._parse_args(network=network,
                                                   phases=phases)
        network = network[0]
        # Write to file
        if filename == '':
            filename = project.name
        filename = cls._parse_filename(filename=filename, ext='mat')

        d = Dict.to_dict(network=network, phases=phases, interleave=True)
        d = FlatDict(d, delimiter='|')
        d = sanitize_dict(d)
        new_d = {}
        for key in list(d.keys()):
            new_key = key.replace('|', '_').replace('.', '_')
            new_d[new_key] = d.pop(key)

        spio.savemat(file_name=filename, mdict=new_d)
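
The FlatDict step above is independent of OpenPNM: flatten the nested data with a '|' delimiter, then rewrite the keys into MATLAB-safe variable names before handing the dict to scipy.io.savemat. A reduced sketch with invented data and file name:

import numpy as np
import scipy.io as spio
from flatdict import FlatDict

data = {'net': {'pore.diameter': np.random.rand(10),
                'throat.length': np.random.rand(20)}}

d = FlatDict(data, delimiter='|')
mdict = {}
for key in list(d.keys()):
    # MATLAB variable names cannot contain '|' or '.', so replace both
    mdict[key.replace('|', '_').replace('.', '_')] = d.pop(key)

spio.savemat(file_name='example.mat', mdict=mdict)
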
Example #4
    def _prune_config(self, config):
        """
        This method cleans up the configuration object by removing fields
        with no value
        """
        # Flatten dictionary to account for nested dictionary
        flat_current_config = FlatDict(config, delimiter='_')
        # Iterate through keys and remove if value is still empty string
        for key in flat_current_config.keys():
            if flat_current_config.get(key) == '':
                del flat_current_config[key]

        return flat_current_config.as_dict()
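
FlatDict turns this kind of pruning into a flat loop instead of a hand-written recursion: every leaf is addressable by one delimited key, and as_dict() restores the nesting afterwards. A small sketch with an invented config:

from flatdict import FlatDict

config = {'db': {'host': 'localhost', 'password': ''}, 'debug': ''}

flat = FlatDict(config, delimiter='_')
for key in list(flat.keys()):
    if flat.get(key) == '':          # drop leaves that are still empty strings
        del flat[key]

print(flat.as_dict())                # {'db': {'host': 'localhost'}}
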
Example #5
    def process(self, config=None):
        if config is None:
            config = {}

        config = FlatDict(config, delimiter='_')
        environ_config = {}

        for key in config.keys():
            env_key = '_'.join([self.prefix, key.upper()])
            env_key = self.aliases.get(env_key, env_key)
            value = environ.get(env_key)

            if value:
                if isinstance(config[key], int):
                    value = int(value)

                environ_config[key] = value

        _extend_dict(config, environ_config)
        config = config.as_dict()

        return config
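
The extra wrinkle in process() is that environment variables are always strings, so the value is cast back to the type of the existing default before merging. A tiny standalone illustration of that cast (the MYAPP prefix and keys are invented):

import os
from flatdict import FlatDict

defaults = FlatDict({'server': {'port': 8080, 'host': 'localhost'}}, delimiter='_')
os.environ['MYAPP_SERVER_PORT'] = '9000'      # pretend the shell exported this

overrides = {}
for key in defaults.keys():
    value = os.environ.get('MYAPP_' + key.upper())
    if value:
        if isinstance(defaults[key], int):    # keep ints as ints
            value = int(value)
        overrides[key] = value

defaults.update(overrides)
print(defaults.as_dict())                     # 'port' is now the int 9000
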
Example #6
    def to_hdf5(cls, network=None, phases=[], element=['pore', 'throat'],
                filename='', interleave=True, flatten=False, categorize_by=[]):
        r"""
        Creates an HDF5 file containing data from the specified objects,
        and categorized according to the given arguments.

        Parameters
        ----------
        network : OpenPNM Network Object
            The network containing the desired data

        phases : list of OpenPNM Phase Objects (optional, default is none)
            A list of phase objects whose data are to be included

        element : string or list of strings
            An indication of whether 'pore' and/or 'throat' data are desired.
            The default is both.

        interleave : boolean (default is ``True``)
            When ``True`` (default) the data from all Geometry objects (and
            Physics objects if ``phases`` are given) is interleaved into
            a single array and stored as a network property (or Phase
            property for Physics data). When ``False``, the data for each
            object are stored under their own dictionary key, the structuring
            of which depends on the value of the ``flatten`` argument.

        flatten : boolean (default is ``False``)
            When ``True``, all objects are accessible from the top level
            of the dictionary.  When ``False`` objects are nested under their
            parent object.  If ``interleave`` is ``True`` this argument is
            ignored.

        categorize_by : string or list of strings
            Indicates how the dictionaries should be organized.  The list can
            contain any, all or none of the following strings:

            **'objects'** : If specified the dictionary keys will be stored
            under a general level corresponding to their type (e.g.
            'network/net_01/pore.all'). If ``interleave`` is ``True`` then
            the only categories are *network* and *phase*, since
            *geometry* and *physics* data get stored under their respective
            *network* and *phase*.

            **'data'** : If specified the data arrays are additionally
            categorized by ``label`` and ``property`` to separate *boolean*
            from *numeric* data.

            **'elements'** : If specified the data arrays are additionally
            categorized by ``pore`` and ``throat``, meaning that the propnames
            are no longer prepended by a 'pore.' or 'throat.'

        """
        project, network, phases = cls._parse_args(network=network,
                                                   phases=phases)
        if filename == '':
            filename = project.name
        filename = cls._parse_filename(filename, ext='hdf')

        dct = Dict.to_dict(network=network, phases=phases, element=element,
                           interleave=interleave, flatten=flatten,
                           categorize_by=categorize_by)
        d = FlatDict(dct, delimiter='/')

        f = h5py.File(filename, "w")
        for item in d.keys():
            tempname = '_'.join(item.split('.'))
            arr = d[item]
            if d[item].dtype == 'O':
                logger.warning(item + ' has dtype object,' +
                               ' will not write to file')
                del d[item]
            elif 'U' in str(arr[0].dtype):
                pass
            else:
                f.create_dataset(name='/'+tempname, shape=arr.shape,
                                 dtype=arr.dtype, data=arr)
        return f
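
The '/' delimiter is what makes this fit HDF5 so neatly: flattened keys double as dataset paths, and h5py creates the intermediate groups implicitly. A stripped-down sketch with fabricated arrays and file name:

import h5py
import numpy as np
from flatdict import FlatDict

dct = {'network': {'net_01': {'pore.diameter': np.random.rand(10),
                              'pore.all': np.ones(10, dtype=bool)}}}

d = FlatDict(dct, delimiter='/')
with h5py.File('example.hdf', 'w') as f:
    for item in d.keys():
        arr = d[item]
        if arr.dtype == 'O':              # h5py cannot store generic object arrays
            continue
        name = '_'.join(item.split('.'))  # 'pore.diameter' -> 'pore_diameter'
        f.create_dataset(name='/' + name, shape=arr.shape, dtype=arr.dtype, data=arr)
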
Example #7
    def save(cls, network, phases=[], filename='', delim=' | ', fill_nans=None):
        r"""
        Save network and phase data to a single vtp file for visualizing in
        Paraview

        Parameters
        ----------
        network : OpenPNM Network Object
            The Network containing the data to be written

        phases : list, optional
            A list containing OpenPNM Phase object(s) containing data to be
            written

        filename : string, optional
            Filename to write data.  If no name is given the file is named
            after the network

        delim : string
            Specify which character is used to delimit the data names.  The
            default is ' | ' which creates a nice clean output in the Paraview
            pipeline viewer (e.g. net | property | pore | diameter)

        fill_nans : scalar
            The value to use to replace NaNs with.  The VTK file format does
            not work with NaNs, so they must be dealt with.  The default is
            `None` which means property arrays with NaNs are not written to the
            file.  Other useful options might be 0 or -1, but the user must
            be aware that these are not real values, only place holders.

        """
        project, network, phases = cls._parse_args(network=network,
                                                   phases=phases)

        am = Dict.to_dict(network=network, phases=phases, interleave=True,
                          categorize_by=['object', 'data'])
        am = FlatDict(am, delimiter=delim)
        key_list = list(sorted(am.keys()))

        network = network[0]
        points = network['pore.coords']
        pairs = network['throat.conns']
        num_points = np.shape(points)[0]
        num_throats = np.shape(pairs)[0]

        root = ET.fromstring(VTK._TEMPLATE)
        piece_node = root.find('PolyData').find('Piece')
        piece_node.set("NumberOfPoints", str(num_points))
        piece_node.set("NumberOfLines", str(num_throats))
        points_node = piece_node.find('Points')
        coords = VTK._array_to_element("coords", points.T.ravel('F'), n=3)
        points_node.append(coords)
        lines_node = piece_node.find('Lines')
        connectivity = VTK._array_to_element("connectivity", pairs)
        lines_node.append(connectivity)
        offsets = VTK._array_to_element("offsets", 2*np.arange(len(pairs))+2)
        lines_node.append(offsets)

        point_data_node = piece_node.find('PointData')
        cell_data_node = piece_node.find('CellData')
        for key in key_list:
            array = am[key]
            if array.dtype == 'O':
                logger.warning(key + ' has dtype object,' +
                               ' will not write to file')
            else:
                if array.dtype == bool:
                    array = array.astype(int)
                if np.any(np.isnan(array)):
                    if fill_nans is None:
                        logger.warning(key + ' has nans,' +
                                       ' will not write to file')
                        continue
                    else:
                        array[np.isnan(array)] = fill_nans
                element = VTK._array_to_element(key, array)
                if (array.size == num_points):
                    point_data_node.append(element)
                elif (array.size == num_throats):
                    cell_data_node.append(element)

        if filename == '':
            filename = project.name
        filename = cls._parse_filename(filename=filename, ext='vtp')

        tree = ET.ElementTree(root)
        tree.write(filename)

        with open(filename, 'r+') as f:
            string = f.read()
            string = string.replace('</DataArray>', '</DataArray>\n\t\t\t')
            f.seek(0)
            # consider adding header: '<?xml version="1.0"?>\n'+
            f.write(string)
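
Only the FlatDict call decides what the arrays end up being called in Paraview: with ' | ' as the delimiter the nested categories collapse into readable names. A tiny illustration with an invented dict (no VTK writing here):

import numpy as np
from flatdict import FlatDict

am = {'network': {'properties': {'pore.diameter': np.random.rand(4)},
                  'labels': {'pore.all': np.ones(4, dtype=bool)}}}

am = FlatDict(am, delimiter=' | ')
for key in sorted(am.keys()):
    print(key)
# network | labels | pore.all
# network | properties | pore.diameter
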
Example #8
    def from_dict(cls, dct, project=None, delim=' | '):
        r"""
        This method converts a correctly formatted dictionary into OpenPNM
        objects, and returns a handle to the *project* containing them.

        Parameters
        ----------
        dct : dictionary
            The Python dictionary containing the data.  The nesting and
            labeling of the dictionary is used to create the appropriate
            OpenPNM objects.

        project : OpenPNM Project Object
            The project with which the created objects should be associated.
            If not supplied, one will be created.

        Returns
        -------
        An OpenPNM Project containing the objects created to store the given
        data.

        Notes
        -----
        The requirement of a *correctly formed* dictionary is rather strict,
        and essentially means a dictionary produced by the ``to_dict`` method
        of this class.

        """
        if project is None:
            project = ws.new_project()

        # Uncategorize pore/throat and labels/properties, if present
        fd = FlatDict(dct, delimiter=delim)
        # If . is the delimiter, replace with | otherwise things break
        if delim == '.':
            delim = ' | '
            for key in list(fd.keys()):
                new_key = key.replace('.', delim)
                fd[new_key] = fd.pop(key)
        d = FlatDict(delimiter=delim)
        for key in list(fd.keys()):
            new_key = key.replace('pore' + delim, 'pore.')
            new_key = new_key.replace('throat' + delim, 'throat.')
            new_key = new_key.replace('labels' + delim, '')
            new_key = new_key.replace('properties' + delim, '')
            d[new_key] = fd.pop(key)

        # Place data into correctly categorized dicts, for later handling
        objs = {
            'network': NestedDict(),
            'geometry': NestedDict(),
            'physics': NestedDict(),
            'phase': NestedDict(),
            'algorithm': NestedDict(),
            'base': NestedDict()
        }
        for item in d.keys():
            path = item.split(delim)
            if len(path) > 2:
                if path[-3] in objs.keys():
                    # Item is categorized by type, so note it
                    objs[path[-3]][path[-2]][path[-1]] = d[item]
                else:
                    # item is nested, not categorized; make it a base
                    objs['base'][path[-2]][path[-1]] = d[item]
            else:
                # If not categorized by type, make it a base
                objs['base'][path[-2]][path[-1]] = d[item]

        # Convert to OpenPNM Objects, attempting to infer type
        for objtype in objs.keys():
            for name in objs[objtype].keys():
                # Create empty object, using dummy name to avoid error
                obj = project._new_object(objtype=objtype, name='')
                # Overwrite name
                obj._set_name(name=name, validate=False)
                # Update new object with data from dict
                obj.update(objs[objtype][name])

        return project
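
Going the other way relies on the fact that FlatDict keys can be renamed freely: pop the old flattened key, assign the rewritten one, and the nesting regroups itself. A minimal sketch of that key-rewriting idiom with invented keys:

from flatdict import FlatDict

delim = ' | '
fd = FlatDict({'net_01': {'labels': {'pore': {'all': True}}}}, delimiter=delim)

d = FlatDict(delimiter=delim)
for key in list(fd.keys()):
    new_key = key.replace('pore' + delim, 'pore.')
    new_key = new_key.replace('labels' + delim, '')
    d[new_key] = fd.pop(key)

print(list(d.keys()))    # ['net_01 | pore.all']
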
Example #10
    def to_hdf5(cls,
                network=None,
                phases=[],
                element=['pore', 'throat'],
                filename='',
                interleave=True,
                flatten=False,
                categorize_by=[]):
        r"""
        Creates an HDF5 file containing data from the specified objects,
        and categorized according to the given arguments.

        Parameters
        ----------
        network : OpenPNM Network Object
            The network containing the desired data

        phases : list of OpenPNM Phase Objects (optional, default is none)
            A list of phase objects whose data are to be included

        element : string or list of strings
            An indication of whether 'pore' and/or 'throat' data are desired.
            The default is both.

        interleave : boolean (default is ``True``)
            When ``True`` (default) the data from all Geometry objects (and
            Physics objects if ``phases`` are given) is interleaved into
            a single array and stored as a network property (or Phase
            property for Physics data). When ``False``, the data for each
            object are stored under their own dictionary key, the structuring
            of which depends on the value of the ``flatten`` argument.

        flatten : boolean (default is ``False``)
            When ``True``, all objects are accessible from the top level
            of the dictionary.  When ``False`` objects are nested under their
            parent object.  If ``interleave`` is ``True`` this argument is
            ignored.

        categorize_by : string or list of strings
            Indicates how the dictionaries should be organized.  The list can
            contain any, all or none of the following strings:

            **'objects'** : If specified the dictionary keys will be stored
            under a general level corresponding to their type (e.g.
            'network/net_01/pore.all'). If ``interleave`` is ``True`` then
            the only categories are *network* and *phase*, since
            *geometry* and *physics* data get stored under their respective
            *network* and *phase*.

            **'data'** : If specified the data arrays are additionally
            categorized by ``label`` and ``property`` to separate *boolean*
            from *numeric* data.

            **'elements'** : If specified the data arrays are additionally
            categorized by ``pore`` and ``throat``, meaning that the propnames
            are no longer prepended by a 'pore.' or 'throat.'

        """
        project, network, phases = cls._parse_args(network=network,
                                                   phases=phases)
        if filename == '':
            filename = project.name
        filename = cls._parse_filename(filename, ext='hdf')

        dct = Dict.to_dict(network=network,
                           phases=phases,
                           element=element,
                           interleave=interleave,
                           flatten=flatten,
                           categorize_by=categorize_by)
        d = FlatDict(dct, delimiter='/')

        f = hdfFile(filename, "w")
        for item in d.keys():
            tempname = '_'.join(item.split('.'))
            arr = d[item]
            if d[item].dtype == 'O':
                logger.warning(item + ' has dtype object,' +
                               ' will not write to file')
                del d[item]
            elif 'U' in str(arr[0].dtype):
                pass
            else:
                f.create_dataset(name='/' + tempname,
                                 shape=arr.shape,
                                 dtype=arr.dtype,
                                 data=arr)
        return f
Example #11
    def save(cls, network, phases=[], filename=''):
        r"""
        Saves (transient/steady-state) data from the given objects into the
        specified file.

        Parameters
        ----------
        network : OpenPNM Network Object
            The network containing the desired data

        phases : list of OpenPNM Phase Objects (optional, default is none)
            A list of phase objects whose data are to be included

        Notes
        -----
        This method only saves the data, not any of the pore-scale models or
        other attributes.  To save an actual OpenPNM Project use the
        ``Workspace`` object.

        """
        project, network, phases = cls._parse_args(network=network,
                                                   phases=phases)
        network = network[0]
        # Check if any of the phases has time series
        transient = GenericIO.is_transient(phases=phases)

        if filename == '':
            filename = project.name
        path = cls._parse_filename(filename=filename, ext='xmf')
        # Path is a pathlib object, so slice it up as needed
        fname_xdf = path.name
        d = Dict.to_dict(network, phases=phases, interleave=True,
                         flatten=False, categorize_by=['element', 'data'])
        D = FlatDict(d, delimiter='/')
        # Identify time steps
        t_steps = []
        if transient:
            for key in D.keys():
                if '@' in key:
                    t_steps.append(key.split('@')[1])
        t_grid = create_grid(Name="TimeSeries", GridType="Collection",
                             CollectionType="Temporal")
        # If steady-state, define '0' time step
        if not transient:
            t_steps.append('0')
        # Setup xdmf file
        root = create_root('Xdmf')
        domain = create_domain()
        # Iterate over time steps present
        for t in range(len(t_steps)):
            # Define the hdf file
            if not transient:
                fname_hdf = path.stem+".hdf"
            else:
                fname_hdf = path.stem+'@'+t_steps[t]+".hdf"
            path_p = path.parent
            f = h5py.File(path_p.joinpath(fname_hdf), "w")
            # Add coordinate and connection information to top of HDF5 file
            f["coordinates"] = network["pore.coords"]
            f["connections"] = network["throat.conns"]
            # geometry coordinates
            row, col = f["coordinates"].shape
            dims = ' '.join((str(row), str(col)))
            hdf_loc = fname_hdf + ":coordinates"
            geo_data = create_data_item(value=hdf_loc, Dimensions=dims,
                                        Format='HDF', DataType="Float")
            geo = create_geometry(GeometryType="XYZ")
            geo.append(geo_data)
            # topology connections
            row, col = f["connections"].shape  # col first then row
            dims = ' '.join((str(row), str(col)))
            hdf_loc = fname_hdf + ":connections"
            topo_data = create_data_item(value=hdf_loc, Dimensions=dims,
                                         Format="HDF", NumberType="Int")
            topo = create_topology(TopologyType="Polyline",
                                   NodesPerElement=str(2),
                                   NumberOfElements=str(row))
            topo.append(topo_data)
            # Make HDF5 file with all datasets, and no groups
            for item in D.keys():
                if D[item].dtype == 'O':
                    logger.warning(item + ' has dtype object,' +
                                   ' will not write to file')
                    del D[item]
                elif 'U' in str(D[item][0].dtype):
                    pass
                elif ('@' in item and t_steps[t] == item.split('@')[1]):
                    f.create_dataset(name='/'+item.split('@')[0]+'@t',
                                     shape=D[item].shape,
                                     dtype=D[item].dtype,
                                     data=D[item])
                elif ('@' not in item and t == 0):
                    f.create_dataset(name='/'+item, shape=D[item].shape,
                                     dtype=D[item].dtype, data=D[item])
            # Create a grid
            grid = create_grid(Name=t_steps[t], GridType="Uniform")
            time = create_time(type='Single', Value=t_steps[t])
            grid.append(time)
            # Add pore and throat properties
            for item in D.keys():
                if item not in ['coordinates', 'connections']:
                    if (('@' in item and t_steps[t] == item.split('@')[1]) or
                            ('@' not in item)):
                        attr_type = 'Scalar'
                        shape = D[item].shape
                        dims = (''.join([str(i) +
                                         ' ' for i in list(shape)[::-1]]))
                        if '@' in item:
                            item = item.split('@')[0]+'@t'
                            hdf_loc = fname_hdf + ":" + item
                        elif ('@' not in item and t == 0):
                            hdf_loc = fname_hdf + ":" + item
                        elif ('@' not in item and t > 0):
                            hdf_loc = (path.stem+'@'+t_steps[0]+".hdf" +
                                       ":" + item)
                        attr = create_data_item(value=hdf_loc,
                                                Dimensions=dims,
                                                Format='HDF',
                                                Precision='8',
                                                DataType='Float')
                        name = item.replace('/', ' | ')
                        if 'throat' in item:
                            Center = "Cell"
                        else:
                            Center = "Node"
                        el_attr = create_attribute(Name=name, Center=Center,
                                                   AttributeType=attr_type)
                        el_attr.append(attr)
                        grid.append(el_attr)
                    else:
                        pass
            grid.append(topo)
            grid.append(geo)
            t_grid.append(grid)
            # Close the HDF5 file
            f.close()
        domain.append(t_grid)
        root.append(domain)
        with open(path_p.joinpath(fname_xdf), 'w') as file:
            file.write(cls._header)
            file.write(ET.tostring(root).decode("utf-8"))
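
The transient bookkeeping is driven purely by the flattened keys: time-dependent arrays carry an '@<time>' suffix in their property names, so the time steps can be recovered by scanning the FlatDict keys for '@'. A small sketch of that detection step with a fabricated dict:

import numpy as np
from flatdict import FlatDict

d = {'pore': {'properties': {'pore.pressure@0.5': np.zeros(3),
                             'pore.pressure@1.0': np.zeros(3),
                             'pore.diameter': np.ones(3)}}}

D = FlatDict(d, delimiter='/')
t_steps = sorted({key.split('@')[1] for key in D.keys() if '@' in key})
print(t_steps)    # ['0.5', '1.0']
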
Example #12
    def save(cls, network, phases=[], filename=''):
        r"""
        Saves data from the given objects into the specified file.

        Parameters
        ----------
        network : OpenPNM Network Object
            The network containing the desired data

        phases : list of OpenPNM Phase Objects (optional, default is none)
            A list of phase objects whose data are to be included

        Notes
        -----
        This method only saves the data, not any of the pore-scale models or
        other attributes.  To save an actual OpenPNM Project use the
        ``Workspace`` object.

        """
        project, network, phases = cls._parse_args(network=network,
                                                   phases=phases)
        network = network[0]

        if filename == '':
            filename = project.name
        path = cls._parse_filename(filename=filename, ext='xmf')
        # Path is a pathlib object, so slice it up as needed
        fname_xdf = path.name
        fname_hdf = path.stem+".hdf"
        path = path.parent
        f = h5py.File(path.joinpath(fname_hdf), "w")

        d = Dict.to_dict(network, phases=phases, interleave=True,
                         flatten=False, categorize_by=['element', 'data'])

        # Make HDF5 file with all datasets, and no groups
        D = FlatDict(d, delimiter='/')
        for item in D.keys():
            if D[item].dtype == 'O':
                logger.warning(item + ' has dtype object,' +
                               ' will not write to file')
                del D[item]
            elif 'U' in str(D[item][0].dtype):
                pass
            else:
                f.create_dataset(name='/'+item, shape=D[item].shape,
                                 dtype=D[item].dtype, data=D[item])
        # Add coordinate and connection information to top of HDF5 file
        f["coordinates"] = network["pore.coords"]
        f["connections"] = network["throat.conns"]

        # setup xdmf file
        root = create_root('Xdmf')
        domain = create_domain()
        grid = create_grid(Name="Structure", GridType="Uniform")

        # geometry coordinates
        row, col = f["coordinates"].shape
        dims = ' '.join((str(row), str(col)))
        hdf_loc = fname_hdf + ":coordinates"
        geo_data = create_data_item(value=hdf_loc, Dimensions=dims,
                                    Format='HDF', DataType="Float")
        geo = create_geometry(GeometryType="XYZ")
        geo.append(geo_data)

        # topology connections
        row, col = f["connections"].shape  # col first then row
        dims = ' '.join((str(row), str(col)))
        hdf_loc = fname_hdf + ":connections"
        topo_data = create_data_item(value=hdf_loc, Dimensions=dims,
                                     Format="HDF", NumberType="Int")
        topo = create_topology(TopologyType="Polyline",
                               NodesPerElement=str(2),
                               NumberOfElements=str(row))
        topo.append(topo_data)

        # Add pore and throat properties
        for item in D.keys():
            if item not in ['coordinates', 'connections']:
                attr_type = 'Scalar'
                shape = f[item].shape
                dims = ''.join([str(i) + ' ' for i in list(shape)[::-1]])
                hdf_loc = fname_hdf + ":" + item
                attr = create_data_item(value=hdf_loc,
                                        Dimensions=dims,
                                        Format='HDF',
                                        Precision='8',
                                        DataType='Float')
                name = item.replace('/', ' | ')
                if 'throat' in item:
                    Center = "Cell"
                else:
                    Center = "Node"
                el_attr = create_attribute(Name=name, Center=Center,
                                           AttributeType=attr_type)
                el_attr.append(attr)
                grid.append(el_attr)

        grid.append(topo)
        grid.append(geo)
        domain.append(grid)
        root.append(domain)

        with open(path.joinpath(fname_xdf), 'w') as file:
            file.write(cls._header)
            file.write(ET.tostring(root).decode("utf-8"))

        # Close the HDF5 file
        f.close()
Example #13
    def save(cls, network, phases=[], filename='', delim=' | ',
             fill_nans=None, fill_infs=None):
        r"""
        Save network and phase data to a single vtp file for visualizing in
        Paraview
        Parameters
        ----------
        network : OpenPNM Network Object
            The Network containing the data to be written
        phases : list, optional
            A list containing OpenPNM Phase object(s) containing data to be
            written
        filename : string, optional
            Filename to write data.  If no name is given the file is named
            after the network
        delim : string
            Specify which character is used to delimit the data names.  The
            default is ' | ' which creates a nice clean output in the Paraview
            pipeline viewer (e.g. net | property | pore | diameter)
        fill_nans : scalar
            The value to use to replace NaNs with.  The VTK file format does
            not work with NaNs, so they must be dealt with.  The default is
            `None` which means property arrays with NaNs are not written to the
            file.  Other useful options might be 0 or -1, but the user must
            be aware that these are not real values, only place holders.
        fill_infs : scalar
            The value to use to replace infs with.  The default is ``None``
            which means that property arrays containing infs will *not*
            be written to the file, and a warning will be issued.
        """
        project, network, phases = cls._parse_args(network=network,
                                                   phases=phases)
        # Check if any of the phases has time series
        transient = GenericIO.is_transient(phases=phases)
        if transient:
            logger.warning('vtp format does not support transient data, ' +
                           'use xdmf instead')
        if filename == '':
            filename = project.name
        filename = cls._parse_filename(filename=filename, ext='vtp')

        am = Dict.to_dict(network=network, phases=phases, interleave=True,
                          categorize_by=['object', 'data'])
        am = FlatDict(am, delimiter=delim)
        key_list = list(sorted(am.keys()))

        network = network[0]
        points = network['pore.coords']
        pairs = network['throat.conns']
        num_points = np.shape(points)[0]
        num_throats = np.shape(pairs)[0]

        root = ET.fromstring(VTK._TEMPLATE)
        piece_node = root.find('PolyData').find('Piece')
        piece_node.set("NumberOfPoints", str(num_points))
        piece_node.set("NumberOfLines", str(num_throats))
        points_node = piece_node.find('Points')
        coords = VTK._array_to_element("coords", points.T.ravel('F'), n=3)
        points_node.append(coords)
        lines_node = piece_node.find('Lines')
        connectivity = VTK._array_to_element("connectivity", pairs)
        lines_node.append(connectivity)
        offsets = VTK._array_to_element("offsets", 2*np.arange(len(pairs))+2)
        lines_node.append(offsets)

        point_data_node = piece_node.find('PointData')
        cell_data_node = piece_node.find('CellData')
        for key in key_list:
            array = am[key]
            if array.dtype == 'O':
                logger.warning(key + ' has dtype object,' +
                               ' will not write to file')
            else:
                if array.dtype == bool:
                    array = array.astype(int)
                if np.any(np.isnan(array)):
                    if fill_nans is None:
                        logger.warning(key + ' has nans,' +
                                       ' will not write to file')
                        continue
                    else:
                        array[np.isnan(array)] = fill_nans
                if np.any(np.isinf(array)):
                    if fill_infs is None:
                        logger.warning(key + ' has infs,' +
                                       ' will not write to file')
                        continue
                    else:
                        array[np.isinf(array)] = fill_infs
                element = VTK._array_to_element(key, array)
                if (array.size == num_points):
                    point_data_node.append(element)
                elif (array.size == num_throats):
                    cell_data_node.append(element)

        tree = ET.ElementTree(root)
        tree.write(filename)

        with open(filename, 'r+') as f:
            string = f.read()
            string = string.replace('</DataArray>', '</DataArray>\n\t\t\t')
            f.seek(0)
            # consider adding header: '<?xml version="1.0"?>\n'+
            f.write(string)
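
The NaN/inf handling is plain NumPy and can be exercised on its own. The helper below is hypothetical (not part of the library) and mirrors the guard logic with made-up data and placeholder fill values:

import numpy as np

def clean(array, fill_nans=None, fill_infs=None):
    """Return a copy safe to write, or None if the array must be skipped."""
    array = np.asarray(array, dtype=float).copy()
    if np.any(np.isnan(array)):
        if fill_nans is None:
            return None                  # caller logs a warning and skips it
        array[np.isnan(array)] = fill_nans
    if np.any(np.isinf(array)):
        if fill_infs is None:
            return None
        array[np.isinf(array)] = fill_infs
    return array

print(clean(np.array([1.0, np.nan, np.inf])))                              # None
print(clean(np.array([1.0, np.nan, np.inf]), fill_nans=-1, fill_infs=-1))  # [ 1. -1. -1.]
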
Example #14
    def export_data(cls, network, filename):
        r"""
        Exports an OpenPNM network to a paraview state file.

        Parameters
        ----------
        network : GenericNetwork
            The network containing the desired data.
        filename : str
            Path to saved .vtp file.

        Notes
        -----
        Outputs a pvsm file that can be opened in Paraview. The pvsm file will
        be saved with the same name as .vtp file

        """
        try:
            import paraview.simple
        except ModuleNotFoundError:
            msg = (
                "The paraview python bindings must be installed using "
                "conda install -c conda-forge paraview, however this may require"
                " using a virtualenv since conflicts with other packages are common."
                " This is why it is not explicitly included as a dependency in"
                " porespy.")
            raise ModuleNotFoundError(msg)
        paraview.simple._DisableFirstRenderCameraReset()
        file = os.path.splitext(filename)[0]
        x, y, z = np.ptp(network.coords, axis=0)
        if sum(op.topotools.dimensionality(network)) == 2:
            zshape = 0
            xshape = y
            yshape = x
        elif sum(op.topotools.dimensionality(network)) == 3:
            zshape = x
            xshape = z
            yshape = y
        maxshape = max(xshape, yshape, zshape)
        shape = np.array([xshape, yshape, zshape])
        # Create a new 'XML PolyData Reader'
        Path = os.getcwd() + "\\" + file + '.vtp'
        water = op.phases.Water(network=network)
        net_vtp = paraview.simple.XMLPolyDataReader(FileName=[Path])
        p = op.io.Dict.to_dict(network,
                               phases=[water],
                               element=['pore'],
                               flatten=False,
                               categorize_by=['data', 'object'])
        p = FlatDict(p, delimiter=' | ')
        t = op.io.Dict.to_dict(network,
                               phases=[water],
                               element=['throat'],
                               flatten=False,
                               categorize_by=['data', 'object'])
        t = FlatDict(t, delimiter=' | ')
        net_vtp.CellArrayStatus = t.keys()
        net_vtp.PointArrayStatus = p.keys()
        # Get active view
        render_view = paraview.simple.GetActiveViewOrCreate('RenderView')
        # Uncomment following to set a specific view size
        # render_view.ViewSize = [1524, 527]
        # Get layout
        _ = paraview.simple.GetLayout()
        # Show data in view
        net_vtp_display = paraview.simple.Show(net_vtp, render_view,
                                               'GeometryRepresentation')
        # Trace defaults for the display properties.
        net_vtp_display.Representation = 'Surface'
        net_vtp_display.ColorArrayName = [None, '']
        net_vtp_display.OSPRayScaleArray = [
            f'network | {network.name} | labels | pore.all'
        ]
        net_vtp_display.OSPRayScaleFunction = 'PiecewiseFunction'
        net_vtp_display.SelectOrientationVectors = 'None'
        net_vtp_display.ScaleFactor = (maxshape - 1) / 10
        net_vtp_display.SelectScaleArray = 'None'
        net_vtp_display.GlyphType = 'Arrow'
        net_vtp_display.GlyphTableIndexArray = 'None'
        net_vtp_display.GaussianRadius = (maxshape - 1) / 200
        net_vtp_display.SetScaleArray = [
            'POINTS', 'network | ' + network.name + ' | labels | pore.all'
        ]
        net_vtp_display.ScaleTransferFunction = 'PiecewiseFunction'
        net_vtp_display.OpacityArray = [
            'POINTS', 'network | ' + network.name + ' | labels | pore.all'
        ]
        net_vtp_display.OpacityTransferFunction = 'PiecewiseFunction'
        net_vtp_display.DataAxesGrid = 'GridAxesRepresentation'
        net_vtp_display.PolarAxes = 'PolarAxesRepresentation'
        # Init the 'PiecewiseFunction' selected for 'ScaleTransferFunction'
        net_vtp_display.ScaleTransferFunction.Points = [
            1, 0, 0.5, 0, 1, 1, 0.5, 0
        ]
        # Init the 'PiecewiseFunction' selected for 'OpacityTransferFunction'
        net_vtp_display.OpacityTransferFunction.Points = [
            1, 0, 0.5, 0, 1, 1, 0.5, 0
        ]
        # Reset view to fit data
        render_view.ResetCamera()
        # Get the material library
        _ = paraview.simple.GetMaterialLibrary()
        # Update the view to ensure updated data information
        render_view.Update()
        # Create a new 'Glyph'
        glyph = paraview.simple.Glyph(Input=net_vtp, GlyphType='Arrow')
        glyph.OrientationArray = ['POINTS', 'No orientation array']
        glyph.ScaleArray = ['POINTS', 'No scale array']
        glyph.ScaleFactor = 1
        glyph.GlyphTransform = 'Transform2'
        # Properties modified on glyph
        glyph.GlyphType = 'Sphere'
        glyph.ScaleArray = [
            'POINTS',
            'network | ' + network.name + ' | properties | pore.diameter'
        ]
        # Show data in view
        glyph_display = paraview.simple.Show(glyph, render_view,
                                             'GeometryRepresentation')
        # Trace defaults for the display properties.
        glyph_display.Representation = 'Surface'
        glyph_display.ColorArrayName = [None, '']
        glyph_display.OSPRayScaleArray = 'Normals'
        glyph_display.OSPRayScaleFunction = 'PiecewiseFunction'
        glyph_display.SelectOrientationVectors = 'None'
        glyph_display.ScaleFactor = (maxshape - 1) / 10
        glyph_display.SelectScaleArray = 'None'
        glyph_display.GlyphType = 'Arrow'
        glyph_display.GlyphTableIndexArray = 'None'
        glyph_display.GaussianRadius = (maxshape - 1) / 200
        glyph_display.SetScaleArray = ['POINTS', 'Normals']
        glyph_display.ScaleTransferFunction = 'PiecewiseFunction'
        glyph_display.OpacityArray = ['POINTS', 'Normals']
        glyph_display.OpacityTransferFunction = 'PiecewiseFunction'
        glyph_display.DataAxesGrid = 'GridAxesRepresentation'
        glyph_display.PolarAxes = 'PolarAxesRepresentation'
        # Init the 'PiecewiseFunction' selected for 'ScaleTransferFunction'
        glyph_display.ScaleTransferFunction.Points = [
            -0.97, 0, 0.5, 0, 0.97, 1, 0.5, 0
        ]
        # Init the 'PiecewiseFunction' selected for 'OpacityTransferFunction'
        glyph_display.OpacityTransferFunction.Points = [
            -0.97, 0, 0.5, 0, 0.97, 1, 0.5, 0
        ]
        # Update the view to ensure updated data information
        render_view.Update()
        # Set active source
        paraview.simple.SetActiveSource(net_vtp)
        # Create a new 'Shrink'
        shrink1 = paraview.simple.Shrink(Input=net_vtp)
        # Properties modified on shrink1
        shrink1.ShrinkFactor = 1.0
        # Show data in view
        shrink_display = paraview.simple.Show(
            shrink1, render_view, 'UnstructuredGridRepresentation')
        # Trace defaults for the display properties.
        shrink_display.Representation = 'Surface'
        shrink_display.ColorArrayName = [None, '']
        shrink_display.OSPRayScaleArray = [
            'network | ' + network.name + ' | labels | pore.all'
        ]
        shrink_display.OSPRayScaleFunction = 'PiecewiseFunction'
        shrink_display.SelectOrientationVectors = 'None'
        shrink_display.ScaleFactor = (maxshape - 1) / 10
        shrink_display.SelectScaleArray = 'None'
        shrink_display.GlyphType = 'Arrow'
        shrink_display.GlyphTableIndexArray = 'None'
        shrink_display.GaussianRadius = (maxshape - 1) / 200
        shrink_display.SetScaleArray = [
            'POINTS', 'network | ' + network.name + ' | labels | pore.all'
        ]
        shrink_display.ScaleTransferFunction = 'PiecewiseFunction'
        shrink_display.OpacityArray = [
            'POINTS', 'network | ' + network.name + ' | labels | pore.all'
        ]
        shrink_display.OpacityTransferFunction = 'PiecewiseFunction'
        shrink_display.DataAxesGrid = 'GridAxesRepresentation'
        shrink_display.PolarAxes = 'PolarAxesRepresentation'
        shrink_display.ScalarOpacityUnitDistance = 1.0349360947089783
        # Init the 'PiecewiseFunction' selected for 'ScaleTransferFunction'
        shrink_display.ScaleTransferFunction.Points = [
            1, 0, 0.5, 0, 1, 1, 0.5, 0
        ]
        # Init the 'PiecewiseFunction' selected for 'OpacityTransferFunction'
        shrink_display.OpacityTransferFunction.Points = [
            1, 0, 0.5, 0, 1, 1, 0.5, 0
        ]
        # Hide data in view
        paraview.simple.Hide(net_vtp, render_view)
        # Update the view to ensure updated data information
        render_view.Update()
        # Create a new 'Cell Data to Point Data'
        cellDatatoPointData1 = paraview.simple.CellDatatoPointData(
            Input=shrink1)
        cellDatatoPointData1.CellDataArraytoprocess = t
        # Show data in view
        cell_data_to_point_data_display = paraview.simple.Show(
            cellDatatoPointData1, render_view,
            'UnstructuredGridRepresentation')
        # Trace defaults for the display properties.
        cell_data_to_point_data_display.Representation = 'Surface'
        cell_data_to_point_data_display.ColorArrayName = [None, '']
        cell_data_to_point_data_display.OSPRayScaleArray = [
            'network | ' + network.name + ' | labels | pore.all'
        ]
        cell_data_to_point_data_display.OSPRayScaleFunction = 'PiecewiseFunction'
        cell_data_to_point_data_display.SelectOrientationVectors = 'None'
        cell_data_to_point_data_display.ScaleFactor = (maxshape - 1) / 10
        cell_data_to_point_data_display.SelectScaleArray = 'None'
        cell_data_to_point_data_display.GlyphType = 'Arrow'
        cell_data_to_point_data_display.GlyphTableIndexArray = 'None'
        cell_data_to_point_data_display.GaussianRadius = (maxshape - 1) / 200
        cell_data_to_point_data_display.SetScaleArray = [
            'POINTS', 'network | ' + network.name + ' | labels | pore.all'
        ]
        cell_data_to_point_data_display.ScaleTransferFunction = 'PiecewiseFunction'
        cell_data_to_point_data_display.OpacityArray = [
            'POINTS', 'network | ' + network.name + ' | labels | pore.all'
        ]
        cell_data_to_point_data_display.OpacityTransferFunction = 'PiecewiseFunction'
        cell_data_to_point_data_display.DataAxesGrid = 'GridAxesRepresentation'
        cell_data_to_point_data_display.PolarAxes = 'PolarAxesRepresentation'
        cell_data_to_point_data_display.ScalarOpacityUnitDistance = 1.0349360947089783
        # Init the 'PiecewiseFunction' selected for 'ScaleTransferFunction'
        cell_data_to_point_data_display.ScaleTransferFunction.Points = [
            1, 0, 0.5, 0, 1, 1, 0.5, 0
        ]
        # Init the 'PiecewiseFunction' selected for 'OpacityTransferFunction'
        cell_data_to_point_data_display.OpacityTransferFunction.Points = [
            1, 0, 0.5, 0, 1, 1, 0.5, 0
        ]
        # Hide data in view
        paraview.simple.Hide(shrink1, render_view)
        # Update the view to ensure updated data information
        render_view.Update()
        # Set active source
        paraview.simple.SetActiveSource(shrink1)
        # Set active source
        paraview.simple.SetActiveSource(cellDatatoPointData1)
        # Set active source
        paraview.simple.SetActiveSource(shrink1)
        # Create a new 'Extract Surface'
        extractSurface1 = paraview.simple.ExtractSurface(Input=shrink1)
        # Show data in view
        extract_surface_display = paraview.simple.Show(
            extractSurface1, render_view, 'GeometryRepresentation')
        # Trace defaults for the display properties.
        extract_surface_display.Representation = 'Surface'
        extract_surface_display.ColorArrayName = [None, '']
        extract_surface_display.OSPRayScaleArray = [
            'network | ' + network.name + ' | labels | pore.all'
        ]
        extract_surface_display.OSPRayScaleFunction = 'PiecewiseFunction'
        extract_surface_display.SelectOrientationVectors = 'None'
        extract_surface_display.ScaleFactor = (maxshape - 1) / 10
        extract_surface_display.SelectScaleArray = 'None'
        extract_surface_display.GlyphType = 'Arrow'
        extract_surface_display.GlyphTableIndexArray = 'None'
        extract_surface_display.GaussianRadius = (maxshape - 1) / 200
        extract_surface_display.SetScaleArray = [
            'POINTS', 'network | ' + network.name + ' | labels | pore.all'
        ]
        extract_surface_display.ScaleTransferFunction = 'PiecewiseFunction'
        extract_surface_display.OpacityArray = [
            'POINTS', 'network | ' + network.name + ' | labels | pore.all'
        ]
        extract_surface_display.OpacityTransferFunction = 'PiecewiseFunction'
        extract_surface_display.DataAxesGrid = 'GridAxesRepresentation'
        extract_surface_display.PolarAxes = 'PolarAxesRepresentation'
        # Init the 'PiecewiseFunction' selected for 'ScaleTransferFunction'
        extract_surface_display.ScaleTransferFunction.Points = [
            1, 0, 0.5, 0, 1, 1, 0.5, 0
        ]
        # Init the 'PiecewiseFunction' selected for 'OpacityTransferFunction'
        extract_surface_display.OpacityTransferFunction.Points = [
            1, 0, 0.5, 0, 1, 1, 0.5, 0
        ]
        # Hide data in view
        paraview.simple.Hide(shrink1, render_view)
        # Update the view to ensure updated data information
        render_view.Update()
        # create a new 'Tube'
        tube = paraview.simple.Tube(Input=extractSurface1)
        tube.Scalars = [
            'POINTS', 'network | ' + network.name + ' | labels | pore.all'
        ]
        tube.Vectors = [None, '1']
        tube.Radius = 0.04
        # Set active source
        paraview.simple.SetActiveSource(extractSurface1)
        # Destroy tube
        paraview.simple.Delete(tube)
        del tube
        # Set active source
        paraview.simple.SetActiveSource(shrink1)
        # Set active source
        paraview.simple.SetActiveSource(cellDatatoPointData1)
        # Set active source
        paraview.simple.SetActiveSource(extractSurface1)
        # Create a new 'Tube'
        tube = paraview.simple.Tube(Input=extractSurface1)
        tube.Scalars = [
            'POINTS', 'network | ' + network.name + ' | labels | pore.all'
        ]
        tube.Vectors = [None, '1']
        tube.Radius = 0.04
        # Properties modified on tube
        tube.Vectors = ['POINTS', '1']
        # Show data in view
        tube_display = paraview.simple.Show(tube, render_view,
                                            'GeometryRepresentation')
        # Trace defaults for the display properties.
        tube_display.Representation = 'Surface'
        tube_display.ColorArrayName = [None, '']
        tube_display.OSPRayScaleArray = 'TubeNormals'
        tube_display.OSPRayScaleFunction = 'PiecewiseFunction'
        tube_display.SelectOrientationVectors = 'None'
        tube_display.ScaleFactor = (maxshape) / 10
        tube_display.SelectScaleArray = 'None'
        tube_display.GlyphType = 'Arrow'
        tube_display.GlyphTableIndexArray = 'None'
        tube_display.GaussianRadius = (maxshape) / 200
        tube_display.SetScaleArray = ['POINTS', 'TubeNormals']
        tube_display.ScaleTransferFunction = 'PiecewiseFunction'
        tube_display.OpacityArray = ['POINTS', 'TubeNormals']
        tube_display.OpacityTransferFunction = 'PiecewiseFunction'
        tube_display.DataAxesGrid = 'GridAxesRepresentation'
        tube_display.PolarAxes = 'PolarAxesRepresentation'
        # Init the 'PiecewiseFunction' selected for 'ScaleTransferFunction'
        tube_display.ScaleTransferFunction.Points = [
            -1, 0, 0.5, 0, 1, 1, 0.5, 0
        ]
        # Init the 'PiecewiseFunction' selected for 'OpacityTransferFunction'
        tube_display.OpacityTransferFunction.Points = [
            -1, 0, 0.5, 0, 1, 1, 0.5, 0
        ]
        # Hide data in view
        paraview.simple.Hide(extractSurface1, render_view)
        # Update the view to ensure updated data information
        render_view.Update()
        # Saving camera placements for all active views
        # Current camera placement for render_view
        render_view.CameraPosition = [(xshape + 1) / 2, (yshape + 1) / 2,
                                      4.3 * np.sqrt(np.sum(shape / 2)**2)]
        render_view.CameraFocalPoint = [(xi + 1) / 2 for xi in shape]
        render_view.CameraParallelScale = np.sqrt(np.sum(shape / 2)**2)
        paraview.simple.SaveState(f"{file}.pvsm")
Example #15
    def to_dataframe(cls, network=None, phases=[], join=False, delim=' | '):
        r"""
        Convert the Network (and optionally Phase) data to Pandas DataFrames.

        Parameters
        ----------
        network: OpenPNM Network Object
            The network containing the data to be stored

        phases : list of OpenPNM Phase Objects
            The data on each supplied phase will be added to DataFrame

        join : boolean
            If ``False`` (default), two DataFrames are returned with *pore*
            data in one, and *throat* data in the other.  If ``True`` the pore
            and throat data are combined into a single DataFrame.  This can be
            problematic as it will put NaNs into all the *pore* columns which
            are shorter than the *throat* columns.

        Returns
        -------
        Pandas ``DataFrame`` object containing property and label data in each
        column.  If ``join`` was ``False`` (default) the two DataFrames are
        returned in a named tuple, or else a single DataFrame containing both
        pore and throat data, despite the column lengths being different.

        """
        project, network, phases = cls._parse_args(network=network,
                                                   phases=phases)

        # Initialize pore and throat data dictionary using Dict class
        pdata = Dict.to_dict(network=network,
                             phases=phases,
                             element='pore',
                             interleave=True,
                             flatten=True,
                             categorize_by=['object'])
        tdata = Dict.to_dict(network=network,
                             phases=phases,
                             element='throat',
                             interleave=True,
                             flatten=True,
                             categorize_by=['object'])
        pdata = FlatDict(pdata, delimiter=delim)
        tdata = FlatDict(tdata, delimiter=delim)

        # Scan data and convert non-1d arrays to multiple columns
        for key in list(pdata.keys()):
            if sp.shape(pdata[key]) != (network[0].Np, ):
                arr = pdata.pop(key)
                tmp = sp.split(arr, arr.shape[1], axis=1)
                cols = range(len(tmp))
                pdata.update(
                    {key + '[' + str(i) + ']': tmp[i].squeeze()
                     for i in cols})
        for key in list(tdata.keys()):
            if sp.shape(tdata[key]) != (network[0].Nt, ):
                arr = tdata.pop(key)
                tmp = sp.split(arr, arr.shape[1], axis=1)
                cols = range(len(tmp))
                tdata.update(
                    {key + '[' + str(i) + ']': tmp[i].squeeze()
                     for i in cols})

        # Convert sanitized dictionaries to DataFrames
        pdata = pd.DataFrame(sanitize_dict(pdata))
        tdata = pd.DataFrame(sanitize_dict(tdata))

        # Prepare DataFrames to be returned
        if join:
            data = tdata.join(other=pdata, how='left')
        else:
            nt = namedtuple('dataframes', ('pore', 'throat'))
            data = nt(pore=pdata, throat=tdata)

        return data
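
Apart from the OpenPNM accessors, the DataFrame conversion is just FlatDict plus pandas: flatten, split any 2-D array into one column per component, then build the DataFrame from the flat mapping. A reduced sketch with invented arrays:

import numpy as np
import pandas as pd
from flatdict import FlatDict

Np = 5
pdata = FlatDict({'net': {'pore.diameter': np.random.rand(Np),
                          'pore.coords': np.random.rand(Np, 3)}}, delimiter=' | ')

for key in list(pdata.keys()):
    arr = pdata[key]
    if arr.ndim > 1:                     # expand an Np-by-3 array into 3 columns
        pdata.pop(key)
        pdata.update({key + '[' + str(i) + ']': col.squeeze()
                      for i, col in enumerate(np.split(arr, arr.shape[1], axis=1))})

df = pd.DataFrame({k: pdata[k] for k in pdata.keys()})
print(df.columns.tolist())
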
Example #16
    def export_data(cls,
                    network,
                    phases=[],
                    filename="",
                    delim=" | ",
                    fill_nans=None,
                    fill_infs=None):
        r"""
        Save network and phase data to a single vtp file for visualizing in
        Paraview.

        Parameters
        ----------
        network : OpenPNM Network Object
            The Network containing the data to be written
        phases : list, optional
            A list containing OpenPNM Phase object(s) containing data to be
            written
        filename : string, optional
            Filename to write data.  If no name is given the file is named
            after the network
        delim : string
            Specify which character is used to delimit the data names.  The
            default is ' | ' which creates a nice clean output in the Paraview
            pipeline viewer (e.g. net | property | pore | diameter)
        fill_nans : scalar
            The value to use to replace NaNs with.  The VTK file format does
            not work with NaNs, so they must be dealt with.  The default is
            `None` which means property arrays with NaNs are not written to the
            file.  Other useful options might be 0 or -1, but the user must
            be aware that these are not real values, only place holders.
        fill_infs : scalar
            The value to use to replace infs with.  The default is ``None``
            which means that property arrays containing infs will *not*
            be written to the file, and a warning will be issued.

        """
        project, network, phases = cls._parse_args(network=network,
                                                   phases=phases)
        # Check if any of the phases has time series
        transient = GenericIO._is_transient(phases=phases)
        if transient:
            logger.warning("vtp format does not support transient data, " +
                           "use xdmf instead")
        if filename == "":
            filename = project.name
        filename = cls._parse_filename(filename=filename, ext="vtp")

        am = Dict.to_dict(
            network=network,
            phases=phases,
            interleave=True,
            categorize_by=["object", "data"],
        )
        am = FlatDict(am, delimiter=delim)
        key_list = list(sorted(am.keys()))

        network = network[0]
        points = network["pore.coords"]
        pairs = network["throat.conns"]
        num_points = np.shape(points)[0]
        num_throats = np.shape(pairs)[0]

        root = ET.fromstring(VTK._TEMPLATE)
        piece_node = root.find("PolyData").find("Piece")
        piece_node.set("NumberOfPoints", str(num_points))
        piece_node.set("NumberOfLines", str(num_throats))
        points_node = piece_node.find("Points")
        coords = VTK._array_to_element("coords", points.T.ravel("F"), n=3)
        points_node.append(coords)
        lines_node = piece_node.find("Lines")
        connectivity = VTK._array_to_element("connectivity", pairs)
        lines_node.append(connectivity)
        offsets = VTK._array_to_element("offsets",
                                        2 * np.arange(len(pairs)) + 2)
        lines_node.append(offsets)

        point_data_node = piece_node.find("PointData")
        cell_data_node = piece_node.find("CellData")
        for key in key_list:
            array = am[key]
            if array.dtype == "O":
                logger.warning(key + " has dtype object," +
                               " will not write to file")
            else:
                if array.dtype == bool:
                    array = array.astype(int)
                if np.any(np.isnan(array)):
                    if fill_nans is None:
                        logger.warning(key + " has nans," +
                                       " will not write to file")
                        continue
                    else:
                        array[np.isnan(array)] = fill_nans
                if np.any(np.isinf(array)):
                    if fill_infs is None:
                        logger.warning(key + " has infs," +
                                       " will not write to file")
                        continue
                    else:
                        array[np.isinf(array)] = fill_infs
                element = VTK._array_to_element(key, array)
                if array.size == num_points:
                    point_data_node.append(element)
                elif array.size == num_throats:
                    cell_data_node.append(element)

        tree = ET.ElementTree(root)
        tree.write(filename)

        with open(filename, "r+") as f:
            string = f.read()
            string = string.replace("</DataArray>", "</DataArray>\n\t\t\t")
            f.seek(0)
            # consider adding header: '<?xml version="1.0"?>\n'+
            f.write(string)