def import_data(cls, filename, project=None, delim=' | '):
    r"""
    Opens a 'csv' file, reads in the data, and adds it to the **Network**

    Parameters
    ----------
    filename : string (optional)
        The name of the file containing the data to import.  The
        formatting of this file is outlined below.
    project : OpenPNM Project object
        A GenericNetwork is created and added to the specified Project.
        If no Project object is supplied then one will be created and
        returned.
    delim : string
        The string used to separate the object name from the property
        name in the column headings (passed through to ``Dict.from_dict``).

    Returns
    -------
    project : list
        An OpenPNM project containing the data assigned to Generic
        versions of the objects from which it was exported.
    """
    # NOTE: ``read_table`` is deprecated in pandas; ``read_csv`` with an
    # explicit ``sep`` is the equivalent modern call.
    from pandas import read_csv
    if project is None:
        project = ws.new_project()
    fname = cls._parse_filename(filename, ext='csv')
    a = read_csv(filepath_or_buffer=fname,
                 sep=',',
                 skipinitialspace=True,
                 index_col=False,
                 true_values=['T', 't', 'True', 'true', 'TRUE'],
                 false_values=['F', 'f', 'False', 'false', 'FALSE'])
    dct = {}
    # Track columns already consumed by a merge so they are skipped on
    # later iterations.  (The previous version popped entries out of the
    # ``keys`` list while iterating it, which silently skipped elements
    # and could drop unrelated columns or re-pop merged ones.)
    merged = set()
    # Parse through all the items and re-merge multi-component columns
    for item in sorted(a.keys()):
        if item in merged:
            continue
        m = re.search(r'\[.\]', item)  # The dot '.' is a wildcard
        if m:  # m is None if pattern not found, otherwise merge cols
            pname = re.split(r'\[.\]', item)[0]  # Get base propname
            # Find all other keys with same base propname
            merge_keys = [k for k in a.keys() if k.startswith(pname)]
            # Retrieve and remove arrays with same base propname
            merge_cols = [a.pop(k) for k in merge_keys]
            # Merge arrays into multi-column array and store in DataFrame
            dct[pname] = np.vstack(merge_cols).T
            # Remember every component column so it is not revisited
            merged.update(merge_keys)
        else:
            dct[item] = np.array(a.pop(item))
    project = Dict.from_dict(dct, project=project, delim=delim)
    return project
def test_from_dict_not_interleaved_not_flatted_not_categorized(self):
    # Round-trip with no flattening, no interleaving, no categorization
    data = Dict.to_dict(network=self.net, phases=[self.phase_1],
                        flatten=False, interleave=False,
                        categorize_by=[])
    proj = Dict.from_dict(data)
    # All six objects come back, but without categorization hints none
    # can be identified as geometry, phase, or physics
    assert len(proj) == 6
    counts = (len(proj.geometries().values()),
              len(proj.phases().values()),
              len(proj.physics().values()))
    assert counts == (0, 0, 0)
def test_from_dict_interleaved_categorized_by_object(self):
    # Round-trip with interleaving on and objects categorized by type
    data = Dict.to_dict(network=self.net, phases=[self.phase_1],
                        flatten=False, interleave=True,
                        categorize_by=['object'])
    proj = Dict.from_dict(data)
    # Interleaving collapses subdomain data, leaving only two objects;
    # the 'object' category lets the phase be recovered as a phase
    assert len(proj) == 2
    counts = (len(proj.geometries().values()),
              len(proj.phases().values()),
              len(proj.physics().values()))
    assert counts == (0, 1, 0)
def test_from_dict_not_interleaved_not_flatted_cat_by_obj_data_elem(self):
    # Round-trip with every categorization flag enabled at once
    data = Dict.to_dict(network=self.net, phases=[self.phase_1],
                        flatten=False, interleave=False,
                        categorize_by=['object', 'element', 'data'])
    proj = Dict.from_dict(data)
    # The 'data' and 'element' categorizations should be stripped out on
    # import, while 'object' still allows typed recovery of all objects
    assert len(proj) == 6
    counts = (len(proj.geometries().values()),
              len(proj.phases().values()),
              len(proj.physics().values()))
    assert counts == (2, 1, 2)
def load(cls, filename, project=None, delim=' | '):
    r"""
    Read in pore and throat data from a saved VTK file.

    Parameters
    ----------
    filename : string (optional)
        The name of the file containing the data to import.  The
        formatting of this file is outlined below.
    project : OpenPNM Project object
        A GenericNetwork is created and added to the specified Project.
        If no Project is supplied then one will be created and returned.
    delim : string
        The string separating object name from property name in the
        stored array names (passed through to ``Dict.from_dict``).
    """
    fname = cls._parse_filename(filename, ext='vtp')
    piece = ET.parse(fname).find('PolyData').find('Piece')
    # Throat connectivity lives in the 'Lines' element (2 columns)
    conns = VTK._element_to_array(piece.find('Lines').find('DataArray'), 2)
    # Pore coordinates live in the 'Points' element (3 columns)
    coords = VTK._element_to_array(piece.find('Points').find('DataArray'), 3)
    # Pore properties are stored under 'PointData', throat properties
    # under 'CellData'; both are flat name -> array mappings
    net = {}
    for section in ('PointData', 'CellData'):
        for element in piece.find(section).iter('DataArray'):
            net[element.get('Name')] = VTK._element_to_array(element)
    if project is None:
        project = ws.new_project()
    project = Dict.from_dict(dct=net, project=project, delim=delim)
    # Clean up data values, if necessary, like convert array's of
    # 1's and 0's into boolean.
    project = cls._convert_data(project)
    # Attach the topology arrays to the recovered network
    network = project.network
    network.update({'throat.conns': conns})
    network.update({'pore.coords': coords})
    return project
def load(cls, filename, project=None, delim=' | '):
    r"""
    Opens a 'csv' file, reads in the data, and adds it to the **Network**

    Parameters
    ----------
    filename : string (optional)
        The name of the file containing the data to import.  The
        formatting of this file is outlined below.
    project : OpenPNM Project object
        A GenericNetwork is created and added to the specified Project.
        If no Project object is supplied then one will be created and
        returned.
    delim : string
        The string separating object name from property name in the
        column headings (passed through to ``Dict.from_dict``).
    """
    if project is None:
        project = ws.new_project()
    fname = cls._parse_filename(filename, ext='csv')
    # ``read_table`` is deprecated in pandas; ``read_csv`` with an
    # explicit ``sep`` is the equivalent modern call.
    a = pd.read_csv(filepath_or_buffer=fname,
                    sep=',',
                    skipinitialspace=True,
                    index_col=False,
                    true_values=['T', 't', 'True', 'true', 'TRUE'],
                    false_values=['F', 'f', 'False', 'false', 'FALSE'])
    dct = {}
    # Track columns already consumed by a merge so they are skipped on
    # later iterations.  (The previous version popped entries out of the
    # ``keys`` list while iterating it — via a side-effecting list
    # comprehension — which skips elements and can drop columns.)
    merged = set()
    # Parse through all the items and re-merge multi-component columns
    for item in sorted(a.keys()):
        if item in merged:
            continue
        m = re.search(r'\[.\]', item)  # The dot '.' is a wildcard
        if m:  # m is None if pattern not found, otherwise merge cols
            pname = re.split(r'\[.\]', item)[0]  # Get base propname
            # Find all other keys with same base propname
            merge_keys = [k for k in a.keys() if k.startswith(pname)]
            # Retrieve and remove arrays with same base propname
            merge_cols = [a.pop(k) for k in merge_keys]
            # Merge arrays into multi-column array and store in DataFrame
            dct[pname] = sp.vstack(merge_cols).T
            # Remember every component column so it is not revisited
            merged.update(merge_keys)
        else:
            dct[item] = sp.array(a.pop(item))
    project = Dict.from_dict(dct, project=project, delim=delim)
    return project
def load(cls, filename, project=None):
    r"""
    Loads data onto the given network from an appropriately formatted
    'mat' file (i.e. MatLAB output).

    Parameters
    ----------
    filename : string (optional)
        The name of the file containing the data to import.  The
        formatting of this file is outlined below.
    project : OpenPNM Project object
        A GenericNetwork is created and added to the specified Project.
        If no Project object is supplied then one will be created and
        returned.

    Returns
    -------
    If no project object is supplied then one will be created and
    returned.
    """
    filename = cls._parse_filename(filename=filename, ext='mat')
    data = spio.loadmat(filename)
    # Reinsert the '.' separator into the array names.  Iterate over a
    # snapshot of the keys since entries are popped/added along the way.
    for item in list(data.keys()):
        if item in ['__header__', '__version__', '__globals__']:
            # Metadata injected by loadmat — discard it
            data.pop(item)
            continue
        elif '_pore_' in item:
            path, prop = item.split('_pore_')
            new_key = path + '|pore.' + prop
        elif '_throat_' in item:
            path, prop = item.split('_throat_')
            new_key = path + '|throat.' + prop
        else:
            # Key matches neither pattern: leave it untouched.  (The
            # previous version fell through here and reused a stale
            # ``new_key`` — or raised NameError on the first unmatched
            # key — clobbering a previously renamed entry.)
            continue
        data[new_key] = data.pop(item)
    if project is None:
        project = ws.new_project()
    project = Dict.from_dict(data, project=project, delim='|')
    project = cls._convert_data(project)
    return project