# NOTE: These are classmethods on the IO classes in ``openpnm.io``
# (``PNM`` for save/load_project, ``HDF5`` for to_hdf5, in OpenPNM 2.x);
# the class bodies and ``@classmethod`` decorators are omitted in this
# excerpt. The module-level imports below are reconstructed from the
# calls that follow; ``jsont`` is assumed to be ``json_tricks``, which
# can serialize the numpy 'object' arrays that the stdlib ``json``
# cannot.
import json
import json_tricks as jsont  # assumption, see note above
import numpy as np
from datetime import datetime
from flatdict import FlatDict
from h5py import File as hdfFile
from openpnm.io import Dict
from openpnm.utils import logging, Project, Workspace

logger = logging.getLogger(__name__)
ws = Workspace()


def load_project(cls, filename):
    f = cls._parse_filename(filename, 'pnm')
    with hdfFile(f, mode='r') as root:
        logger.info('Loading project from file ' + f.name)
        try:
            # Create an empty project with the old name
            proj = Project(name=root.attrs['name'])
            logger.info('Loading ' + proj.name)
        except Exception:
            # Generate a new name if a collision occurs
            proj = Project()
            logger.warning('A project named ' + root.attrs['name']
                           + ' already exists, renaming to ' + proj.name)
        logger.info('Created using OpenPNM version ' + root.attrs['version'])
        logger.info('Saved on ' + root.attrs['date saved'])
        # Temporarily raise the loglevel to silence messages emitted while
        # the objects are rebuilt
        loglevel = ws.settings['loglevel']
        ws.settings['loglevel'] = 50
        # Rebuild network objects first, since all other objects need an
        # existing network; ``create_obj`` is a helper defined elsewhere
        # in this module that recreates an object from its HDF5 group
        for name in root.keys():
            if 'network' in root[name].attrs['class']:
                proj, obj = create_obj(root, name, proj)
        # Then rebuild the remaining (non-network) objects
        for name in root.keys():
            if 'network' not in root[name].attrs['class']:
                proj, obj = create_obj(root, name, proj)
        ws.settings['loglevel'] = loglevel
    return proj
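
# Usage sketch (not from the original module): in OpenPNM 2.x these
# classmethods are reached through the IO classes in ``openpnm.io``,
# e.g. ``op.io.PNM.load_project``. The file name 'demo.pnm' below is a
# hypothetical placeholder, assumed to have been written previously by
# ``save_project`` (defined further down).
def _demo_load_project():
    import openpnm as op
    proj = op.io.PNM.load_project(filename='demo.pnm')
    pn = proj.network  # the rebuilt network object
    return proj, pn
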
def to_hdf5(cls, network=None, phases=[], element=['pore', 'throat'],
            filename='', interleave=True, flatten=False, categorize_by=[]):
    r"""
    Creates an HDF5 file containing data from the specified objects,
    categorized according to the given arguments.

    Parameters
    ----------
    network : OpenPNM Network Object
        The network containing the desired data

    phases : list of OpenPNM Phase Objects (optional, default is none)
        A list of phase objects whose data are to be included

    element : string or list of strings
        An indication of whether 'pore' and/or 'throat' data are desired.
        The default is both.

    interleave : boolean (default is ``True``)
        When ``True`` (default) the data from all Geometry objects (and
        Physics objects if ``phases`` are given) is interleaved into a
        single array and stored as a network property (or Phase property
        for Physics data). When ``False``, the data for each object are
        stored under their own dictionary key, the structuring of which
        depends on the value of the ``flatten`` argument.

    flatten : boolean (default is ``False``)
        When ``True``, all objects are accessible from the top level of
        the dictionary. When ``False`` objects are nested under their
        parent object. If ``interleave`` is ``True`` this argument is
        ignored.

    categorize_by : string or list of strings
        Indicates how the dictionaries should be organized. The list can
        contain any, all or none of the following strings:

        **'objects'** : If specified the dictionary keys will be stored
        under a general level corresponding to their type (e.g.
        'network/net_01/pore.all'). If ``interleave`` is ``True`` then
        the only categories are *network* and *phase*, since *geometry*
        and *physics* data get stored under their respective *network*
        and *phase*.

        **'data'** : If specified the data arrays are additionally
        categorized by ``label`` and ``property`` to separate *boolean*
        from *numeric* data.

        **'elements'** : If specified the data arrays are additionally
        categorized by ``pore`` and ``throat``, meaning that the
        propnames are no longer prepended by a 'pore.' or 'throat.'

    """
    project, network, phases = cls._parse_args(network=network,
                                               phases=phases)
    if filename == '':
        filename = project.name
    filename = cls._parse_filename(filename, ext='hdf')

    dct = Dict.to_dict(network=network, phases=phases, element=element,
                       interleave=interleave, flatten=flatten,
                       categorize_by=categorize_by)
    d = FlatDict(dct, delimiter='/')

    f = hdfFile(filename, "w")
    # Iterate over a static list of keys, since entries may be deleted
    # from ``d`` inside the loop
    for item in list(d.keys()):
        tempname = '_'.join(item.split('.'))
        arr = d[item]
        if arr.dtype == 'O':
            logger.warning(item + ' has dtype object,'
                           + ' will not write to file')
            del d[item]
        elif 'U' in str(arr[0].dtype):
            # Skip unicode string arrays, which h5py cannot store directly
            pass
        else:
            f.create_dataset(name='/' + tempname, shape=arr.shape,
                             dtype=arr.dtype, data=arr)
    return f
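
# Usage sketch (not from the original module): a minimal call to
# ``to_hdf5``, assuming OpenPNM 2.x where it is exposed as
# ``op.io.HDF5.to_hdf5``. Note that the open h5py File handle is
# returned, so the caller is responsible for closing it.
def _demo_to_hdf5():
    import openpnm as op
    pn = op.network.Cubic(shape=[3, 3, 3])
    op.geometry.StickAndBall(network=pn, pores=pn.Ps, throats=pn.Ts)
    f = op.io.HDF5.to_hdf5(network=pn, filename='demo',
                           categorize_by=['data', 'elements'])
    f.visit(print)  # print the resulting group/dataset tree
    f.close()
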
def save_project(cls, project, filename=None):
    if filename is None:
        filename = project.name + '.pnm'

    # Generate the file name with the correct extension
    f = cls._parse_filename(filename, 'pnm')
    with hdfFile(f, mode='w') as root:
        root.attrs['version'] = ws.version
        date = datetime.today().strftime("%Y %h %d %H:%M:%S")
        root.attrs['date saved'] = date
        root.attrs['name'] = project.name
        # root.attrs['comments'] = project.comments
        for obj in project:
            # Warn about custom attributes that will not survive the save
            found_attrs = set(obj.__dict__.keys())
            known_attrs = set(['settings', '_models_dict', '_am', '_im',
                               '_spacing', '_shape'])
            foreign_attrs = found_attrs.difference(known_attrs)
            if len(foreign_attrs) > 0:
                line_break = f"\n{'':13}"
                logger.critical(
                    f"{obj.name} has the following attributes that will"
                    + f" not be saved: {list(foreign_attrs)}"
                    + f"{line_break}Consider using Pickle instead")
            item = root.create_group(obj.name)
            # Store data
            for arr in obj.keys():
                try:
                    item.create_dataset(name=arr, data=obj[arr],
                                        shape=obj[arr].shape,
                                        compression="gzip")
                except TypeError:  # Deal with 'object' arrays
                    logger.warning(arr + ' is being converted to a string')
                    b = jsont.dumps(obj[arr])
                    c = b.encode()
                    d = np.void(c)
                    item.create_dataset(name=arr, data=d)
            # Store settings dict as metadata
            # item.attrs['settings'] = json.dumps(obj.settings)
            # Store models dict as metadata
            if hasattr(obj, 'models'):
                obj_models = {}
                model = None
                for model in obj.models.keys():
                    temp = {k: v for k, v in obj.models[model].items()
                            if k != 'model'}
                    if 'model' in obj.models[model].keys():
                        a = obj.models[model]['model']
                        # Store the model function as 'module|name' so it
                        # can be re-imported on load
                        temp['model'] = a.__module__ + '|' + \
                            a.__code__.co_name
                    obj_models[model] = temp
                try:
                    item.attrs['models'] = json.dumps(obj_models)
                except TypeError:
                    logger.critical(
                        f'The model {model} and its parameters'
                        + ' could not be written to file.')
            item.attrs['class'] = str(obj.__class__)
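
# Round-trip sketch (not from the original module): save a project with
# ``save_project`` and rebuild it with ``load_project``, assuming the
# OpenPNM 2.x entry points ``op.io.PNM.save_project`` and
# ``op.io.PNM.load_project``.
def _demo_save_load_roundtrip():
    import openpnm as op
    pn = op.network.Cubic(shape=[4, 4, 4], spacing=1e-4)
    op.io.PNM.save_project(project=pn.project, filename='demo.pnm')
    proj2 = op.io.PNM.load_project(filename='demo.pnm')
    assert proj2.network.Np == pn.Np  # pore count survives the round trip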