def test_shelve_save(mesh, casedir):
    """Fields saved as 'shelve' must produce a metadata.db and a <name>.db
    containing one entry per timestep, with the last entry matching the
    field's current value."""
    tuple_field = MockTupleField(dict(save=True, save_as="shelve"))
    scalar_field = MockScalarField(dict(save=True, save_as="shelve"))

    pp = PostProcessor(dict(casedir=casedir))
    pp.add_fields([tuple_field, scalar_field])

    for timestep, t in enumerate((0.0, 0.1, 0.2)):
        pp.update_all({}, t, timestep)
    pp.finalize_all()

    for field in (tuple_field, scalar_field):
        savedir = pp.get_savedir(field.name)
        assert os.path.isdir(savedir)
        assert os.path.isfile(os.path.join(savedir, "metadata.db"))
        assert os.path.isfile(os.path.join(savedir, field.name + ".db"))

        metadata = shelve.open(os.path.join(savedir, "metadata.db"), 'r')
        assert 'shelve' in metadata["0"]
        assert metadata['saveformats'] == ['shelve']
        metadata.close()

        # Read back: every timestep key must exist; the last stored value
        # must equal what the postprocessor currently reports.
        data = shelve.open(os.path.join(savedir, field.name + ".db"), 'r')
        for key in ("0", "1", "2"):
            stored = data[key]
        data.close()
        assert stored == pp.get(field.name)
def test_basic_replay(mesh, casedir):
    """Run a mock 'solve' without saving derived fields, then replay the
    casedir with saving enabled and check the replayed derived fields match
    the originally computed values.

    Fix: the replayed value is now fetched from the shelve once and asserted
    to be present. The original expression
    ``data.get(str(i), None) == checks[i][name] or abs(data.get(str(i), None) - ...)``
    read the shelve twice and, when the key was missing, raised a TypeError
    (``None - float``) instead of failing with a clear assertion message.
    """
    spacepool = SpacePool(mesh)
    Q = spacepool.get_space(1, 0)
    V = spacepool.get_space(1, 1)

    pp = PostProcessor(dict(casedir=casedir))
    pp.add_fields([
        MockFunctionField(Q, dict(save=True)),
        MockVectorFunctionField(V, dict(save=True)),
    ])

    replay_fields = lambda save: [
        Norm("MockFunctionField", dict(save=save)),
        Norm("MockVectorFunctionField", dict(save=save)),
        TimeIntegral("Norm_MockFunctionField", dict(save=save)),
    ]
    rf_names = [f.name for f in replay_fields(False)]

    # Add fields, but don't save (for testing)
    pp.add_fields(replay_fields(False))

    # Solutions to check against
    checks = {}
    pp.update_all({}, 0.0, 0)
    checks[0] = dict((name, pp.get(name)) for name in rf_names)
    pp.update_all({}, 0.1, 1)
    checks[1] = dict((name, pp.get(name)) for name in rf_names)
    pp.update_all({}, 0.2, 2)
    pp.finalize_all()
    checks[2] = dict((name, pp.get(name)) for name in rf_names)

    # Make sure that nothing is saved yet
    for name in rf_names:
        assert not os.path.isfile(
            os.path.join(pp.get_savedir(name), name + ".db"))

    # ----------- Replay -----------------
    pp = PostProcessor(dict(casedir=casedir))
    pp.add_fields([
        MockFunctionField(Q),
        MockVectorFunctionField(V),
    ])

    # This time, save the fields
    pp.add_fields(replay_fields(True))

    replayer = Replay(pp)
    replayer.replay()

    # Test that replayed solution is the same as computed in the original "solve"
    for name in rf_names:
        data = shelve.open(
            os.path.join(pp.get_savedir(name), name + ".db"), 'r')
        for i in range(3):
            replayed = data.get(str(i), None)
            assert replayed is not None, \
                "Missing replayed data for %s at timestep %d" % (name, i)
            # Exact match, or numerically close for float-valued fields
            assert replayed == checks[i][name] or \
                abs(replayed - checks[i][name]) < 1e-8
        data.close()
def test_xmlgz_save(mesh, casedir):
    """Fields saved as 'xml.gz' must write metadata.db, mesh.hdf5 and one
    <name><i>.xml.gz snapshot per timestep, each loadable as a Function."""
    spacepool = SpacePool(mesh)
    Q = spacepool.get_space(1, 0)
    V = spacepool.get_space(1, 1)

    scalar_field = MockFunctionField(Q, dict(save=True, save_as="xml.gz"))
    vector_field = MockVectorFunctionField(V, dict(save=True, save_as="xml.gz"))

    pp = PostProcessor(dict(casedir=casedir))
    pp.add_fields([scalar_field, vector_field])

    for timestep, t in enumerate((0.0, 0.1, 0.2)):
        pp.update_all({}, t, timestep)
    pp.finalize_all()

    for field, space in ((scalar_field, Q), (vector_field, V)):
        savedir = pp.get_savedir(field.name)
        assert os.path.isdir(savedir)
        assert os.path.isfile(os.path.join(savedir, "metadata.db"))
        assert os.path.isfile(os.path.join(savedir, "mesh.hdf5"))
        for key in ("0", "1", "2"):
            assert os.path.isfile(
                os.path.join(savedir, field.name + key + ".xml.gz"))

        md = shelve.open(os.path.join(savedir, "metadata.db"), 'r')
        for key in ("0", "1", "2"):
            assert 'xml.gz' in md[key]
        assert 'xml.gz' in md['saveformats']
        assert md['saveformats'] == ['xml.gz']
        md.close()

        # metadata.db + mesh.hdf5 + three xml.gz snapshots
        assert len(os.listdir(savedir)) == 1 + 1 + 3

        # Read back each snapshot and compare norms with the live value
        for key in ("0", "1", "2"):
            f = Function(
                space, os.path.join(savedir, field.name + key + ".xml.gz"))
            assert norm(f) == norm(pp.get(field.name))
def test_hdf5_save(mesh, casedir):
    """Fields saved as 'hdf5' must write metadata.db plus a single
    <name>.hdf5 file holding one dataset per timestep."""
    spacepool = SpacePool(mesh)
    Q = spacepool.get_space(1, 0)
    V = spacepool.get_space(1, 1)

    scalar_field = MockFunctionField(Q, dict(save=True, save_as="hdf5"))
    vector_field = MockVectorFunctionField(V, dict(save=True, save_as="hdf5"))

    pp = PostProcessor(dict(casedir=casedir))
    pp.add_fields([scalar_field, vector_field])

    for timestep, t in enumerate((0.0, 0.1, 0.2)):
        pp.update_all({}, t, timestep)
    pp.finalize_all()

    for field, space in ((scalar_field, Q), (vector_field, V)):
        savedir = pp.get_savedir(field.name)
        assert os.path.isdir(savedir)
        assert os.path.isfile(os.path.join(savedir, "metadata.db"))
        assert os.path.isfile(os.path.join(savedir, field.name + ".hdf5"))

        md = shelve.open(os.path.join(savedir, "metadata.db"), 'r')
        for key in ("0", "1", "2"):
            assert 'hdf5' in md[key]
        assert 'hdf5' in md['saveformats']
        assert md['saveformats'] == ['hdf5']
        md.close()

        # Exactly metadata.db and the one hdf5 file
        assert len(os.listdir(savedir)) == 2

        # Read back each timestep dataset and compare norms
        hdf5file = HDF5File(
            mpi_comm_world(),
            os.path.join(savedir, field.name + ".hdf5"), 'r')
        f = Function(space)
        for key in ("0", "1", "2"):
            hdf5file.read(f, field.name + key)
            assert norm(f) == norm(pp.get(field.name))
class Restart(Parameterized):
    """Class to fetch restart conditions through.

    Reads previously saved solution data from a case directory (via a
    PostProcessor) and returns functions/values to restart a simulation
    from. Optionally rolls the case directory back so post-restart data
    can be written into the same casedir.
    """

    #def __init__(self, params=None):
    #    Parameterized.__init__(self, params)

    @classmethod
    def default_params(cls):
        """
        Default parameters are:

        +----------------------+-----------------------+-------------------------------------------------------------------+
        |Key                   | Default value         |  Description                                                      |
        +======================+=======================+===================================================================+
        | casedir              | '.'                   | Case directory - relative path to read solutions from             |
        +----------------------+-----------------------+-------------------------------------------------------------------+
        | restart_times        | -1                    | float or list of floats to find restart times from. If -1,        |
        |                      |                       | restart from last available time.                                 |
        +----------------------+-----------------------+-------------------------------------------------------------------+
        | solution_names       | 'default'             | Solution names to look for. If 'default', will fetch all          |
        |                      |                       | fields stored as SolutionField.                                   |
        +----------------------+-----------------------+-------------------------------------------------------------------+
        | rollback_casedir     | False                 | Rollback case directory by removing all items stored after        |
        |                      |                       | largest restart time. This allows for saving data from a          |
        |                      |                       | restarted simulation in the same case directory.                  |
        +----------------------+-----------------------+-------------------------------------------------------------------+
        """
        params = ParamDict(
            casedir='.',
            restart_times=-1,
            #restart_timesteps=-1,
            solution_names="default",
            rollback_casedir=False,
            #interpolate=True,
            #dt=None,
        )
        return params

    def get_restart_conditions(self, function_spaces="default"):
        """Return restart conditions as requested.

        :param dict function_spaces: A dict of dolfin.FunctionSpace on which to
            return the restart conditions with solution name as key. If
            "default", function spaces are taken from the stored functions
            themselves.

        Returns a dict keyed by restart time; each value maps solution name
        to the restored function/value at that time. Also sets
        ``self.restart_timestep`` to the first playlog timestep at (or just
        after) the largest restart time, and — if ``rollback_casedir`` is
        set — removes all casedir data from that timestep onwards.
        """
        self._pp = PostProcessor(
            dict(casedir=self.params.casedir, clean_casedir=False))
        playlog = self._pp.get_playlog('r')
        assert playlog != {}, "Playlog is empty! Unable to find restart data."

        # Locate which solutions are available in the casedir, and which
        # saved snapshots are needed for each requested restart time.
        loadable_solutions = find_solution_presence(self._pp, playlog,
                                                    self.params.solution_names)
        loadables = find_restart_items(self.params.restart_times,
                                       loadable_solutions)

        if function_spaces != "default":
            assert isinstance(
                function_spaces,
                dict), "Expecting function_spaces kwarg to be a dict"
            # NOTE(review): dict.values()[0] is Python 2-only; under Python 3
            # this needs list(loadables.values())[0] — confirm target version.
            assert set(loadables.values()[0].keys()) == set(
                function_spaces.keys(
                )), "Expecting a function space for each solution variable"

        def restart_conditions(spaces, loadables):
            # Build the restored value for each (restart time, solution name).
            # loadables[restart_time0][solution_name] = [(t0, Lt0)]            # will load Lt0
            # loadables[restart_time0][solution_name] = [(t0, Lt0), (t1, Lt1)] # will interpolate to restart_time
            functions = {}
            for t in loadables:
                functions[t] = dict()
                for solution_name in loadables[t]:
                    assert len(loadables[t][solution_name]) in [1, 2]
                    if len(loadables[t][solution_name]) == 1:
                        # Exact snapshot available: load it directly.
                        f = loadables[t][solution_name][0][1]()
                    elif len(loadables[t][solution_name]) == 2:
                        # Interpolate linearly in time between the two
                        # bracketing snapshots (t0 <= t <= t1).
                        t0, Lt0 = loadables[t][solution_name][0]
                        t1, Lt1 = loadables[t][solution_name][1]
                        assert t0 <= t <= t1
                        if Lt0.function is not None:
                            # Function-valued data: interpolate on vectors.
                            # The copy-function raise a PETSc-error in parallel
                            #f = Function(Lt0())
                            f0 = Lt0()
                            f = Function(f0.function_space())
                            f.vector().axpy(1.0, f0.vector())
                            del f0
                            df = Lt1().vector()
                            df.axpy(-1.0, f.vector())
                            # f = f0 + (t-t0)/(t1-t0) * (f1-f0)
                            f.vector().axpy((t - t0) / (t1 - t0), df)
                        else:
                            # Plain (scalar/tuple) data: interpolate
                            # componentwise, preserving the container type.
                            f0 = Lt0()
                            f1 = Lt1()
                            datatype = type(f0)
                            if not issubclass(datatype, Iterable):
                                f0 = [f0]
                                f1 = [f1]
                            f = []
                            for _f0, _f1 in zip(f0, f1):
                                val = _f0 + (t - t0) / (t1 - t0) * (_f1 - _f0)
                                f.append(val)
                            if not issubclass(datatype, Iterable):
                                f = f[0]
                            else:
                                f = datatype(f)
                    if solution_name in spaces:
                        # Move the function onto the requested space if it
                        # differs from the stored one.
                        space = spaces[solution_name]
                        if space != f.function_space():
                            #from fenicstools import interpolate_nonmatching_mesh
                            #f = interpolate_nonmatching_mesh(f, space)
                            try:
                                f = interpolate(f, space)
                            except:
                                # Fall back to projection when interpolation
                                # is not possible (e.g. non-matching meshes).
                                f = project(f, space)
                    functions[t][solution_name] = f
            return functions

        if function_spaces == "default":
            # Derive function spaces from the first restart time's loadables;
            # non-function fields (no .function attribute) are skipped.
            function_spaces = {}
            for fieldname in loadables.values()[0]:
                try:
                    function_spaces[fieldname] = loadables.values(
                    )[0][fieldname][0][1].function.function_space()
                except AttributeError:
                    # This was not a function field
                    pass

        result = restart_conditions(function_spaces, loadables)

        # Find the first playlog timestep at/after the largest restart time
        # (1e-14 guards against float round-off in stored times).
        ts = 0
        while playlog[str(ts)]["t"] < max(loadables) - 1e-14:
            ts += 1
        self.restart_timestep = ts
        playlog.close()
        MPI.barrier(mpi_comm_world())
        if self.params.rollback_casedir:
            self._correct_postprocessing(ts)
        return result

    def _correct_postprocessing(self, restart_timestep):
        "Removes data from casedir found at timestep>restart_timestep."
        # Collect entries to drop while the playlog is open read-only on all
        # processes; only the master then mutates the playlog.
        playlog = self._pp.get_playlog('r')
        playlog_to_remove = {}
        for k, v in playlog.items():
            if int(k) >= restart_timestep:
                #playlog_to_remove[k] = playlog.pop(k)
                playlog_to_remove[k] = playlog[k]
        playlog.close()
        MPI.barrier(mpi_comm_world())
        if on_master_process():
            playlog = self._pp.get_playlog()
            [playlog.pop(k) for k in playlog_to_remove.keys()]
            playlog.close()
        MPI.barrier(mpi_comm_world())

        # Gather the set of fields that had data in the removed timesteps
        # and clean each one's save directory.
        all_fields_to_clean = []
        for k, v in playlog_to_remove.items():
            if "fields" not in v:
                continue
            else:
                all_fields_to_clean += v["fields"].keys()
        all_fields_to_clean = list(set(all_fields_to_clean))
        for fieldname in all_fields_to_clean:
            self._clean_field(fieldname, restart_timestep)

    def _clean_field(self, fieldname, restart_timestep):
        "Deletes data from field found at timestep>restart_timestep."
        # Collect metadata entries for timesteps being removed (numeric keys
        # only; non-numeric keys like 'saveformats' are kept).
        metadata = shelve.open(
            os.path.join(self._pp.get_savedir(fieldname), 'metadata.db'), 'r')
        metadata_to_remove = {}
        for k in metadata.keys():
            #MPI.barrier(mpi_comm_world())
            try:
                k = int(k)
            except:
                continue
            if k >= restart_timestep:
                #metadata_to_remove[str(k)] = metadata.pop(str(k))
                metadata_to_remove[str(k)] = metadata[str(k)]
        metadata.close()
        MPI.barrier(mpi_comm_world())
        # Only the master process rewrites the metadata shelve.
        if on_master_process():
            metadata = shelve.open(
                os.path.join(self._pp.get_savedir(fieldname), 'metadata.db'),
                'w')
            [metadata.pop(key) for key in metadata_to_remove.keys()]
            metadata.close()
        MPI.barrier(mpi_comm_world())

        # Remove files and data for all save formats
        self._clean_hdf5(fieldname, metadata_to_remove)
        MPI.barrier(mpi_comm_world())
        self._clean_files(fieldname, metadata_to_remove)
        MPI.barrier(mpi_comm_world())
        self._clean_txt(fieldname, metadata_to_remove)
        MPI.barrier(mpi_comm_world())
        self._clean_shelve(fieldname, metadata_to_remove)
        MPI.barrier(mpi_comm_world())
        self._clean_xdmf(fieldname, metadata_to_remove)
        MPI.barrier(mpi_comm_world())
        self._clean_pvd(fieldname, metadata_to_remove)
        MPI.barrier(mpi_comm_world())

    def _clean_hdf5(self, fieldname, del_metadata):
        """Delete the listed datasets from <fieldname>.hdf5 via a JIT-compiled
        C++ helper, then (optionally) repack the file to reclaim space."""
        # C++ source for the dataset-deletion helper; compiled with dolfin's
        # extension-module JIT below. Must stay byte-identical to compile.
        delete_from_hdf5_file = '''
namespace dolfin {
    #include <hdf5.h>  
    void delete_from_hdf5_file(const MPI_Comm comm,
                               const std::string hdf5_filename,
                               const std::string dataset,
                               const bool use_mpiio)
    {
        //const hid_t plist_id = H5Pcreate(H5P_FILE_ACCESS);
        // Open file existing file for append
        //hid_t file_id = H5Fopen(filename.c_str(), H5F_ACC_RDWR, plist_id);
        hid_t hdf5_file_id = HDF5Interface::open_file(comm, hdf5_filename, "a", use_mpiio);

        H5Ldelete(hdf5_file_id, dataset.c_str(), H5P_DEFAULT);

        HDF5Interface::close_file(hdf5_file_id);
    }
}
'''
        cpp_module = compile_extension_module(
            delete_from_hdf5_file,
            additional_system_headers=["dolfin/io/HDF5Interface.h"])

        hdf5filename = os.path.join(self._pp.get_savedir(fieldname),
                                    fieldname + '.hdf5')
        if not os.path.isfile(hdf5filename):
            return
        # Delete each removed timestep's dataset from the hdf5 file.
        for k, v in del_metadata.items():
            if 'hdf5' not in v:
                continue
            else:
                cpp_module.delete_from_hdf5_file(
                    mpi_comm_world(), hdf5filename, v['hdf5']['dataset'],
                    MPI.size(mpi_comm_world()) > 1)

        hdf5tmpfilename = os.path.join(self._pp.get_savedir(fieldname),
                                       fieldname + '_tmp.hdf5')
        #import ipdb; ipdb.set_trace()
        MPI.barrier(mpi_comm_world())
        if on_master_process():
            # h5repack rewrites the file so deleted datasets actually free
            # disk space. The check is currently stubbed out (status = -1),
            # so the repack branch never runs and only the warning is issued.
            # status, result = getstatusoutput("h5repack -V")
            status, result = -1, -1
            if status != 0:
                cbc_warning(
                    "Unable to run h5repack. Will not repack hdf5-files before replay, which may cause bloated hdf5-files."
                )
            else:
                subprocess.call("h5repack %s %s" %
                                (hdf5filename, hdf5tmpfilename),
                                shell=True)
                os.remove(hdf5filename)
                os.rename(hdf5tmpfilename, hdf5filename)
        MPI.barrier(mpi_comm_world())

    def _clean_files(self, fieldname, del_metadata):
        """Remove per-timestep save files (entries carrying a 'filename')
        listed in del_metadata; deletion happens on the master process only."""
        for k, v in del_metadata.items():
            for i in v.values():
                MPI.barrier(mpi_comm_world())
                # Skip entries without an associated file (EAFP probe).
                try:
                    i["filename"]
                except:
                    continue
                fullpath = os.path.join(self._pp.get_savedir(fieldname),
                                        i['filename'])
                if on_master_process():
                    os.remove(fullpath)
                MPI.barrier(mpi_comm_world())
        """
        #print k,v
        if 'filename' not in v:
            continue
        else:
            fullpath = os.path.join(self.postprocesor.get_savedir(fieldname), v['filename'])
            os.remove(fullpath)
        """

    def _clean_txt(self, fieldname, del_metadata):
        """Strip the trailing lines of <fieldname>.txt corresponding to the
        removed timesteps (one line per 'txt' entry in del_metadata)."""
        txtfilename = os.path.join(self._pp.get_savedir(fieldname),
                                   fieldname + ".txt")
        if on_master_process() and os.path.isfile(txtfilename):
            txtfile = open(txtfilename, 'r')
            txtfilelines = txtfile.readlines()
            txtfile.close()

            num_lines_to_strp = ['txt' in v
                                 for v in del_metadata.values()].count(True)

            txtfile = open(txtfilename, 'w')
            [txtfile.write(l) for l in txtfilelines[:-num_lines_to_strp]]
            txtfile.close()

    def _clean_shelve(self, fieldname, del_metadata):
        """Pop removed-timestep entries from <fieldname>.db on the master
        process."""
        shelvefilename = os.path.join(self._pp.get_savedir(fieldname),
                                      fieldname + ".db")
        if on_master_process():
            if os.path.isfile(shelvefilename):
                shelvefile = shelve.open(shelvefilename, 'c')
                for k, v in del_metadata.items():
                    if 'shelve' in v:
                        shelvefile.pop(str(k))
                shelvefile.close()
        MPI.barrier(mpi_comm_world())

    def _clean_xdmf(self, fieldname, del_metadata):
        """Archive the existing xdmf/h5 pair under a fresh *_RS<i> suffix
        (so a restarted run can write new ones) and patch the archived xdmf
        to reference the renamed h5 file."""
        basename = os.path.join(self._pp.get_savedir(fieldname), fieldname)
        if os.path.isfile(basename + ".xdmf"):
            MPI.barrier(mpi_comm_world())

            # Find the first unused restart-suffix index.
            i = 0
            while True:
                h5_filename = basename + "_RS" + str(i) + ".h5"
                if not os.path.isfile(h5_filename):
                    break
                i = i + 1

            xdmf_filename = basename + "_RS" + str(i) + ".xdmf"
            MPI.barrier(mpi_comm_world())

            if on_master_process():
                os.rename(basename + ".h5", h5_filename)
                os.rename(basename + ".xdmf", xdmf_filename)

                # Rewrite the xdmf so its HeavyData reference points at the
                # renamed h5 file.
                f = open(xdmf_filename, 'r').read()
                new_f = open(xdmf_filename, 'w')
                new_f.write(
                    f.replace(
                        os.path.split(basename)[1] + ".h5",
                        os.path.split(h5_filename)[1]))
                new_f.close()
        MPI.barrier(mpi_comm_world())

    def _clean_pvd(self, fieldname, del_metadata):
        """pvd cleanup is not implemented; warn that existing pvd output will
        simply be overwritten on restart."""
        if os.path.isfile(
                os.path.join(self._pp.get_savedir(fieldname),
                             fieldname + '.pvd')):
            cbc_warning(
                "No functionality for cleaning pvd-files for restart. Will overwrite."
            )
def test_pvd_save(mesh, casedir):
    """Fields saved as 'pvd' must write metadata.db, a .pvd index and the
    per-timestep vtu files (pvtu + per-process vtu when run in parallel)."""
    spacepool = SpacePool(mesh)
    Q = spacepool.get_space(1, 0)
    V = spacepool.get_space(1, 1)

    scalar_field = MockFunctionField(Q, dict(save=True, save_as="pvd"))
    vector_field = MockVectorFunctionField(V, dict(save=True, save_as="pvd"))

    pp = PostProcessor(dict(casedir=casedir))
    pp.add_fields([scalar_field, vector_field])

    for timestep, t in enumerate((0.0, 0.1, 0.2)):
        pp.update_all({}, t, timestep)
    pp.finalize_all()

    nprocs = MPI.size(mpi_comm_world())
    for field, space in ((scalar_field, Q), (vector_field, V)):
        savedir = pp.get_savedir(field.name)
        assert os.path.isdir(savedir)
        assert os.path.isfile(os.path.join(savedir, "metadata.db"))
        assert os.path.isfile(os.path.join(savedir, field.name + ".pvd"))

        if nprocs == 1:
            # Serial: plain vtu files, one per timestep
            for timestep in range(3):
                assert os.path.isfile(
                    os.path.join(savedir,
                                 field.name + "%0.6d.vtu" % timestep))
        else:
            # Parallel: a pvtu index plus one vtu per process per timestep
            for timestep in range(3):
                assert os.path.isfile(
                    os.path.join(savedir,
                                 field.name + "%0.6d.pvtu" % timestep))
            for rank in range(nprocs):
                for timestep in range(3):
                    assert os.path.isfile(
                        os.path.join(
                            savedir,
                            field.name + "_p%d_%0.6d.vtu" % (rank, timestep)))

        md = shelve.open(os.path.join(savedir, "metadata.db"), 'r')
        for key in ("0", "1", "2"):
            assert 'pvd' in md[key]
        assert md['saveformats'] == ['pvd']
        md.close()

        # metadata.db + .pvd + 3 (p)vtu, plus per-process vtu in parallel
        assert len(os.listdir(savedir)) == \
            1 + 1 + 3 + int(nprocs != 1) * nprocs * 3
def test_default_save(mesh, casedir):
    """Without an explicit save_as, function-valued fields default to
    hdf5+xdmf and plain tuple/scalar fields default to txt+shelve."""
    spacepool = SpacePool(mesh)
    Q = spacepool.get_space(1, 0)
    V = spacepool.get_space(1, 1)

    function_fields = [
        MockFunctionField(Q, dict(save=True)),
        MockVectorFunctionField(V, dict(save=True)),
    ]
    plain_fields = [
        MockTupleField(dict(save=True)),
        MockScalarField(dict(save=True)),
    ]

    pp = PostProcessor(dict(casedir=casedir))
    pp.add_fields(function_fields + plain_fields)

    for timestep, t in enumerate((0.0, 0.1, 0.2)):
        pp.update_all({}, t, timestep)
    pp.finalize_all()

    # Function-valued fields: hdf5 + xdmf (.hdf5, .h5, .xdmf files)
    for field in function_fields:
        savedir = pp.get_savedir(field.name)
        assert os.path.isdir(savedir)
        assert os.path.isfile(os.path.join(savedir, "metadata.db"))
        for suffix in (".hdf5", ".h5", ".xdmf"):
            assert os.path.isfile(os.path.join(savedir, field.name + suffix))
        assert len(os.listdir(savedir)) == 4

        md = shelve.open(os.path.join(savedir, "metadata.db"), 'r')
        assert 'hdf5' in md["0"]
        assert 'hdf5' in md['saveformats']
        assert 'xdmf' in md["0"]
        assert 'xdmf' in md['saveformats']
        assert set(md['saveformats']) == set(['hdf5', 'xdmf'])
        md.close()

    # Tuple/scalar fields: txt + shelve (.txt, .db files)
    for field in plain_fields:
        savedir = pp.get_savedir(field.name)
        assert os.path.isdir(savedir)
        assert os.path.isfile(os.path.join(savedir, "metadata.db"))
        assert os.path.isfile(os.path.join(savedir, field.name + ".db"))
        assert os.path.isfile(os.path.join(savedir, field.name + ".txt"))

        md = shelve.open(os.path.join(savedir, "metadata.db"), 'r')
        assert 'txt' in md["0"]
        assert 'txt' in md['saveformats']
        assert 'shelve' in md["0"]
        assert 'shelve' in md['saveformats']
        assert set(md['saveformats']) == set(['txt', 'shelve'])
        md.close()