Example #1
    def _clean_casedir(self):
        "Cleans out all files produced by cbcpost in the current casedir."
        MPI.barrier(mpi_comm_world())
        if on_master_process():
            if os.path.isdir(self.get_casedir()):
                try:
                    playlog = self._fetch_playlog()
                except Exception:
                    playlog = None

                if playlog is not None:
                    all_fields = []
                    for v in playlog.values():
                        all_fields += v.get("fields", {}).keys()

                    all_fields = list(set(all_fields))
                    playlog.close()

                    for field in all_fields:
                        rmtree(os.path.join(self.get_casedir(), field))

                    for f in ["mesh.hdf5", "play.db", "params.txt",
                              "params.pickle"]:
                        if os.path.isfile(os.path.join(self.get_casedir(), f)):
                            os.remove(os.path.join(self.get_casedir(), f))

        MPI.barrier(mpi_comm_world())
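
The backbone of every helper in these examples is the same idiom: synchronize, let only the master rank touch the filesystem, then synchronize again so no rank proceeds until the files are gone. A minimal standalone sketch of that idiom, written against mpi4py rather than cbcpost's MPI/mpi_comm_world wrappers (the helper names here are illustrative, not cbcpost's API):

    import os
    from mpi4py import MPI

    def on_master_process():
        return MPI.COMM_WORLD.Get_rank() == 0

    def remove_on_master(path):
        MPI.COMM_WORLD.Barrier()    # let any pending writers finish
        if on_master_process():
            if os.path.isfile(path):
                os.remove(path)     # only rank 0 touches the filesystem
        MPI.COMM_WORLD.Barrier()    # everyone waits until the file is gone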
Example #2
    def _clean_xdmf(self, fieldname, del_metadata):
        basename = os.path.join(self._pp.get_savedir(fieldname), fieldname)
        if os.path.isfile(basename + ".xdmf"):
            MPI.barrier(mpi_comm_world())

            i = 0
            while True:
                h5_filename = basename + "_RS" + str(i) + ".h5"
                if not os.path.isfile(h5_filename):
                    break
                i = i + 1

            xdmf_filename = basename + "_RS" + str(i) + ".xdmf"
            MPI.barrier(mpi_comm_world())

            if on_master_process():
                os.rename(basename + ".h5", h5_filename)
                os.rename(basename + ".xdmf", xdmf_filename)

                # Rewrite the .xdmf so it references the renamed .h5 file
                with open(xdmf_filename, 'r') as f:
                    content = f.read()

                with open(xdmf_filename, 'w') as new_f:
                    new_f.write(
                        content.replace(
                            os.path.split(basename)[1] + ".h5",
                            os.path.split(h5_filename)[1]))
        MPI.barrier(mpi_comm_world())
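
The XDMF format keeps light metadata in the .xdmf file and names its heavy-data .h5 companion in plain text, which is why the rename must be followed by a textual search-and-replace. A standalone sketch of that retargeting step (paths hypothetical):

    def retarget_xdmf(xdmf_path, old_h5_name, new_h5_name):
        # The .xdmf stores the .h5 file name as plain text, so renaming
        # the .h5 requires rewriting that reference.
        with open(xdmf_path, 'r') as f:
            content = f.read()
        with open(xdmf_path, 'w') as f:
            f.write(content.replace(old_h5_name, new_h5_name))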
Example #3
    def _close_shelves(self):
        "Close all shelve files"
        if on_master_process():
            if self._playlog[self.get_casedir()] is not None:
                self._playlog[self.get_casedir()].sync()
                self._playlog[self.get_casedir()].close()
                self._playlog[self.get_casedir()] = None

            # Copy the keys up front: popping from the dict while iterating
            # over its live key view raises RuntimeError in Python 3.
            for key in list(self._datafile_cache.keys()):
                fieldname, saveformat = key
                if saveformat == "shelve":
                    f = self._datafile_cache.pop(key)
                    f.sync()
                    f.close()
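
The list(...) copy is the crux of the fix: popping from a dict while iterating over its live key view raises RuntimeError in Python 3. A minimal sketch of the same drain pattern on a plain dict of open shelves:

    def close_all(cache):
        for key in list(cache):     # copy the keys; we pop() inside the loop
            db = cache.pop(key)
            db.sync()               # flush pending writes to disk
            db.close()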
Example #4
    def _update_metadata_file(self, field_name, data, t, timestep, save_as,
                              metadata):
        "Update metadata shelve file from master process."
        if on_master_process():
            if self._metadata_cache.get(field_name) is None:
                savedir = self.get_savedir(field_name)
                metadata_filename = os.path.join(savedir, 'metadata.db')
                metadata_file = shelve.open(metadata_filename)
                self._metadata_cache[field_name] = metadata_file

            metadata_file = self._metadata_cache[field_name]

            # Store some data the first time
            if "type" not in metadata_file and data is not None:
                # Data about type and formats
                metadata_file["type"] = type(data).__name__
                metadata_file["saveformats"] = list(
                    set(save_as + metadata_file.get("saveformats", [])))
                # Data about function space
                if isinstance(data, Function):
                    if LooseVersion(dolfin_version()) > LooseVersion("1.6.0"):
                        element = data.ufl_element()
                    else:
                        element = data.element()

                    metadata_file["element"] = repr(element, )
                    metadata_file["element_degree"] = repr(element.degree(), )
                    metadata_file["element_family"] = repr(element.family(), )
                    metadata_file["element_value_shape"] = repr(
                        element.value_shape(), )
            # Store some data each timestep. Build the complete entry first:
            # without writeback=True a shelve returns a fresh copy on every
            # lookup, so mutating metadata_file[str(timestep)] in place would
            # be silently lost.
            entry = dict(metadata)
            entry["t"] = t
            metadata_file[str(timestep)] = entry
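
The single-assignment store at the end is deliberate. Without writeback=True, a shelve unpickles a fresh copy on every lookup, so in-place mutation of an entry is silently lost, which is what the original two-line version did. A small demonstration (throwaway file name):

    import shelve

    with shelve.open("/tmp/demo_metadata") as db:
        db["0"] = {"hdf5": {}}
        db["0"]["t"] = 0.5          # mutates a temporary copy only
        assert "t" not in db["0"]   # the change never reached disk
        entry = db["0"]
        entry["t"] = 0.5
        db["0"] = entry             # reassigning the whole value persists
        assert db["0"]["t"] == 0.5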
Example #5
    def _correct_postprocessing(self, restart_timestep):
        "Removes data from casedir found at timestep>restart_timestep."
        playlog = self._pp.get_playlog('r')
        playlog_to_remove = {}
        for k, v in playlog.items():
            if int(k) >= restart_timestep:
                playlog_to_remove[k] = playlog[k]
        playlog.close()

        MPI.barrier(mpi_comm_world())
        if on_master_process():
            playlog = self._pp.get_playlog()
            for k in playlog_to_remove:
                playlog.pop(k)
            playlog.close()

        MPI.barrier(mpi_comm_world())
        all_fields_to_clean = []

        for k, v in playlog_to_remove.items():
            if "fields" in v:
                all_fields_to_clean += v["fields"].keys()
        all_fields_to_clean = list(set(all_fields_to_clean))
        for fieldname in all_fields_to_clean:
            self._clean_field(fieldname, restart_timestep)
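
The two-phase structure matters: every rank reads the playlog to agree on what to delete, but only the master writes. A condensed sketch of the same prune on a plain shelve (file name and cutoff hypothetical, on_master_process as in the earlier sketch):

    import shelve

    def prune_after(db_path, cutoff):
        with shelve.open(db_path, 'r') as db:       # read-only pass on all ranks
            doomed = [k for k in db if int(k) >= cutoff]
        if on_master_process():
            with shelve.open(db_path, 'w') as db:   # write pass on rank 0 only
                for k in doomed:
                    del db[k]
        return doomed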
Example #6
    def _update_playlog(self, t, timestep):
        "Update play log from master process with current time"
        if on_master_process():
            if self._playlog[self.get_casedir()] is None:
                self._playlog[self.get_casedir()] = self._fetch_playlog()
            if str(timestep) in self._playlog[self.get_casedir()]:
                return
            self._playlog[self.get_casedir()][str(timestep)] = {"t": float(t)}
Example #7
    def _clean_shelve(self, fieldname, del_metadata):
        shelvefilename = os.path.join(self._pp.get_savedir(fieldname),
                                      fieldname + ".db")
        if on_master_process():
            if os.path.isfile(shelvefilename):
                shelvefile = shelve.open(shelvefilename, 'c')
                for k, v in del_metadata.items():
                    if 'shelve' in v:
                        shelvefile.pop(str(k))
                shelvefile.close()
        MPI.barrier(mpi_comm_world())
Example #8
    def _clean_hdf5(self, fieldname, del_metadata):
        delete_from_hdf5_file = '''
        namespace dolfin {
            #include <hdf5.h>
            void delete_from_hdf5_file(const MPI_Comm comm,
                                       const std::string hdf5_filename,
                                       const std::string dataset,
                                       const bool use_mpiio)
            {
                // Open the existing file for append
                hid_t hdf5_file_id = HDF5Interface::open_file(comm, hdf5_filename, "a", use_mpiio);

                H5Ldelete(hdf5_file_id, dataset.c_str(), H5P_DEFAULT);
                HDF5Interface::close_file(hdf5_file_id);
            }
        }
        '''
        cpp_module = compile_extension_module(
            delete_from_hdf5_file,
            additional_system_headers=["dolfin/io/HDF5Interface.h"])

        hdf5filename = os.path.join(self._pp.get_savedir(fieldname),
                                    fieldname + '.hdf5')

        if not os.path.isfile(hdf5filename):
            return

        for k, v in del_metadata.items():
            if 'hdf5' in v:
                cpp_module.delete_from_hdf5_file(
                    mpi_comm_world(), hdf5filename, v['hdf5']['dataset'],
                    MPI.size(mpi_comm_world()) > 1)

        hdf5tmpfilename = os.path.join(self._pp.get_savedir(fieldname),
                                       fieldname + '_tmp.hdf5')
        MPI.barrier(mpi_comm_world())
        if on_master_process():
            # Check that h5repack is available before using it
            status, result = subprocess.getstatusoutput("h5repack -V")
            if status != 0:
                cbc_warning(
                    "Unable to run h5repack. Will not repack hdf5-files "
                    "before replay, which may cause bloated hdf5-files.")
            else:
                subprocess.call("h5repack %s %s" %
                                (hdf5filename, hdf5tmpfilename),
                                shell=True)
                os.remove(hdf5filename)
                os.rename(hdf5tmpfilename, hdf5filename)
        MPI.barrier(mpi_comm_world())
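
The compiled C++ helper dates from when dolfin's JIT was the most portable route to the HDF5 C API. If h5py can be assumed instead, the delete step is one statement; repacking is still a separate h5repack run, because HDF5 unlinks datasets without reclaiming their space:

    import h5py

    def delete_dataset(hdf5_path, dataset):
        with h5py.File(hdf5_path, 'a') as f:
            if dataset in f:
                del f[dataset]      # unlinks the dataset; space is not freed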
Example #9
    def _flush_data(self):
        "Flush data to disk"
        if on_master_process():
            for f in self._metadata_cache.values():
                f.sync()
            if self._playlog[self.get_casedir()] is not None:
                self._playlog[self.get_casedir()].sync()

            for key, f in self._datafile_cache.items():
                fieldname, saveformat = key
                if saveformat == "shelve":
                    f.sync()
Example #10
    def _update_txt_file(self, field_name, saveformat, data, timestep, t):
        "Update txt file with a string representation of the data."
        # TODO: Identify which more well defined data formats we need
        assert saveformat == "txt"
        fullname, metadata = self._get_datafile_name(field_name, saveformat,
                                                     timestep)
        if on_master_process():
            with open(fullname, 'a') as datafile:
                datafile.write(str(data))
                datafile.write("\n")

        return metadata
Example #11
    def _clean_txt(self, fieldname, del_metadata):
        txtfilename = os.path.join(self._pp.get_savedir(fieldname),
                                   fieldname + ".txt")
        if on_master_process() and os.path.isfile(txtfilename):
            with open(txtfilename, 'r') as txtfile:
                txtfilelines = txtfile.readlines()

            num_lines_to_strip = ['txt' in v
                                  for v in del_metadata.values()].count(True)

            # Guard against num_lines_to_strip == 0: the slice [:-0] is
            # empty and would wipe the whole file.
            if num_lines_to_strip > 0:
                with open(txtfilename, 'w') as txtfile:
                    txtfile.writelines(txtfilelines[:-num_lines_to_strip])
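
The new guard exists because Python's negative slicing has a sharp edge at zero: lines[:-n] drops the last n lines for n >= 1, but lines[:-0] is lines[:0], the empty list, so the unguarded version would truncate the whole file whenever nothing needed stripping:

    lines = ["a\n", "b\n", "c\n"]
    assert lines[:-1] == ["a\n", "b\n"]
    assert lines[:-0] == []     # would have emptied the file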
Example #12
    def _fill_playlog(self, field, timestep, save_as):
        "Update play log with the data that has been stored at this timestep"
        if on_master_process():
            if self._playlog[self.get_casedir()] is None:
                self._playlog[self.get_casedir()] = self._fetch_playlog()
            timestep_dict = dict(
                self._playlog[self.get_casedir()][str(timestep)])
            if "fields" not in timestep_dict:
                timestep_dict["fields"] = {}
            timestep_dict["fields"][field.name] = {
                "type": field.__class__.shortname(),
                "save_as": save_as
            }
            self._playlog[self.get_casedir()][str(timestep)] = timestep_dict
Example #13
    def _clean_files(self, fieldname, del_metadata):
        for k, v in del_metadata.items():
            for i in v.values():
                MPI.barrier(mpi_comm_world())
                # Skip entries that did not record a file on disk
                if not isinstance(i, dict) or "filename" not in i:
                    continue

                fullpath = os.path.join(self._pp.get_savedir(fieldname),
                                        i['filename'])

                if on_master_process():
                    os.remove(fullpath)
                MPI.barrier(mpi_comm_world())
Example #14
    def _update_shelve_file(self, field_name, saveformat, data, timestep, t):
        "Update shelve file with new data."
        assert saveformat == "shelve"
        fullname, metadata = self._get_datafile_name(field_name, saveformat,
                                                     timestep)
        if on_master_process():
            key = (field_name, saveformat)
            datafile = self._datafile_cache.get(key)
            if datafile is None:
                datafile = shelve.open(fullname)
                self._datafile_cache[key] = datafile

            # The shelve handle stays open in the cache; _flush_data() and
            # _close_shelves() take care of sync and close.
            datafile[str(timestep)] = data

        return metadata
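
Because each timestep is stored under its stringified index, reading the series back is a matter of sorting the numeric keys. A sketch, with a hypothetical casedir layout:

    import shelve

    with shelve.open("results/Pressure/Pressure.db", 'r') as db:
        series = sorted((int(k), db[k]) for k in db)
    for timestep, value in series:
        print(timestep, value)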
Example #15
    def _clean_field(self, fieldname, restart_timestep):
        "Deletes data from field found at timestep>restart_timestep."
        metadata = shelve.open(
            os.path.join(self._pp.get_savedir(fieldname), 'metadata.db'), 'r')
        metadata_to_remove = {}
        for k in metadata.keys():
            try:
                k = int(k)
            except ValueError:
                # Skip non-timestep keys ("type", "element", ...)
                continue
            if k >= restart_timestep:
                metadata_to_remove[str(k)] = metadata[str(k)]
        metadata.close()
        MPI.barrier(mpi_comm_world())
        if on_master_process():
            metadata = shelve.open(
                os.path.join(self._pp.get_savedir(fieldname), 'metadata.db'),
                'w')
            for key in metadata_to_remove:
                metadata.pop(key)
            metadata.close()
        MPI.barrier(mpi_comm_world())

        # Remove files and data for all save formats
        self._clean_hdf5(fieldname, metadata_to_remove)
        MPI.barrier(mpi_comm_world())
        self._clean_files(fieldname, metadata_to_remove)
        MPI.barrier(mpi_comm_world())

        self._clean_txt(fieldname, metadata_to_remove)
        MPI.barrier(mpi_comm_world())

        self._clean_shelve(fieldname, metadata_to_remove)
        MPI.barrier(mpi_comm_world())

        self._clean_xdmf(fieldname, metadata_to_remove)
        MPI.barrier(mpi_comm_world())

        self._clean_pvd(fieldname, metadata_to_remove)
        MPI.barrier(mpi_comm_world())
Example #16
def test_rollback_casedir(filled_casedir, mesh, t):
    spacepool = SpacePool(mesh)
    Q = spacepool.get_space(2, 0)
    V = spacepool.get_space(2, 1)

    D = mesh.geometry().dim()
    expr_scalar = Expression("1+x[0]*x[1]*t", degree=1, t=t)
    expr = Expression(("1+x[0]*t", "3+x[1]*t", "10+x[2]*t")[:D], degree=1, t=t)

    restart = Restart(
        dict(casedir=filled_casedir, rollback_casedir=True, restart_times=t))
    assert os.path.isfile(os.path.join(filled_casedir, "play.db"))

    data = restart.get_restart_conditions()

    playlog = shelve.open(os.path.join(filled_casedir, "play.db"), 'r')

    assert max([v["t"] for v in playlog.values()]) < t
    assert max([v["t"] for v in playlog.values()]) > t - 0.25 - 1e-14
    playlog.close()

    for d in os.listdir(filled_casedir):
        if not os.path.isdir(os.path.join(filled_casedir, d)):
            continue

        assert os.path.isfile(os.path.join(filled_casedir, d, "metadata.db"))
        md = shelve.open(os.path.join(filled_casedir, d, "metadata.db"), 'r')

        savetimes = {
            "shelve": [],
            "txt": [],
            "xml": [],
            "xml.gz": [],
            "hdf5": [],
            "pvd": [],
            "xdmf": []
        }
        assert max(
            [v.get("time", -1)
             for v in md.values() if isinstance(v, dict)]) < t
        for k in md:
            try:
                int(k)
            except ValueError:
                continue

            for sf in set(md[k].keys()).intersection(savetimes.keys()):
                savetimes[sf].append(k)

        md.close()

        for sf, st in savetimes.items():
            if st == []:
                continue
            if sf in ["xml", "xml.gz"]:
                xmlfiles = glob.glob1(os.path.join(filled_casedir, d),
                                      "*." + sf)
                assert sorted(xmlfiles) == sorted(
                    [d + i + "." + sf for i in st])
            elif sf == "shelve":
                if on_master_process():
                    data = shelve.open(
                        os.path.join(filled_casedir, d, d + ".db"))
                    assert sorted(data.keys()) == sorted(st)
                    data.close()
            elif sf == "txt":
                data = open(os.path.join(filled_casedir, d, d + ".txt"),
                            'r').readlines()
                assert len(data) == len(st)
            elif sf == "pvd":
                pass
            elif sf == "xdmf":
                xdmffiles = glob.glob1(os.path.join(filled_casedir, d),
                                       "*_RS0.xdmf")
                assert xdmffiles == [d + "_RS0.xdmf"]
                h5files = glob.glob1(os.path.join(filled_casedir, d),
                                     "*_RS0.h5")
                assert h5files == [d + "_RS0.h5"]
            elif sf == "hdf5":
                filename = os.path.join(filled_casedir, d, d + ".hdf5")
                assert os.path.isfile(filename)
                datasets = [d + i for i in st] + ['Mesh']

                cpp_code = """
                #include <hdf5.h>
                std::size_t size(MPI_Comm comm,
                          const std::string hdf5_filename,
                          bool use_mpiio)
                {
                    hid_t hdf5_file_id = HDF5Interface::open_file(comm, hdf5_filename, "r", use_mpiio);
                    std::size_t num_datasets = HDF5Interface::num_datasets_in_group(hdf5_file_id, "/");
                    HDF5Interface::close_file(hdf5_file_id);
                    return num_datasets;
                }
                """

                cpp_module = compile_extension_module(
                    cpp_code,
                    additional_system_headers=["dolfin/io/HDF5Interface.h"])
                num_datasets = cpp_module.size(mpi_comm_world(), filename,
                                               MPI.size(mpi_comm_world()) > 1)
                assert num_datasets == len(st) + 2

                f = HDF5File(mpi_comm_world(), filename, 'r')
                for ds in datasets:
                    assert f.has_dataset(ds)
                del f
                return
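
The compiled module in this test only counts the members of the HDF5 root group. Under the assumption that h5py is available, the same check is a few lines and avoids the JIT round-trip:

    import h5py

    def num_root_members(path):
        # Counts the objects directly under "/", which is what the
        # HDF5Interface::num_datasets_in_group call is used for above.
        with h5py.File(path, 'r') as f:
            return len(f["/"])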