Example #1
    def save_file(content, directory, filename):
        if os.path.splitext(filename)[1] == "":
            filename = filename + ".txt"

        def save_file_task():
            with open(os.path.join(str(directory), filename), "w") as outfile:
                outfile.write(repr(content))

        parallel_io(save_file_task)
Example #2
    def _save_Nmax(self, directory, filename):
        def save_Nmax_task():
            with open(os.path.join(str(directory), filename + ".length"),
                      "w") as length:
                length.write(str(len(self)))

        parallel_io(save_Nmax_task)
Example #3
    def _basic_tensor_save(tensor, directory, filename):
        mpi_comm = tensor.mpi_comm()
        if not has_pybind11():
            mpi_comm = mpi_comm.tompi4py()
        assert hasattr(tensor, "generator")
        form = tensor.generator._form
        # Write out generator
        full_filename_generator = os.path.join(str(directory),
                                               filename + ".generator")
        form_name = wrapping.form_name(form)

        def save_generator():
            with open(full_filename_generator, "w") as generator_file:
                generator_file.write(form_name)

        parallel_io(save_generator, mpi_comm)
        # Write out generator mpi size
        full_filename_generator_mpi_size = os.path.join(
            str(directory), filename + ".generator_mpi_size")

        def save_generator_mpi_size():
            with open(full_filename_generator_mpi_size,
                      "w") as generator_mpi_size_file:
                generator_mpi_size_file.write(str(mpi_comm.size))

        parallel_io(save_generator_mpi_size, mpi_comm)
        # Write out generator mapping from processor dependent indices
        # to processor independent (global_cell_index, cell_dof) tuple
        _permutation_save(tensor, directory, form,
                          form_name + "_" + str(mpi_comm.size), mpi_comm)
        # Write out content
        _tensor_save(tensor, directory, filename, mpi_comm)
Example #4
def test_parallel_io_with_error_1():
    exception_message = "This test will fail"
    def task():
        raise RuntimeError(exception_message)
    with pytest.raises(RuntimeError) as excinfo:
        parallel_io(task)
    assert str(excinfo.value) == exception_message
Example #5
    def _basic_tensor_load(tensor, directory, filename):
        mpi_comm = tensor.mpi_comm()
        form = tensor.generator._form

        # Read in generator
        full_filename_generator = os.path.join(str(directory), filename + ".generator")

        def load_generator():
            if os.path.exists(full_filename_generator):
                with open(full_filename_generator, "r") as generator_file:
                    return generator_file.readline()
            else:
                raise OSError

        generator_string = parallel_io(load_generator, mpi_comm)

        # Read in generator mpi size
        full_filename_generator_mpi_size = os.path.join(str(directory), filename + ".generator_mpi_size")

        def load_generator_mpi_size():
            if os.path.exists(full_filename_generator_mpi_size):
                with open(full_filename_generator_mpi_size, "r") as generator_mpi_size_file:
                    return generator_mpi_size_file.readline()
            else:
                raise OSError

        generator_mpi_size_string = parallel_io(load_generator_mpi_size, mpi_comm)

        # Read in generator mapping from processor dependent indices (at the time of saving)
        # to processor independent (global_cell_index, cell_dof) tuple
        permutation = _permutation_load(tensor, directory, filename, form,
                                        generator_string + "_" + generator_mpi_size_string, mpi_comm)
        _tensor_load(tensor, directory, filename, permutation, mpi_comm)
Example #6
    def remove_files(directory, filename):
        full_filename = os.path.join(str(directory), filename)

        def remove_files_task():
            if os.path.exists(full_filename + "_index.sfx"):
                os.remove(full_filename + "_index.sfx")

        parallel_io(remove_files_task)
Example #7
    def save_file(content, directory, filename):
        if not filename.endswith(".npy"):
            filename = filename + ".npy"

        def save_file_task():
            numpy.save(os.path.join(str(directory), filename), content)

        parallel_io(save_file_task)
Example #8
def test_parallel_io_with_error_2():
    exception_message_1 = "This test"
    exception_message_2 = "will fail"
    def task():
        raise CustomError(exception_message_1, exception_message_2)
    with pytest.raises(CustomError) as excinfo:
        parallel_io(task)
    assert str(excinfo.value) == str((exception_message_1, exception_message_2))
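For the assertion in Example #8 to hold, CustomError only needs to be a plain Exception subclass that keeps both messages in its args tuple, since str(Exception("a", "b")) equals str(("a", "b")). A hypothetical definition consistent with this test (the actual one is not shown in the excerpt) is simply:

class CustomError(Exception):
    # str(CustomError("This test", "will fail")) == "('This test', 'will fail')"
    pass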
Example #9
    def save_file(content, directory, filename):
        if not filename.endswith(".sym"):
            filename = filename + ".sym"

        def save_file_task():
            with open(os.path.join(str(directory), filename), "w") as outfile:
                outfile.write(python(content))

        parallel_io(save_file_task)
Example #10
    def save_file(content, directory, filename):
        if not filename.endswith(".pkl"):
            filename = filename + ".pkl"

        def save_file_task():
            with open(os.path.join(str(directory), filename), "wb") as outfile:
                pickle.dump(content, outfile, protocol=pickle.HIGHEST_PROTOCOL)

        parallel_io(save_file_task)
Example #11
    def save_file(content, directory, filename):
        if not filename.endswith(".csv"):
            filename = filename + ".csv"

        def save_file_task():
            with open(os.path.join(str(directory), filename), "w") as outfile:
                writer = csv.writer(outfile, delimiter=";")
                writer.writerows(content)

        parallel_io(save_file_task)
Example #12
    def remove_files(directory, filename):
        SolutionFile_Base.remove_files(directory, filename)
        full_filename = os.path.join(str(directory), filename)

        def remove_files_task():
            if os.path.exists(full_filename + ".xdmf"):
                os.remove(full_filename + ".xdmf")
                os.remove(full_filename + ".h5")
                os.remove(full_filename + "_checkpoint.xdmf")
                os.remove(full_filename + "_checkpoint.h5")

        parallel_io(remove_files_task)
Example #13
        def patched_export_solution_internal(self_,
                                             folder=None,
                                             filename=None,
                                             *args,
                                             **kwargs):
            if str(folder) == str(snapshots_folder):
                assert (hasattr(truth_problem, "_cache_file_from_kwargs")
                        or hasattr(truth_problem, "_cache_file"))
                if hasattr(truth_problem,
                           "_cache_file_from_kwargs"):  # differential problem
                    cache_filename = truth_problem._cache_file_from_kwargs(
                        **truth_problem._latest_solve_kwargs)
                elif hasattr(truth_problem, "_cache_file"):  # EIM
                    cache_filename = truth_problem._cache_file()
                else:
                    raise AttributeError("Invalid cache file attribute.")

                def create_links():
                    for cache_path in glob.iglob(
                            os.path.join(str(cache_folder),
                                         cache_filename + "*")):
                        cache_path_filename = os.path.basename(cache_path)
                        cache_relpath = os.path.join(
                            os.path.relpath(str(cache_folder), str(folder)),
                            cache_path_filename)
                        snapshot_path = os.path.join(
                            str(folder),
                            cache_path_filename.replace(
                                cache_filename, filename))
                        if not os.path.exists(snapshot_path):
                            # Paraview output formats may require a light xml file that stores the path of a
                            # (possibly heavy) binary file. If the file is an xml file, we need to copy it and
                            # change the stored path. Otherwise, create a symbolic link.
                            try:
                                with open(cache_path, "r") as cache_file:
                                    header = cache_file.read(5)
                            except Exception:
                                should_link = True
                            else:
                                should_link = (header != "<?xml")
                            if should_link:
                                os.symlink(cache_relpath, snapshot_path)
                            else:
                                with open(cache_path, "r") as cache_file, open(
                                        snapshot_path, "w") as snapshot_file:
                                    for line in cache_file.readlines():
                                        snapshot_file.write(line.replace(
                                            cache_filename, filename))

                parallel_io(create_links)
            else:
                original_export_solution(folder, filename, *args, **kwargs)
Example #14
    def write(self, file_or_file_object):
        assert isinstance(file_or_file_object, str) or file_or_file_object is sys.stdout, (
            "Please provide a file name and not a file object (except for sys.stdout)")
        if isinstance(file_or_file_object, str):
            def write_config_parser():
                with open(file_or_file_object, "w") as file_:
                    self._config_as_parser.write(file_)
        else:
            assert file_or_file_object is sys.stdout

            def write_config_parser():
                self._config_as_parser.write(file_or_file_object)
        parallel_io(write_config_parser)
Example #15
    def _load_Nmax(self, directory, filename):
        def load_Nmax_task():
            with open(os.path.join(str(directory), filename + ".length"),
                      "r") as length:
                return int(length.readline())

        return parallel_io(load_Nmax_task)
Example #16
    def exists_file(directory, filename):
        if not filename.endswith(".sym"):
            filename = filename + ".sym"

        def exists_file_task():
            return os.path.exists(os.path.join(str(directory), filename))

        return parallel_io(exists_file_task)
Example #17
    def exists_file(directory, filename):
        if os.path.splitext(filename)[1] == "":
            filename = filename + ".txt"

        def exists_file_task():
            return os.path.exists(os.path.join(str(directory), filename))

        return parallel_io(exists_file_task)
Example #18
    def create(self):
        def create_task():
            # already created, but empty
            if os.path.exists(self.name) and len(os.listdir(self.name)) == 0:
                return True
            # to be created
            if not os.path.exists(self.name):
                os.makedirs(self.name)
                return True
            return False

        return parallel_io(create_task)
Example #19
File: config.py  Project: ljnpu/RBniCS
    def write(self, file_):
        def write_config_parser():
            self._config_as_parser.write(file_)

        parallel_io(write_config_parser)
Example #20
def test_parallel_io_with_return_value():
    def task():
        return COMM_WORLD.rank
    return_value = parallel_io(task)
    assert return_value == 0
Example #21
def test_parallel_io_without_return_value():
    def task():
        pass
    return_value = parallel_io(task)
    assert return_value is None
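Taken together, the tests in Examples #4, #8, #20 and #21 pin down the contract that every snippet above relies on: the task runs only on the I/O rank, its return value is made available on every rank, and any exception it raises is re-raised on every rank. A minimal sketch of a helper with these semantics, assuming mpi4py and a rank-0 I/O convention (this is an illustration of the contract, not the actual RBniCS implementation of parallel_io), could look like:

from mpi4py.MPI import COMM_WORLD


def parallel_io_sketch(task, mpi_comm=COMM_WORLD):
    # Run the task on the I/O rank only, then share the outcome with all ranks.
    return_value = None
    error = None
    if mpi_comm.rank == 0:
        try:
            return_value = task()
        except Exception as task_error:
            error = task_error
    # Broadcast both the result and any raised exception.
    return_value, error = mpi_comm.bcast((return_value, error), root=0)
    if error is not None:
        raise error
    return return_value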
Example #22
        def run_and_compare_to_gold_function(self):
            """
            Handles the comparison of test/tutorial with gold files
            """

            rootdir = str(self.config.rootdir)

            # Get action
            action = self.config.option.action
            assert action in ("compare", "regold", None)

            # Get data directory
            if action is not None:
                data_dir = self.config.option.data_dir
                assert data_dir is not None
            else:
                data_dir = None

            # Get current and reference directory
            current_dir = str(self.fspath.dirname)
            if action is not None:
                reference_dir = os.path.join(
                    current_dir.replace(rootdir, data_dir),
                    self.fspath.basename)
                current_dir = os.path.join(current_dir, subdirectory)
                reference_dir = os.path.join(reference_dir, subdirectory)
            else:
                reference_dir = None

            # Copy training and testing sets
            if action is not None:

                def copy_training_and_testing_sets():
                    for set_ in ("testing_set", "training_set"):
                        set_directories = glob.glob(
                            os.path.join(reference_dir, "**", set_),
                            recursive=True)
                        if action == "compare":
                            assert len(set_directories) > 0
                        for set_directory in set_directories:
                            set_directory = os.path.relpath(
                                set_directory, reference_dir)
                            if os.path.exists(
                                    os.path.join(reference_dir,
                                                 set_directory)):
                                if os.path.exists(
                                        os.path.join(current_dir,
                                                     set_directory)):
                                    shutil.rmtree(
                                        os.path.join(current_dir,
                                                     set_directory))
                                shutil.copytree(
                                    os.path.join(reference_dir, set_directory),
                                    os.path.join(current_dir, set_directory))

                parallel_io(copy_training_and_testing_sets)

            # Run test/tutorial
            runtest(self)

            # Process results
            def process_results():
                if action == "compare":
                    failures = list()
                    filenames = glob.glob(
                        os.path.join(reference_dir, "**", "*.*"),
                        recursive=True)
                    assert len(filenames) > 0
                    for filename in filenames:
                        filename = os.path.relpath(filename, reference_dir)
                        diffs = diff(os.path.join(reference_dir, filename),
                                     os.path.join(current_dir, filename))
                        if len(diffs) > 0:
                            failures.append(filename)
                            os.makedirs(os.path.dirname(
                                os.path.join(current_dir, filename + "_diff")),
                                        exist_ok=True)
                            with open(
                                    os.path.join(current_dir,
                                                 filename + "_diff"),
                                    "w") as failure_file:
                                failure_file.writelines(diffs)
                    if len(failures) > 0:
                        raise RuntimeError(
                            self.name +
                            ", comparison has failed for the following files: "
                            + str(failures) + ".")
                elif action == "regold":
                    data_dir_repo = git.Repo(data_dir)
                    assert not data_dir_repo.is_dirty()
                    # Move current files to reference directory
                    if os.path.exists(reference_dir):
                        shutil.rmtree(reference_dir)
                    shutil.copytree(current_dir, reference_dir)
                    if os.path.exists(os.path.join(reference_dir,
                                                   ".gitignore")):
                        os.remove(os.path.join(reference_dir, ".gitignore"))
                    data_dir_repo.git.add([reference_dir])
                    # Commit changes
                    commit = str(git.Repo(rootdir).head.reference.commit)
                    relpath = os.path.relpath(str(self.fspath), rootdir)
                    if self.name != relpath:
                        message = ("Automatic regold of " + self.name +
                                   " in " + relpath + " at upstream commit " +
                                   commit)
                    else:
                        message = "Automatic regold of " + relpath + " at upstream commit " + commit
                    data_dir_repo.git.commit(message=message)
                    # Clean repository
                    data_dir_repo.git.clean("-Xdf")

            parallel_io(process_results)
Example #23
    def _file_exists(directory, filename, mpi_comm):
        def file_exists_task():
            return os.path.exists(os.path.join(str(directory), filename))

        return parallel_io(file_exists_task, mpi_comm)
Example #24
File: folders.py  Project: mfkiwl/RBniCS
        def touch_file(self, filename):
            def touch_file_task():
                with open(os.path.join(self.name, filename), "a"):
                    os.utime(os.path.join(self.name, filename), None)

            parallel_io(touch_file_task)
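The save_file wrappers in Examples #1, #7, #9, #10 and #11 all follow the same three-step pattern: normalize the file extension, build the full path with os.path.join(str(directory), filename), and wrap the actual write in a zero-argument task handed to parallel_io so that only the I/O rank touches the file system. A hypothetical JSON backend written in the same style (it is not part of the excerpts above, and it assumes parallel_io is importable from the same utility module) would read:

import json
import os


def save_file(content, directory, filename):
    if not filename.endswith(".json"):
        filename = filename + ".json"

    def save_file_task():
        # Only the I/O rank executes this task.
        with open(os.path.join(str(directory), filename), "w") as outfile:
            json.dump(content, outfile)

    parallel_io(save_file_task)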