def _basic_tensor_load(tensor, directory, filename):
    """Load a tensor previously stored by _basic_tensor_save.

    Reads the generator (form name) and the MPI size used at save time,
    reconstructs the permutation mapping saved indices to the current
    distribution, and fills ``tensor`` in place.

    :param tensor: tensor to be filled; must carry a ``generator`` with a
        ``_form`` attribute (set at assembly time).
    :param directory: directory containing the stored files.
    :param filename: base name of the stored files (without extensions).
    :raises OSError: if either metadata file is missing on the I/O process
        (the failure flag is broadcast so every rank raises consistently).
    """
    mpi_comm = tensor.mpi_comm()
    # Legacy dolfin bindings return an MPI communicator wrapper that must be
    # converted to an mpi4py communicator before calling bcast on it
    if not has_pybind11():
        mpi_comm = mpi_comm.tompi4py()
    form = tensor.generator._form
    load_failed = False
    # Read in generator (the saved form name) on the I/O process only
    full_filename_generator = os.path.join(str(directory), filename + ".generator")
    generator_string = None
    if is_io_process(mpi_comm):
        if os.path.exists(full_filename_generator):
            with open(full_filename_generator, "r") as generator_file:
                generator_string = generator_file.readline()
        else:
            load_failed = True
    # Broadcast the failure flag first so that all ranks agree on whether to
    # raise; only on success broadcast the actual content
    if mpi_comm.bcast(load_failed, root=is_io_process.root):
        raise OSError
    else:
        generator_string = mpi_comm.bcast(generator_string, root=is_io_process.root)
    # Read in generator mpi size (number of ranks used when saving)
    full_filename_generator_mpi_size = os.path.join(str(directory), filename + ".generator_mpi_size")
    generator_mpi_size_string = None
    if is_io_process(mpi_comm):
        if os.path.exists(full_filename_generator_mpi_size):
            with open(full_filename_generator_mpi_size, "r") as generator_mpi_size_file:
                generator_mpi_size_string = generator_mpi_size_file.readline()
        else:
            load_failed = True
    if mpi_comm.bcast(load_failed, root=is_io_process.root):
        raise OSError
    else:
        generator_mpi_size_string = mpi_comm.bcast(generator_mpi_size_string, root=is_io_process.root)
    # Read in generator mapping from processor dependent indices (at the time of saving) to processor independent (global_cell_index, cell_dof) tuple
    permutation = _permutation_load(tensor, directory, filename, form, generator_string + "_" + generator_mpi_size_string, mpi_comm)
    _tensor_load(tensor, directory, filename, permutation)
def _basic_tensor_save(tensor, directory, filename):
    """Save a tensor together with the metadata needed to reload it.

    Writes the generator (form name), the current MPI size, the permutation
    mapping processor dependent indices to a processor independent
    (global_cell_index, cell_dof) tuple, and finally the tensor content.

    :param tensor: tensor to be saved; must carry a ``generator`` attribute
        with a ``_form`` attribute.
    :param directory: output directory.
    :param filename: base name for the output files (without extensions).
    """
    # Check for the generator attribute BEFORE dereferencing it below
    # (the original code asserted only after tensor.generator._form had
    # already been accessed, so the assert could never fire)
    assert hasattr(tensor, "generator")
    mpi_comm = tensor.mpi_comm()
    # Legacy dolfin bindings need an explicit conversion to mpi4py
    if not has_pybind11():
        mpi_comm = mpi_comm.tompi4py()
    form = tensor.generator._form
    # Write out generator (form name) on the I/O process only
    full_filename_generator = os.path.join(str(directory), filename + ".generator")
    form_name = wrapping.form_name(form)
    if is_io_process(mpi_comm):
        with open(full_filename_generator, "w") as generator_file:
            generator_file.write(form_name)
    # Write out generator mpi size, used at load time to pick the matching
    # permutation file
    full_filename_generator_mpi_size = os.path.join(
        str(directory), filename + ".generator_mpi_size")
    if is_io_process(mpi_comm):
        with open(full_filename_generator_mpi_size, "w") as generator_mpi_size_file:
            generator_mpi_size_file.write(str(mpi_comm.size))
    # Write out generator mapping from processor dependent indices to processor independent (global_cell_index, cell_dof) tuple
    _permutation_save(tensor, directory, form, form_name + "_" + str(mpi_comm.size), mpi_comm)
    # Write out content
    _tensor_save(tensor, directory, filename)
def create(self):
    """Create the folder if missing; return True when it is (now) empty.

    Returns True (broadcast to all ranks) either when the folder was just
    created or when it already existed but contains no entries, meaning the
    content still has to be generated.
    """
    needs_content = False
    if is_io_process():
        if os.path.exists(self.name):
            # Already created: report True only if it is still empty
            needs_content = len(os.listdir(self.name)) == 0
        else:
            # To be created
            needs_content = True
            os.makedirs(self.name)
    # Every rank must agree on the outcome
    return is_io_process.mpi_comm.bcast(needs_content, root=is_io_process.root)
def save_file(content, directory, filename):
    """Pickle *content* to ``directory/filename`` (".pkl" appended if missing).

    Only the I/O process writes; all ranks synchronize on a barrier so that
    the file is complete before anyone proceeds.
    """
    if not filename.endswith(".pkl"):
        filename += ".pkl"
    if is_io_process():
        destination = os.path.join(str(directory), filename)
        with open(destination, "wb") as outfile:
            pickle.dump(content, outfile, protocol=pickle.HIGHEST_PROTOCOL)
    is_io_process.mpi_comm.barrier()
def _read_from_xdmf_file(fun, directory, filename, suffix, components=None):
    """Read *fun* from an XDMF checkpoint file, returning True on success.

    Functions whose (rank, dimension) pair is not natively supported by XDMF
    are read component by component into subspaces and reassembled.

    :param fun: dolfin Function to be filled in place.
    :param directory: directory containing the checkpoint files.
    :param filename: base file name (component suffixes are appended).
    :param suffix: time-step index for time series, or None for a single read.
    :param components: optional component labels appended to the file name.
    :return: True if the checkpoint existed and was read, False otherwise.
    """
    if components is not None:
        filename = filename + "_component_" + "".join(components)
        function_name = "function_" + "".join(components)
    else:
        function_name = "function"
    fun_rank = fun.value_rank()
    fun_dim = product(fun.value_shape())
    assert fun_rank <= 2
    # NOTE: compare with == rather than "is": identity comparison of int
    # literals relies on CPython small-int caching and is a SyntaxWarning
    # on Python >= 3.8
    if ((fun_rank == 1 and fun_dim not in (2, 3))
            or (fun_rank == 2 and fun_dim not in (4, 9))):
        # Unsupported shape for XDMF: read each scalar component separately
        # and assign it into the corresponding subfunction
        fun_V = fun.function_space()
        for i in range(fun_dim):
            if components is not None:
                filename_i = filename + "_subcomponent_" + str(i)
            else:
                filename_i = filename + "_component_" + str(i)
            fun_i_V = get_function_subspace(fun_V, i)
            fun_i = Function(fun_i_V)
            if not _read_from_xdmf_file(fun_i, directory, filename_i, suffix, None):
                return False
            else:
                assign(fun.sub(i), fun_i)
        return True
    else:
        full_filename_checkpoint = os.path.join(str(directory), filename + "_checkpoint.xdmf")
        # Only the I/O process checks the filesystem; broadcast the result
        file_exists = False
        if is_io_process() and os.path.exists(full_filename_checkpoint):
            file_exists = True
        file_exists = is_io_process.mpi_comm.bcast(file_exists, root=is_io_process.root)
        if file_exists:
            if suffix is not None:
                assert SuffixIO.exists_file(directory, filename + "_suffix")
                last_suffix = SuffixIO.load_file(directory, filename + "_suffix")
                if suffix <= last_suffix:
                    # Keep one persistent XDMFFile per checkpoint path and
                    # enforce strictly sequential suffix reads
                    if full_filename_checkpoint in _all_xdmf_files:
                        assert _all_xdmf_latest_suffix[full_filename_checkpoint] == suffix - 1
                        _all_xdmf_latest_suffix[full_filename_checkpoint] = suffix
                    else:
                        assert suffix == 0
                        _all_xdmf_files[full_filename_checkpoint] = XDMFFile(full_filename_checkpoint)
                        _all_xdmf_latest_suffix[full_filename_checkpoint] = 0
                    _all_xdmf_files[full_filename_checkpoint].read_checkpoint(fun, function_name, suffix)
                    return True
                else:
                    return False
            else:
                with XDMFFile(full_filename_checkpoint) as file_checkpoint:
                    file_checkpoint.read_checkpoint(fun, function_name, 0)
                return True
        else:
            return False
def _load_Nmax(self, directory, filename):
    """Read the stored reduced mesh length from file and broadcast it.

    Only the I/O process touches the filesystem; every rank receives the
    value through the broadcast.
    """
    Nmax = None
    if is_io_process(self.mpi_comm):
        length_path = os.path.join(str(directory), filename, "reduced_mesh.length")
        with open(length_path, "r") as length_file:
            Nmax = int(length_file.readline())
    return self.mpi_comm.bcast(Nmax, root=is_io_process.root)
def save_file(content, directory, filename):
    """Write the symbolic *content* (via ``python``) to ``directory/filename``.

    The ".sym" extension is appended if missing; only the I/O process writes
    and all ranks synchronize on a barrier afterwards.
    """
    if not filename.endswith(".sym"):
        filename += ".sym"
    if is_io_process():
        target = os.path.join(str(directory), filename)
        with open(target, "w") as output_file:
            output_file.write(python(content))
    is_io_process.mpi_comm.barrier()
def exists_file(directory, filename):
    """Return True when ``directory/filename`` (with ".pkl" appended if
    missing) exists; the check runs on the I/O process and is broadcast."""
    if not filename.endswith(".pkl"):
        filename += ".pkl"
    exists = None
    if is_io_process():
        exists = os.path.exists(os.path.join(str(directory), filename))
    return is_io_process.mpi_comm.bcast(exists, root=is_io_process.root)
def _load_Nmax(self, directory, filename):
    """Read the stored length from ``<directory>/<filename>.length`` and
    broadcast it to all ranks.

    :return: the stored integer length (Nmax) on every rank.
    """
    Nmax = None
    if is_io_process(self.mpi_comm):
        # Use os.path.join instead of manual "/" concatenation, for
        # portability and for consistency with the other I/O helpers
        with open(os.path.join(str(directory), filename + ".length"), "r") as length:
            Nmax = int(length.readline())
    Nmax = self.mpi_comm.bcast(Nmax, root=is_io_process.root)
    return Nmax
def save_file(content, directory, filename):
    """Write *content* (an iterable of rows) as a semicolon-delimited CSV.

    The ".csv" extension is appended if missing; only the I/O process writes
    and all ranks synchronize on a barrier afterwards.
    """
    if not filename.endswith(".csv"):
        filename += ".csv"
    if is_io_process():
        target = os.path.join(str(directory), filename)
        with open(target, "w") as outfile:
            csv.writer(outfile, delimiter=";").writerows(content)
    is_io_process.mpi_comm.barrier()
def _file_exists(directory, filename):
    """Return True when ``directory/filename`` exists.

    The filesystem check runs on the I/O process only and the result is
    broadcast so every rank agrees.
    """
    found = bool(is_io_process()
                 and os.path.exists(os.path.join(str(directory), filename)))
    return is_io_process.mpi_comm.bcast(found, root=is_io_process.root)
def remove_files(directory, filename):
    """Remove the XDMF visualization and checkpoint files for a solution.

    Delegates to the base class first, then removes the .xdmf/.h5 pairs on
    the I/O process only.
    """
    SolutionFile_Base.remove_files(directory, filename)
    #
    full_filename = os.path.join(str(directory), filename)
    if is_io_process():
        # Check each file independently before removing it: the original code
        # only tested the first (.xdmf) and would raise FileNotFoundError if
        # any of the companion files was missing (e.g. after an interrupted run)
        for extension in (".xdmf", ".h5", "_checkpoint.xdmf", "_checkpoint.h5"):
            if os.path.exists(full_filename + extension):
                os.remove(full_filename + extension)
def generate(self, box, n, sampling=None):
    """Generate *n* parameter samples inside *box* and store them in self._list.

    :param box: sequence of per-parameter ranges; may be empty.
    :param n: number of samples to generate.
    :param sampling: None (uniform), a Distribution, or a tuple of
        per-parameter Distributions (composed component-wise).
    """
    if len(box) > 0:
        # Sample on the I/O process only, then broadcast, so that every rank
        # ends up with the same (random) training/testing set
        if is_io_process():
            if sampling is None:
                sampling = UniformDistribution()
            elif isinstance(sampling, tuple):
                assert len(sampling) == len(box)
                sampling = CompositeDistribution(sampling)
            self._list = sampling.sample(box, n)
        # Use is_io_process.root rather than a hard-coded 0, for consistency
        # with every other broadcast in this package
        self._list = is_io_process.mpi_comm.bcast(self._list, root=is_io_process.root)
    else:
        # Empty parameter box: fill with empty tuples so len(self._list) == n
        for i in range(n):
            self._list.append(tuple())
def _read_from_xml_file(fun, directory, filename, suffix):
    """Read *fun* from a dolfin XML file, returning True on success.

    When *suffix* is given it is appended to the file name (time series
    convention). Existence is checked on the I/O process and broadcast.
    """
    if suffix is not None:
        filename = filename + "." + str(suffix)
    full_filename = os.path.join(str(directory), filename + ".xml")
    # Only the I/O process queries the filesystem; all ranks get the answer
    file_exists = bool(is_io_process() and os.path.exists(full_filename))
    file_exists = is_io_process.mpi_comm.bcast(file_exists, root=is_io_process.root)
    if file_exists:
        file_ = File(full_filename)
        file_ >> fun
    return file_exists
def remove_files(directory, filename):
    """Remove the suffix index file for a solution, if present (I/O process only)."""
    index_file = os.path.join(str(directory), filename) + "_index.sfx"
    if is_io_process() and os.path.exists(index_file):
        os.remove(index_file)
def touch_file(self, filename):
    """Create *filename* inside this folder, or refresh its timestamp.

    Only the I/O process touches the file; all ranks synchronize afterwards.
    """
    if is_io_process():
        full_path = os.path.join(self.name, filename)
        # Opening in append mode creates the file when it does not exist yet
        with open(full_path, "a"):
            os.utime(full_path, None)
    is_io_process.mpi_comm.barrier()
def run_and_compare_to_gold_function(self):
    """ Handles the comparison of test/tutorial with gold files.

    Behavior depends on the --action option:
      * "compare": run the test, then diff every produced file against the
        gold copy stored in --data-dir, writing a "<file>_diff" next to each
        mismatch and raising RuntimeError listing all failures.
      * "regold": run the test, then replace the gold copies with the current
        output and commit the change to the data-dir git repository.
      * None: just run the test, no comparison.
    """
    rootdir = str(self.config.rootdir)
    # Get action
    action = self.config.option.action
    assert action in ("compare", "regold", None)
    # Get data directory (holds the gold files; required unless action is None)
    if action is not None:
        data_dir = self.config.option.data_dir
        assert data_dir is not None
    else:
        data_dir = None
    # Get current and reference directory
    current_dir = str(self.fspath.dirname)
    if action is not None:
        # Mirror the test location inside the data directory
        reference_dir = os.path.join(
            current_dir.replace(rootdir, data_dir), self.fspath.basename)
        # NOTE(review): "subdirectory" is not defined in this function —
        # presumably a module-level/global name; confirm where it is set
        current_dir = os.path.join(current_dir, subdirectory)
        reference_dir = os.path.join(reference_dir, subdirectory)
    else:
        reference_dir = None
    # Copy training and testing set from the gold directory so the run uses
    # the same parameter samples as the stored results
    if action is not None and is_io_process():
        for set_ in ("testing_set", "training_set"):
            set_directories = glob.glob(os.path.join(
                reference_dir, "**", set_), recursive=True)
            if action == "compare":
                assert len(set_directories) > 0
            for set_directory in set_directories:
                set_directory = os.path.relpath(
                    set_directory, reference_dir)
                if os.path.exists(
                        os.path.join(reference_dir, set_directory)):
                    if os.path.exists(
                            os.path.join(current_dir, set_directory)):
                        shutil.rmtree(
                            os.path.join(current_dir, set_directory))
                    shutil.copytree(
                        os.path.join(reference_dir, set_directory),
                        os.path.join(current_dir, set_directory))
    # Run test/tutorial
    runtest(self)
    # Process results (I/O process only)
    if is_io_process():
        if action == "compare":
            failures = list()
            filenames = glob.glob(os.path.join(reference_dir, "**", "*.*"),
                                  recursive=True)
            assert len(filenames) > 0
            for filename in filenames:
                filename = os.path.relpath(filename, reference_dir)
                diffs = diff(os.path.join(reference_dir, filename),
                             os.path.join(current_dir, filename))
                if len(diffs) > 0:
                    # Record the failure and store the diff next to the
                    # current output for later inspection
                    failures.append(filename)
                    os.makedirs(os.path.dirname(
                        os.path.join(current_dir, filename + "_diff")),
                        exist_ok=True)
                    with open(
                            os.path.join(current_dir, filename + "_diff"),
                            "w") as failure_file:
                        failure_file.writelines(diffs)
            if len(failures) > 0:
                raise RuntimeError(
                    self.name + ", comparison has failed for the following files: "
                    + str(failures) + ".")
        elif action == "regold":
            data_dir_repo = git.Repo(data_dir)
            # Refuse to regold on top of uncommitted changes
            assert not data_dir_repo.is_dirty()
            # Move current files to reference directory
            if os.path.exists(reference_dir):
                shutil.rmtree(reference_dir)
            shutil.copytree(current_dir, reference_dir)
            if os.path.exists(os.path.join(reference_dir, ".gitignore")):
                os.remove(os.path.join(reference_dir, ".gitignore"))
            data_dir_repo.git.add([reference_dir])
            # Commit changes, recording the upstream commit being regolded
            commit = str(git.Repo(rootdir).head.reference.commit)
            relpath = os.path.relpath(str(self.fspath), rootdir)
            if self.name != relpath:
                message = "Automatic regold of " + self.name + " in " + relpath + " at upstream commit " + commit
            else:
                message = "Automatic regold of " + relpath + " at upstream commit " + commit
            data_dir_repo.git.commit(message=message)
            # Clean repository (remove ignored leftovers)
            data_dir_repo.git.clean("-Xdf")
def _save_Nmax(self, directory, filename):
    """Write the reduced mesh length to file and synchronize all ranks."""
    if is_io_process(self.mpi_comm):
        length_path = os.path.join(str(directory), filename, "reduced_mesh.length")
        with open(length_path, "w") as length_file:
            length_file.write(str(len(self.reduced_mesh)))
    # Make sure the file is on disk before any rank proceeds
    self.mpi_comm.barrier()
def write(self, file_):
    """Write the wrapped ConfigParser content to *file_* (I/O process only),
    then synchronize all ranks on a barrier."""
    if is_io_process():
        self._config_as_parser.write(file_)
    is_io_process.mpi_comm.barrier()
def _save_Nmax(self, directory, filename):
    """Write ``len(self)`` to ``<directory>/<filename>.length`` on the I/O
    process only."""
    if is_io_process(self.mpi_comm):
        # Use os.path.join instead of manual "/" concatenation, for
        # portability and for consistency with the other I/O helpers
        with open(os.path.join(str(directory), filename + ".length"), "w") as length:
            length.write(str(len(self)))
def _write_to_xdmf_file(fun, directory, filename, suffix, components=None):
    """Write *fun* to XDMF visualization and checkpoint files.

    Functions whose (rank, dimension) pair is not natively supported by XDMF
    are split and written component by component. For time series (*suffix*
    not None) persistent XDMFFile handles are kept in module-level caches and
    suffixes must be written sequentially.

    :param fun: dolfin Function to be written.
    :param directory: output directory.
    :param filename: base file name (component suffixes are appended).
    :param suffix: time-step index for time series, or None for a single write.
    :param components: optional component labels appended to the file name.
    """
    if components is not None:
        filename = filename + "_component_" + "".join(components)
        function_name = "function_" + "".join(components)
    else:
        function_name = "function"
    fun_rank = fun.value_rank()
    fun_dim = product(fun.value_shape())
    assert fun_rank <= 2
    # NOTE: compare with == rather than "is": identity comparison of int
    # literals relies on CPython small-int caching and is a SyntaxWarning
    # on Python >= 3.8
    if ((fun_rank == 1 and fun_dim not in (2, 3))
            or (fun_rank == 2 and fun_dim not in (4, 9))):
        # Unsupported shape for XDMF: write each scalar component separately
        funs = fun.split(deepcopy=True)
        for (i, fun_i) in enumerate(funs):
            if components is not None:
                filename_i = filename + "_subcomponent_" + str(i)
            else:
                filename_i = filename + "_component_" + str(i)
            _write_to_xdmf_file(fun_i, directory, filename_i, suffix, None)
    else:
        full_filename_visualization = os.path.join(str(directory), filename + ".xdmf")
        full_filename_checkpoint = os.path.join(str(directory), filename + "_checkpoint.xdmf")
        if suffix is not None:
            if full_filename_checkpoint in _all_xdmf_files:
                # Suffixes must be written strictly sequentially
                assert _all_xdmf_latest_suffix[full_filename_checkpoint] == suffix - 1
                _all_xdmf_latest_suffix[full_filename_checkpoint] = suffix
            else:
                assert suffix == 0
                # Remove existing files if any, as new functions should not be appended,
                # but rather overwrite existing functions
                if is_io_process() and os.path.exists(full_filename_checkpoint):
                    os.remove(full_filename_checkpoint)
                    os.remove(full_filename_checkpoint.replace(".xdmf", ".h5"))
                _all_xdmf_files[full_filename_visualization] = XDMFFile(full_filename_visualization)
                _all_xdmf_files[full_filename_checkpoint] = XDMFFile(full_filename_checkpoint)
                # don't store these twice for both visualization and checkpoint,
                # as they are the same!
                _all_xdmf_latest_suffix[full_filename_checkpoint] = 0
                _all_xdmf_functions[full_filename_checkpoint] = fun.copy(deepcopy=True)
            # Make sure to always use the same function, otherwise dolfin
            # changes the numbering and visualization is difficult in ParaView
            assign(_all_xdmf_functions[full_filename_checkpoint], fun)
            _all_xdmf_files[full_filename_visualization].write(
                _all_xdmf_functions[full_filename_checkpoint], float(suffix))
            bak_log_level = get_log_level()
            set_log_level(int(WARNING) + 1)  # disable xdmf logs
            _all_xdmf_files[full_filename_checkpoint].write_checkpoint(
                _all_xdmf_functions[full_filename_checkpoint], function_name, float(suffix))
            set_log_level(bak_log_level)
            # Write out current suffix as well
            SuffixIO.save_file(suffix, directory, filename + "_suffix")
        else:
            # Remove existing files if any, as new functions should not be appended,
            # but rather overwrite existing functions
            if is_io_process() and os.path.exists(full_filename_checkpoint):
                os.remove(full_filename_checkpoint)
                os.remove(full_filename_checkpoint.replace(".xdmf", ".h5"))
            with XDMFFile(full_filename_visualization) as file_visualization:
                file_visualization.write(fun, 0.)
            with XDMFFile(full_filename_checkpoint) as file_checkpoint:
                file_checkpoint.write_checkpoint(fun, function_name, 0.)
def save_file(content, directory, filename):
    """Save *content* as a NumPy ".npy" file on the I/O process, then
    synchronize all ranks on a barrier."""
    if not filename.endswith(".npy"):
        filename += ".npy"
    if is_io_process():
        numpy.save(os.path.join(str(directory), filename), content)
    is_io_process.mpi_comm.barrier()
def _save_Nmax(self, directory, filename):
    """Write ``len(self._list)`` to ``<directory>/<filename>.length``
    (I/O process only)."""
    if is_io_process(self.mpi_comm):
        target = os.path.join(str(directory), filename + ".length")
        with open(target, "w") as length_file:
            length_file.write(str(len(self._list)))