def _save_dicts(self, full_directory):
    """Persist the component-name-to-index and component-name-to-length maps."""
    dicts_to_save = (
        (self._component_name_to_basis_component_index, "component_name_to_basis_component_index"),
        (self._component_name_to_basis_component_length, "component_name_to_basis_component_length"),
    )
    for (dict_content, dict_filename) in dicts_to_save:
        DictIO.save_file(dict_content, full_directory, dict_filename)
def save_file(vertices_mapping, directory, filename):
    """Save *vertices_mapping* as text, enforcing a ``.vmp`` file extension."""
    extension = ".vmp"
    if not filename.endswith(extension):
        filename += extension
    TextIO.save_file(vertices_mapping, directory, filename)
def _save_item_vector_dimension(self, full_directory):
    """Persist the vector dimension N of the stored items.

    The dimension is read from the first item; all items are presumably of
    the same dimension (TODO confirm against callers).
    """
    first_item = self._list[0]
    ItemVectorDimensionIO.save_file(first_item.N, full_directory, "item_vector_dimension")
def _save_len(self, full_directory):
    """Persist the number of stored items to the file named "len"."""
    number_of_items = len(self._list)
    LenIO.save_file(number_of_items, full_directory, "len")
def _export(solution, directory, filename, suffix=None, component=None):
    """Export *solution* as text, appending *suffix* to the filename when given.

    The *component* argument is accepted for interface compatibility but is
    not used by this implementation.
    """
    output_filename = filename if suffix is None else filename + "_" + str(suffix)
    TextIO.save_file(solution, directory, output_filename)
def _write_last_index(self, index):
    """Record *index* as the latest one, both in memory and on disk."""
    self._last_index = index
    # Persist the current index alongside the data file
    IndexIO.save_file(index, self._directory, self._filename + "_index.sfx")
def _save_content_item_type_shape(self, item, it, full_directory):
    """Save the type tag "matrix" and the (M, N) shape of *item*."""
    ContentItemTypeIO.save_file("matrix", full_directory, "content_item_type")
    item_shape = (item.M, item.N)
    ContentItemShapeIO.save_file(item_shape, full_directory, "content_item_shape")
def _write_to_xdmf_file(fun, directory, filename, suffix, components=None):
    """Write the dolfin function *fun* to XDMF visualization and checkpoint files.

    :param fun: dolfin Function to be written.
    :param directory: output directory.
    :param filename: base file name (extensions are appended here).
    :param suffix: integer time/iteration index for time series output, or
        None for a one-shot (non-appending) write.
    :param components: optional component names appended to *filename* and
        to the stored function name.
    """
    if components is not None:
        filename = filename + "_component_" + "".join(components)
        function_name = "function_" + "".join(components)
    else:
        function_name = "function"
    fun_rank = fun.value_rank()
    fun_dim = product(fun.value_shape())
    assert fun_rank <= 2
    # Bug fix: use "==" rather than "is" for integer comparison; "is" only
    # happened to work through CPython's small-int caching and raises a
    # SyntaxWarning on modern Python.
    if ((fun_rank == 1 and fun_dim not in (2, 3))
            or (fun_rank == 2 and fun_dim not in (4, 9))):
        # Vector/tensor dimensions unsupported by XDMF: recurse on each
        # subcomponent and write them as separate files.
        funs = fun.split(deepcopy=True)
        for (i, fun_i) in enumerate(funs):
            if components is not None:
                filename_i = filename + "_subcomponent_" + str(i)
            else:
                filename_i = filename + "_component_" + str(i)
            _write_to_xdmf_file(fun_i, directory, filename_i, suffix, None)
    else:
        full_filename_visualization = os.path.join(str(directory), filename + ".xdmf")
        full_filename_checkpoint = os.path.join(str(directory), filename + "_checkpoint.xdmf")
        if suffix is not None:
            if full_filename_checkpoint in _all_xdmf_files:
                # Suffixes must be written in strictly increasing order
                assert _all_xdmf_latest_suffix[full_filename_checkpoint] == suffix - 1
                _all_xdmf_latest_suffix[full_filename_checkpoint] = suffix
            else:
                assert suffix == 0
                # Remove existing files if any, as new functions should not be appended,
                # but rather overwrite existing functions
                if is_io_process() and os.path.exists(full_filename_checkpoint):
                    os.remove(full_filename_checkpoint)
                    os.remove(full_filename_checkpoint.replace(".xdmf", ".h5"))
                _all_xdmf_files[full_filename_visualization] = XDMFFile(full_filename_visualization)
                _all_xdmf_files[full_filename_checkpoint] = XDMFFile(full_filename_checkpoint)
                # don't store these twice for both visualization and checkpoint,
                # as they are the same!
                _all_xdmf_latest_suffix[full_filename_checkpoint] = 0
                _all_xdmf_functions[full_filename_checkpoint] = fun.copy(deepcopy=True)
            # Make sure to always use the same function, otherwise dolfin
            # changes the numbering and visualization is difficult in ParaView
            assign(_all_xdmf_functions[full_filename_checkpoint], fun)
            _all_xdmf_files[full_filename_visualization].write(
                _all_xdmf_functions[full_filename_checkpoint], float(suffix))
            bak_log_level = get_log_level()
            set_log_level(int(WARNING) + 1)  # disable xdmf logs
            _all_xdmf_files[full_filename_checkpoint].write_checkpoint(
                _all_xdmf_functions[full_filename_checkpoint], function_name, float(suffix))
            set_log_level(bak_log_level)
            # Write out current suffix as well
            SuffixIO.save_file(suffix, directory, filename + "_suffix")
        else:
            # Remove existing files if any, as new functions should not be appended,
            # but rather overwrite existing functions
            if is_io_process() and os.path.exists(full_filename_checkpoint):
                os.remove(full_filename_checkpoint)
                os.remove(full_filename_checkpoint.replace(".xdmf", ".h5"))
            with XDMFFile(full_filename_visualization) as file_visualization:
                file_visualization.write(fun, 0.)
            with XDMFFile(full_filename_checkpoint) as file_checkpoint:
                file_checkpoint.write_checkpoint(fun, function_name, 0.)
def _save_content(self, item, it, full_directory):
    """Save every scalar entry of the content array, one file per entry."""
    while not it.finished:
        entry = self._content[it.multi_index]
        ScalarContentIO.save_file(entry, full_directory, "content_item_" + str(it.index))
        it.iternext()
def _save_content_item_type_shape(self, item, it, full_directory):
    """Save the type tag "empty"; there is no shape for this item type."""
    ContentItemTypeIO.save_file("empty", full_directory, "content_item_type")
    ContentItemShapeIO.save_file(None, full_directory, "content_item_shape")
def _save_content_item_type_shape(self, item, it, full_directory):
    """Save the type tag "basis_functions_matrix"; no shape is stored."""
    ContentItemTypeIO.save_file("basis_functions_matrix", full_directory, "content_item_type")
    ContentItemShapeIO.save_file(None, full_directory, "content_item_shape")
def _save_content_item_type_shape(self, item, it, full_directory):
    """Save the type tag "function" and the item's dimension N as its shape."""
    ContentItemTypeIO.save_file("function", full_directory, "content_item_type")
    item_shape = item.N
    ContentItemShapeIO.save_file(item_shape, full_directory, "content_item_shape")
def save(self, directory, filename):
    """Persist the enrichment memory: first its length, then every entry."""
    memory_count = len(self._enrich_memory)
    LengthIO.save_file(memory_count, directory, filename + "_length")
    index = 0
    for memory in self._enrich_memory:
        memory.save(directory, filename + "_" + str(index))
        index += 1
def _save_Nmax(self, full_directory):
    """Persist Nmax, checking that content keys are exactly 1..Nmax."""
    if len(self._content) == 0:
        return
    content_keys = self._content.keys()
    assert min(content_keys) == 1
    assert max(content_keys) == len(self._content)
    NmaxIO.save_file(len(self._content), full_directory, "Nmax")
def save(self, directory, filename):
    """Save this expansion storage to *directory*/*filename*.

    The on-disk layout depends on ``self._type``: delayed functions are
    saved recursively; error-estimation operators additionally record the
    type and problem name of each delayed function; operators record truth
    operator descriptors plus basis functions metadata.

    :param directory: parent directory.
    :param filename: folder name created inside *directory*.
    :raises ValueError: if ``self._type`` is not a recognized storage type.
    :raises TypeError: if a truth operator has an unexpected type.
    """
    # Get full directory name
    full_directory = Folders.Folder(os.path.join(str(directory), filename))
    full_directory.create()
    # Export depending on type
    TypeIO.save_file(self._type, full_directory, "type")
    assert self._type in ("basis_functions_matrix", "empty", "error_estimation_operators_11",
                          "error_estimation_operators_21", "error_estimation_operators_22",
                          "functions_list", "operators")
    if self._type in ("basis_functions_matrix", "functions_list"):
        # Save delayed functions
        delayed_functions = self._content[self._type]
        it = NonAffineExpansionStorageContent_Iterator(
            delayed_functions, flags=["c_index", "multi_index", "refs_ok"], op_flags=["readonly"])
        while not it.finished:
            delayed_function = delayed_functions[it.multi_index]
            delayed_function.save(full_directory, "delayed_functions_" + str(it.index))
            it.iternext()
    elif self._type == "empty":
        pass
    elif self._type in ("error_estimation_operators_11", "error_estimation_operators_21",
                        "error_estimation_operators_22"):
        # Save delayed functions, tagging each with its concrete type and
        # the name of the problem it belongs to
        delayed_function_type = {
            DelayedBasisFunctionsMatrix: "DelayedBasisFunctionsMatrix",
            DelayedLinearSolver: "DelayedLinearSolver"
        }
        # Bug fix: use "==" rather than "is" for integer comparison; "is"
        # only happened to work through CPython's small-int caching.
        assert len(self._content["delayed_functions"]) == 2
        for (index, delayed_functions) in enumerate(self._content["delayed_functions"]):
            it = NonAffineExpansionStorageContent_Iterator(
                delayed_functions, flags=["c_index", "refs_ok"], op_flags=["readonly"])
            while not it.finished:
                delayed_function = delayed_functions[it.index]
                DelayedFunctionsTypeIO.save_file(
                    delayed_function_type[type(delayed_function)], full_directory,
                    "delayed_functions_" + str(index) + "_" + str(it.index) + "_type")
                DelayedFunctionsProblemNameIO.save_file(
                    delayed_function.get_problem_name(), full_directory,
                    "delayed_functions_" + str(index) + "_" + str(it.index) + "_problem_name")
                delayed_function.save(
                    full_directory,
                    "delayed_functions_" + str(index) + "_" + str(it.index) + "_content")
                it.iternext()
        ErrorEstimationInnerProductIO.save_file(
            get_reduced_problem_from_error_estimation_inner_product(
                self._content["inner_product_matrix"]).truth_problem.name(),
            full_directory, "inner_product_matrix_problem_name")
    elif self._type == "operators":
        # Save truth content
        it = NonAffineExpansionStorageContent_Iterator(
            self._content["truth_operators"],
            flags=["c_index", "multi_index", "refs_ok"], op_flags=["readonly"])
        while not it.finished:
            operator = self._content["truth_operators"][it.multi_index]
            assert isinstance(operator, (AbstractParametrizedTensorFactory, NumericForm))
            if isinstance(operator, AbstractParametrizedTensorFactory):
                problem_name = get_problem_from_parametrized_operator(operator).name()
                (term, index) = get_term_and_index_from_parametrized_operator(operator)
                TruthContentItemIO.save_file(
                    "ParametrizedTensorFactory", full_directory,
                    "truth_operator_" + str(it.index) + "_type")
                TruthContentItemIO.save_file(
                    (problem_name, term, index), full_directory,
                    "truth_operator_" + str(it.index))
            elif isinstance(operator, NumericForm):
                TruthContentItemIO.save_file(
                    "NumericForm", full_directory,
                    "truth_operator_" + str(it.index) + "_type")
                TruthContentItemIO.save_file(
                    operator, full_directory, "truth_operator_" + str(it.index))
            else:
                raise TypeError("Invalid operator type")
            it.iternext()
        assert "truth_operators_as_expansion_storage" in self._content
        # Save basis functions content
        assert len(self._content["basis_functions"]) in (0, 1, 2)
        BasisFunctionsContentLengthIO.save_file(
            len(self._content["basis_functions"]), full_directory, "basis_functions_length")
        for (index, basis_functions) in enumerate(self._content["basis_functions"]):
            BasisFunctionsProblemNameIO.save_file(
                get_reduced_problem_from_basis_functions(basis_functions).truth_problem.name(),
                full_directory, "basis_functions_" + str(index) + "_problem_name")
            BasisFunctionsProblemNameIO.save_file(
                basis_functions._components_name, full_directory,
                "basis_functions_" + str(index) + "_components_name")
    else:
        raise ValueError("Invalid type")
def save(self, directory, filename):
    """Save this delayed Riesz solve storage to *directory*/*filename*.

    Persists the problem names associated to the lhs inner product, the
    solution storage, the rhs operator (either a ParametrizedTensorFactory
    or a DelayedProduct of the form -1.0 * factory * basis function), the
    homogeneous Dirichlet bcs, and the current parameters.

    :raises TypeError: if ``self._rhs`` has an unexpected type.
    """
    # Get full directory name
    full_directory = Folders.Folder(os.path.join(str(directory), filename))
    full_directory.create()
    # Save problem corresponding to self._lhs
    assert self._lhs is not None
    LHSIO.save_file(
        get_reduced_problem_from_riesz_solve_inner_product(self._lhs).truth_problem.name(),
        full_directory, "lhs_problem_name")
    # Save problem corresponding to self._solution
    assert self._solution is not None
    SolutionIO.save_file(
        get_reduced_problem_from_riesz_solve_storage(self._solution).truth_problem.name(),
        full_directory, "solution_problem_name")
    # Save problem and operator corresponding to self._rhs
    assert self._rhs is not None
    assert isinstance(self._rhs, (AbstractParametrizedTensorFactory, DelayedProduct))
    if isinstance(self._rhs, AbstractParametrizedTensorFactory):
        RHSIO.save_file("ParametrizedTensorFactory", full_directory, "rhs_type")
        rhs_arg_0 = self._rhs
        rhs_problem_name_0 = get_problem_from_parametrized_operator(rhs_arg_0).name()
        (rhs_term_0, rhs_index_0) = get_term_and_index_from_parametrized_operator(rhs_arg_0)
        RHSIO.save_file((rhs_problem_name_0, rhs_term_0, rhs_index_0),
                        full_directory, "rhs_arg_0")
    elif isinstance(self._rhs, DelayedProduct):
        RHSIO.save_file("DelayedProduct", full_directory, "rhs_type")
        # Bug fix: use "==" rather than "is" for integer comparison; "is"
        # only happened to work through CPython's small-int caching.
        assert len(self._rhs._args) == 3
        # First argument: the -1.0 scaling factor
        rhs_arg_0 = self._rhs._args[0]
        assert rhs_arg_0 == -1.0
        RHSIO.save_file(rhs_arg_0, full_directory, "rhs_arg_0")
        # Second argument: the parametrized tensor factory
        assert isinstance(self._rhs._args[1], AbstractParametrizedTensorFactory)
        rhs_arg_1 = self._rhs._args[1]
        rhs_problem_name_1 = get_problem_from_parametrized_operator(rhs_arg_1).name()
        (rhs_term_1, rhs_index_1) = get_term_and_index_from_parametrized_operator(rhs_arg_1)
        RHSIO.save_file((rhs_problem_name_1, rhs_term_1, rhs_index_1),
                        full_directory, "rhs_arg_1")
        # Third argument: the basis function, which shares the same problem
        rhs_arg_2 = self._rhs._args[2]
        rhs_problem_name_2 = rhs_problem_name_1
        (rhs_component_2, rhs_index_2) = get_component_and_index_from_basis_function(rhs_arg_2)
        RHSIO.save_file((rhs_problem_name_2, rhs_component_2, rhs_index_2),
                        full_directory, "rhs_arg_2")
    else:
        raise TypeError("Invalid rhs")
    # Save problem corresponding to self._bcs
    BCsIO.save_file(
        get_reduced_problem_from_riesz_solve_homogeneous_dirichlet_bc(
            self._bcs).truth_problem.name(),
        full_directory, "bcs_problem_name")
    # Save parameters
    ParametersIO.save_file(self._parameters, full_directory, "parameters")
def export(solution, directory, filename, suffix=None, component=None):
    """Export *solution* as a text file.

    NOTE(review): *suffix* and *component* are accepted but ignored here,
    presumably for interface compatibility with other exporters — confirm
    against callers.
    """
    TextIO.save_file(solution, directory, filename)