def _init_speedup_analysis(self, **kwargs):
        # Make sure to clean up snapshot cache to ensure that parametrized
        # expression evaluation is actually carried out
        self.EIM_approximation._snapshot_cache.clear()

        # ... and also disable the capability of importing/exporting truth solutions
        def disable_import_solution_method(self_,
                                           folder,
                                           filename,
                                           solution=None):
            raise OSError

        self.disable_import_solution = PatchInstanceMethod(
            self.EIM_approximation, "import_solution",
            disable_import_solution_method)

        def disable_export_solution_method(self_,
                                           folder,
                                           filename,
                                           solution=None):
            pass

        self.disable_export_solution = PatchInstanceMethod(
            self.EIM_approximation, "export_solution",
            disable_export_solution_method)
        self.disable_import_solution.patch()
        self.disable_export_solution.patch()
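
All of these examples rely on the same contract: patch() binds the replacement as a method of one specific instance, and unpatch() restores the original afterwards. What follows is a minimal, hypothetical sketch of that contract built on types.MethodType; it is not the actual rbnics.utils.test.PatchInstanceMethod implementation, and the _SketchInstancePatch and _ToyProblem names are made up for illustration:

import types


class _SketchInstancePatch(object):
    def __init__(self, instance, method_name, replacement):
        self._instance = instance
        self._method_name = method_name
        self._replacement = replacement
        self._original = getattr(instance, method_name)

    def patch(self):
        # Bind the replacement to this single instance; the class stays untouched
        setattr(self._instance, self._method_name,
                types.MethodType(self._replacement, self._instance))

    def unpatch(self):
        # Restore whatever was bound before patch() was called
        setattr(self._instance, self._method_name, self._original)


class _ToyProblem(object):
    def import_solution(self, folder, filename, solution=None):
        return True


problem = _ToyProblem()
disable_import = _SketchInstancePatch(
    problem, "import_solution",
    lambda self_, folder, filename, solution=None: False)
disable_import.patch()
assert problem.import_solution("folder", "file") is False
disable_import.unpatch()
assert problem.import_solution("folder", "file") is True
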
Example #2
def _replace_stability_factor_lower_bound_computation(self, **kwargs):
    self._replace_stability_factor_lower_bound_computation__get_stability_factor_lower_bound__original = (
        self.reduced_problem.get_stability_factor_lower_bound)
    if "SCM" not in kwargs:
        if "with_respect_to" in kwargs:
            assert inspect.isfunction(kwargs["with_respect_to"])
            other_truth_problem = kwargs["with_respect_to"](self.truth_problem)
            # Assume that the user wants to disable SCM and use the exact stability factor
            self.replace_get_stability_factor_lower_bound = PatchInstanceMethod(
                self.truth_problem,
                "get_stability_factor_lower_bound",
                lambda self_: other_truth_problem.get_stability_factor_lower_bound()
            )
            self.replace_get_stability_factor_lower_bound.patch()
        else:
            self.replace_get_stability_factor_lower_bound = None
    else:
        assert isinstance(kwargs["SCM"], int)
        # Assume that the user wants to use SCM with a prescribed number of basis functions
        self.replace_get_stability_factor_lower_bound = PatchInstanceMethod(
            self.truth_problem,
            "get_stability_factor_lower_bound",
            lambda self_: self_.SCM_approximation.get_stability_factor_lower_bound(kwargs["SCM"])
        )
        self.replace_get_stability_factor_lower_bound.patch()
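
For orientation, a self-contained toy of the kwargs dispatch in the method above (the returned strings are placeholders; only the branching mirrors the original: a prescribed "SCM" basis count wins, otherwise an optional "with_respect_to" callable selects the problem whose exact stability factor is used, and with no kwargs nothing is patched):

def stability_factor_source(**kwargs):
    if "SCM" not in kwargs:
        if "with_respect_to" in kwargs:
            # delegate to the (exact) problem returned by the user-provided callable
            return kwargs["with_respect_to"]("truth problem")
        return None  # leave get_stability_factor_lower_bound unpatched
    assert isinstance(kwargs["SCM"], int)
    return "SCM lower bound with {} basis functions".format(kwargs["SCM"])


assert stability_factor_source() is None
assert stability_factor_source(SCM=20) == "SCM lower bound with 20 basis functions"
assert stability_factor_source(with_respect_to=lambda p: "exact " + p) == "exact truth problem"
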
Example #3
def _enable_string_components(components, function_space):
    _init_component_to_index(components, function_space)

    original_sub = function_space.sub

    def custom_sub(self_, i):
        assert i is not None
        i_int = _convert_component_to_int(self_, i)
        if i_int is None:

            def custom_collapse(self_, collapsed_dofs=False):
                assert not collapsed_dofs
                return self_

            PatchInstanceMethod(self_, "collapse", custom_collapse).patch()
            return self_
        assert isinstance(i_int, (int, tuple))
        if isinstance(i_int, int):
            output = original_sub(i_int)
        else:
            output = self_.extract_sub_space(i_int)
        if isinstance(i, str):
            components = OrderedDict()
            components[i] = None
        else:
            components = OrderedDict()
            if (len(self_._index_to_components) == 1
                    and None in self_._index_to_components):
                for c in self_._index_to_components[None]:
                    components[c] = None
            else:
                for c in self_.index_to_components(i):
                    components[c] = None
        _enable_string_components(components, output)
        return output

    PatchInstanceMethod(function_space, "sub", custom_sub).patch()

    _preserve_root_space_after_sub(function_space, None)

    original_extract_sub_space = function_space.extract_sub_space

    def custom_extract_sub_space(self_, i):
        i_int = _convert_component_to_int(self_, i)
        output = original_extract_sub_space(i_int)
        if isinstance(i, str):
            components = OrderedDict()
            components[i] = None
        else:
            components = OrderedDict()
            for c in self_.index_to_components(i):
                components[c] = None
        _enable_string_components(components, output)
        return output

    PatchInstanceMethod(function_space, "extract_sub_space",
                        custom_extract_sub_space).patch()
def build_error_estimation_operators(self, current_stage="offline"):
    # Call parent's method (enforcing an empty parent call to _build_error_estimation_operators)
    self.disable_build_error_estimation_operators = PatchInstanceMethod(self, "_build_error_estimation_operators", lambda self_, current_stage="offline": None) # may be shared between DEIM and exact evaluation
    self.disable_build_error_estimation_operators.patch()
    ParametrizedReducedDifferentialProblem_DecoratedClass.build_error_estimation_operators(self, current_stage)
    self.disable_build_error_estimation_operators.unpatch()
    del self.disable_build_error_estimation_operators
    # Then, build error estimators associated to DEIM operators
    self._build_error_estimation_operators_DEIM(current_stage)

def init(self, current_stage="online"):
    # Call parent's method (enforcing an empty parent call to _init_operators)
    self.disable_init_operators = PatchInstanceMethod(self, "_init_operators", lambda self_, current_stage="online": None) # may be shared between DEIM and exact evaluation
    self.disable_init_operators.patch()
    ParametrizedReducedDifferentialProblem_DerivedClass.init(self, current_stage)
    self.disable_init_operators.unpatch()
    del self.disable_init_operators
    # Then, initialize DEIM operators
    self._init_operators_DEIM(current_stage)
Example #6
def init(self):
    # Call parent's method (enforcing an empty parent call to _init_operators)
    self.disable_init_operators = PatchInstanceMethod(
        self, "_init_operators", lambda self_: None)
    # self.disable_init_operators may be shared between EIM and exact evaluation
    self.disable_init_operators.patch()
    ParametrizedDifferentialProblem_DerivedClass.init(self)
    self.disable_init_operators.unpatch()
    del self.disable_init_operators
    # Then, initialize EIM operators
    self._init_operators_EIM()
Example #7
def init(self):
    has_disable_init_operators = hasattr(self, "disable_init_operators") # may be shared between EIM/DEIM and exact evaluation
    # Call parent's method (enforcing an empty parent call to _init_operators)
    if not has_disable_init_operators:
        self.disable_init_operators = PatchInstanceMethod(self, "_init_operators", lambda self_: None)
    self.disable_init_operators.patch()
    ParametrizedDifferentialProblem_DerivedClass.init(self)
    self.disable_init_operators.unpatch()
    if not has_disable_init_operators:
        del self.disable_init_operators
    # Then, initialize exact operators
    self._init_operators_exact()

def build_error_estimation_operators(self, current_stage="offline"):
    has_disable_build_error_estimation_operators = hasattr(self, "disable_build_error_estimation_operators") # may be shared between EIM/DEIM and exact evaluation
    # Call parent's method (enforcing an empty parent call to _build_error_estimation_operators)
    if not has_disable_build_error_estimation_operators:
        self.disable_build_error_estimation_operators = PatchInstanceMethod(self, "_build_error_estimation_operators", lambda self_, current_stage="offline": None)
    self.disable_build_error_estimation_operators.patch()
    ParametrizedReducedDifferentialProblem_DecoratedClass.build_error_estimation_operators(self, current_stage)
    self.disable_build_error_estimation_operators.unpatch()
    if not has_disable_build_error_estimation_operators:
        del self.disable_build_error_estimation_operators
    # Then, build exact operators
    self._build_error_estimation_operators_exact(current_stage)

def init(self, current_stage="online"):
    # self.disable_init_error_estimation_operators may be shared between EIM/DEIM and exact evaluation
    has_disable_init_error_estimation_operators = hasattr(
        self, "disable_init_error_estimation_operators")
    # Call parent's method (enforcing an empty parent call to _init_error_estimation_operators)
    if not has_disable_init_error_estimation_operators:
        self.disable_init_error_estimation_operators = PatchInstanceMethod(
            self, "_init_error_estimation_operators", lambda self_, current_stage="online": None)
    self.disable_init_error_estimation_operators.patch()
    ParametrizedReducedDifferentialProblem_DecoratedClass.init(self, current_stage)
    self.disable_init_error_estimation_operators.unpatch()
    if not has_disable_init_error_estimation_operators:
        del self.disable_init_error_estimation_operators
    # Then, initialize error estimation operators associated to exact operators
    self._init_error_estimation_operators_exact(current_stage)
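
The hasattr checks above implement a small ownership protocol: the disabling patch may already have been installed by another decorator, in which case it must be reused and must not be deleted here. A self-contained toy of just that guard, with made-up attribute values standing in for the actual PatchInstanceMethod objects:

class _Holder(object):
    pass


def call_with_disabled_init_operators(obj):
    # Hypothetical skeleton of the guard used above: reuse a disabling patch that
    # was already installed (e.g. shared with an EIM/DEIM decoration) and only
    # clean up a patch that this call created itself.
    has_patch = hasattr(obj, "disable_init_operators")
    if not has_patch:
        obj.disable_init_operators = "locally created patch"
    # ... here the real code would patch(), call the parent init and unpatch() ...
    if not has_patch:
        del obj.disable_init_operators


shared = _Holder()
shared.disable_init_operators = "patch shared with EIM/DEIM"
call_with_disabled_init_operators(shared)
assert shared.disable_init_operators == "patch shared with EIM/DEIM"  # shared patch survives

local = _Holder()
call_with_disabled_init_operators(local)
assert not hasattr(local, "disable_init_operators")  # local patch was cleaned up
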
Example #10
                def patch_functions_list_enrich(component_name,
                                                functions_list):
                    original_functions_list_enrich = functions_list.enrich

                    def patched_functions_list_enrich(self_,
                                                      functions,
                                                      component=None,
                                                      weights=None,
                                                      copy=True):
                        # Append to storage
                        original_functions_list_enrich(functions, component,
                                                       weights, copy)
                        # Update component name to basis component length
                        self._update_component_name_to_basis_component_length(
                            component_name if component is None else component)
                        # Reset precomputed sub components
                        self._precomputed_sub_components.clear()
                        # Prepare trivial precomputed sub components
                        self._prepare_trivial_precomputed_sub_components()
                        # Reset precomputed slices
                        self._precomputed_slices.clear()
                        # Prepare trivial precomputed slice
                        self._prepare_trivial_precomputed_slice()

                    functions_list.enrich_patch = PatchInstanceMethod(
                        functions_list, "enrich",
                        patched_functions_list_enrich)
                    functions_list.enrich_patch.patch()
        def patch_delayed_functions_list_enrich(component_name, memory):
            original_delayed_functions_list_enrich = memory.enrich

            def patched_delayed_functions_list_enrich(self_,
                                                      functions,
                                                      component=None,
                                                      weights=None,
                                                      copy=True):
                # Append to storage
                original_delayed_functions_list_enrich(functions, component,
                                                       weights, copy)
                # Update component name to basis component length
                if component is not None:
                    if isinstance(component, dict):
                        assert len(component) == 1
                        for (_, component_to) in component.items():
                            break
                        assert component_name == component_to
                    else:
                        assert component_name == component
                self._update_component_name_to_basis_component_length(
                    component_name)
                # Reset precomputed slices
                self._precomputed_slices.clear()
                # Prepare trivial precomputed slice
                self._prepare_trivial_precomputed_slice()

            memory.enrich_patch = PatchInstanceMethod(
                memory, "enrich", patched_delayed_functions_list_enrich)
            memory.enrich_patch.patch()
Example #13
    def _init_dirichlet_bc(self):
        """
        Initialize boundary conditions required for the offline phase. Internal method.
        """
        # Get helper strings depending on the number of basis components
        n_components = len(self.components)
        assert n_components > 0
        if n_components > 1:
            dirichlet_bc_string = "dirichlet_bc_{c}"
        else:
            dirichlet_bc_string = "dirichlet_bc"
        # Assemble Dirichlet BCs
        # we do not assert for
        # (self.dirichlet_bc is None) == (self.dirichlet_bc_are_homogeneous is None)
        # because self.dirichlet_bc may still be None after initialization, if there
        # were no Dirichlet BCs at all and the problem had only one component
        if self.dirichlet_bc_are_homogeneous is None:  # init was not called already
            dirichlet_bc = dict()
            dirichlet_bc_are_homogeneous = dict()
            for component in self.components:
                try:
                    operator_bc = AffineExpansionStorage(
                        self.assemble_operator(dirichlet_bc_string.format(c=component)))
                except ValueError:  # there were no Dirichlet BCs
                    dirichlet_bc[component] = None
                    dirichlet_bc_are_homogeneous[component] = False
                else:
                    dirichlet_bc[component] = operator_bc
                    try:
                        self.compute_theta(dirichlet_bc_string.format(c=component))
                    except ValueError:  # there were no theta functions
                        # We provide in this case a shortcut for homogeneous Dirichlet BCs,
                        # which do not require an additional lifting function.
                        # The user needs to implement the dirichlet_bc case for assemble_operator,
                        # but not the one in compute_theta (since theta would not matter, being multiplied by zero)
                        def generate_modified_compute_theta(component, operator_bc):
                            standard_compute_theta = self.compute_theta

                            def modified_compute_theta(self_, term):
                                if term == dirichlet_bc_string.format(c=component):
                                    return (0., ) * len(operator_bc)
                                else:
                                    return standard_compute_theta(term)

                            return modified_compute_theta

                        PatchInstanceMethod(
                            self, "compute_theta", generate_modified_compute_theta(component, operator_bc)).patch()
                        dirichlet_bc_are_homogeneous[component] = True
                    else:
                        dirichlet_bc_are_homogeneous[component] = False
            if n_components == 1:
                self.dirichlet_bc = dirichlet_bc[self.components[0]]
                self.dirichlet_bc_are_homogeneous = dirichlet_bc_are_homogeneous[self.components[0]]
            else:
                self.dirichlet_bc = dirichlet_bc
                self.dirichlet_bc_are_homogeneous = dirichlet_bc_are_homogeneous
            assert self._combined_and_homogenized_dirichlet_bc is None
            self._combined_and_homogenized_dirichlet_bc = self._combine_and_homogenize_all_dirichlet_bcs()
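
One reason generate_modified_compute_theta above is written as a factory rather than a plain nested function is Python's late binding of closure variables: component and operator_bc change at each loop iteration, and only a new function scope freezes their current values. A self-contained illustration of the pitfall and of the factory fix (the component names are invented):

# Late binding: every lambda closes over the variable c, not its value,
# so after the loop all of them see the last component.
late_bound = [lambda: c for c in ("u", "p", "T")]
assert [f() for f in late_bound] == ["T", "T", "T"]


# Factory fix, analogous to generate_modified_compute_theta: each call creates
# a fresh scope that captures the current value of the loop variable.
def make_getter(component):
    return lambda: component


early_bound = [make_getter(c) for c in ("u", "p", "T")]
assert [f() for f in early_bound] == ["u", "p", "T"]
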
        def _init_basis_functions(self, current_stage="online"):
            # Initialize basis functions as in Parent class
            ExactParametrizedFunctionsDecoratedReducedProblem_DerivedClass._init_basis_functions(
                self, current_stage)

            # Patch BasisFunctionsMatrix._update_component_name_to_basis_component_length so that it also updates
            # the map from each basis function to component and index after BasisFunctionsMatrix.enrich()
            # has been called.
            if not hasattr(
                    self.basis_functions,
                    "_update_component_name_to_basis_component_length_patched"
            ):

                @overload(AbstractBasisFunctionsMatrix, None)
                def patched_update_component_name_to_basis_component_length(
                        self_, component):
                    assert len(self_._components) == 1
                    assert len(self_._components_name) == 1
                    component_0 = self_._components_name[0]
                    _add_new_basis_functions_to_map_from_basis_function_to_component_and_index(
                        self_, component_0)

                @overload(AbstractBasisFunctionsMatrix, str)
                def patched_update_component_name_to_basis_component_length(
                        self_, component):
                    _add_new_basis_functions_to_map_from_basis_function_to_component_and_index(
                        self_, component)

                @overload(AbstractBasisFunctionsMatrix, dict_of(str, str))
                def patched_update_component_name_to_basis_component_length(
                        self_, component):
                    assert len(component) == 1
                    for (_, component_to) in component.items():
                        break
                    assert component_to in self_._components
                    _add_new_basis_functions_to_map_from_basis_function_to_component_and_index(
                        self_, component_to)

                def _add_new_basis_functions_to_map_from_basis_function_to_component_and_index(
                        self_, component):
                    old_component_length = self_._component_name_to_basis_component_length[
                        component]
                    self_._component_name_to_basis_component_length[
                        component] = len(self_._components[component])
                    new_component_length = self_._component_name_to_basis_component_length[
                        component]
                    for index in range(old_component_length,
                                       new_component_length):
                        add_to_map_from_basis_function_to_component_and_index(
                            self_._components[component][index], component,
                            index)

                # Apply patch
                PatchInstanceMethod(
                    self.basis_functions,
                    "_update_component_name_to_basis_component_length",
                    patched_update_component_name_to_basis_component_length
                ).patch()
                self.basis_functions._update_component_name_to_basis_component_length_patched = True
def _patch_save_load(expansion_storage, method):
    if not hasattr(expansion_storage, method + "_patched"):
        original_method = getattr(expansion_storage, method)

        def patched_method(self, directory, filename):
            # Get full directory name
            full_directory = Folders.Folder(os.path.join(str(directory), _OfflineOnlineExpansionStorage_Base._current_stage))
            full_directory.create()
            # Call original implementation
            return original_method(full_directory, filename)

        PatchInstanceMethod(expansion_storage, method, patched_method).patch()
        setattr(expansion_storage, method + "_patched", True)
Example #16
def _preserve_root_space_after_sub(function_space, root_space_after_sub):
    function_space._root_space_after_sub = root_space_after_sub

    original_sub = function_space.sub

    def custom_sub(self_, i):
        output = original_sub(i)
        _preserve_root_space_after_sub(output, self_)
        return output

    PatchInstanceMethod(function_space, "sub", custom_sub).patch()
Example #17
def _init_component_to_index(components, function_space):
    assert isinstance(components, (list, OrderedDict))
    if isinstance(components, list):
        function_space._component_to_index = OrderedDict()
        for (index, component) in enumerate(components):
            _init_component_to_index__recursive(
                component, function_space._component_to_index, index)
    else:
        function_space._component_to_index = components
    function_space._index_to_components = dict()
    for (component, index) in function_space._component_to_index.items():
        assert isinstance(index, (int, tuple)) or index is None
        if isinstance(index, int) or index is None:
            components = function_space._index_to_components.get(index, list())
            components.append(component)
            function_space._index_to_components[index] = components
        elif isinstance(index, tuple):
            for i in range(1, len(index) + 1):
                index_i = index[:i] if i > 1 else index[0]
                components = function_space._index_to_components.get(
                    index_i, list())
                components.append(component)
                function_space._index_to_components[index_i] = components
        else:
            raise TypeError("Invalid index")

    def component_to_index(self_, i):
        return self_._component_to_index[i]

    AttachInstanceMethod(function_space, "component_to_index",
                         component_to_index).attach()

    def index_to_components(self_, c):
        return self_._index_to_components[c]

    AttachInstanceMethod(function_space, "index_to_components",
                         index_to_components).attach()

    original_collapse = function_space.collapse

    def custom_collapse(self_, collapsed_dofs=False):
        if not collapsed_dofs:
            output = original_collapse(collapsed_dofs)
        else:
            output, collapsed_dofs_dict = original_collapse(collapsed_dofs)
        if hasattr(self_, "_component_to_index"):
            _init_component_to_index(self_._component_to_index, output)
        if not collapsed_dofs:
            return output
        else:
            return output, collapsed_dofs_dict

    PatchInstanceMethod(function_space, "collapse", custom_collapse).patch()
            def _patch_truth_solve(self, force, **kwargs):
                DifferentialProblemReductionMethod_DerivedClass._patch_truth_solve(
                    self, **kwargs)
                if "with_respect_to" in kwargs:
                    assert inspect.isfunction(kwargs["with_respect_to"])
                    other_dual_truth_problem = kwargs["with_respect_to"](
                        self.dual_truth_problem)

                    def patched_dual_truth_solve(self_, **kwargs_):
                        other_dual_truth_problem.solve(**kwargs_)
                        assign(self.dual_truth_problem._solution,
                               other_dual_truth_problem._solution)
                        return self.dual_truth_problem._solution

                    self.patch_dual_truth_solve = PatchInstanceMethod(
                        self.dual_truth_problem, "solve",
                        patched_dual_truth_solve)
                    self.patch_dual_truth_solve.patch()

                    # Initialize the affine expansion in the other dual truth problem
                    other_dual_truth_problem.init()
                else:
                    other_dual_truth_problem = self.dual_truth_problem

                # Clean up solution caching and disable I/O
                if force:
                    # Make sure to clean up problem and reduced problem solution cache to ensure that
                    # solution and reduced solution are actually computed
                    other_dual_truth_problem._solution_cache.clear()
                    other_dual_truth_problem._output_cache.clear()

                    # Disable the capability of importing/exporting dual truth solutions
                    def disable_import_solution_method(self_,
                                                       folder=None,
                                                       filename=None,
                                                       solution=None,
                                                       component=None,
                                                       suffix=None):
                        raise OSError

                    self.disable_import_dual_solution = PatchInstanceMethod(
                        other_dual_truth_problem, "import_solution",
                        disable_import_solution_method)

                    def disable_export_solution_method(self_,
                                                       folder=None,
                                                       filename=None,
                                                       solution=None,
                                                       component=None,
                                                       suffix=None):
                        pass

                    self.disable_export_dual_solution = PatchInstanceMethod(
                        other_dual_truth_problem, "export_solution",
                        disable_export_solution_method)
                    self.disable_import_dual_solution.patch()
                    self.disable_export_dual_solution.patch()
Example #19
def patch_test_item(test_item):
    """
    Patch the test so that it runs in a parametrized temporary subdirectory, with matplotlib disabled, and its results are compared against the stored gold data.
    """
    
    subdirectory = os.path.join(test_item.originalname + "_tempdir", test_item.callspec.getparam("expression_type"), test_item.callspec.getparam("basis_generation"))
    original_runtest = test_item.runtest
    
    @run_and_compare_to_gold(subdirectory)
    def runtest(self):
        disable_matplotlib()
        original_runtest()
        enable_matplotlib()
        
    PatchInstanceMethod(test_item, "runtest", runtest).patch()
Example #20
        def _patch_truth_compute_output(self, force, **kwargs):
            if "with_respect_to" in kwargs:
                assert inspect.isfunction(kwargs["with_respect_to"])
                other_truth_problem = kwargs["with_respect_to"](
                    self.truth_problem)

                def patched_truth_compute_output(self_):
                    other_truth_problem.compute_output()
                    assign(self.truth_problem._output,
                           other_truth_problem._output)
                    assign(self.truth_problem._output_over_time,
                           other_truth_problem._output_over_time)
                    return self.truth_problem._output_over_time

                self.patch_truth_compute_output = PatchInstanceMethod(
                    self.truth_problem, "compute_output",
                    patched_truth_compute_output)
                self.patch_truth_compute_output.patch()

                # Initialize the affine expansion in the other truth problem
                other_truth_problem.init()
            else:
                other_truth_problem = self.truth_problem

            # Clean up output caching and disable I/O
            if force:
                # Make sure to clean up problem and reduced problem output cache to ensure that
                # output and reduced output are actually computed
                other_truth_problem._output_over_time_cache.clear()
                self.reduced_problem._output_over_time_cache.clear()

                # Disable the capability of importing/exporting truth output
                def disable_import_output_method(self_,
                                                 folder=None,
                                                 filename=None,
                                                 output_over_time=None,
                                                 suffix=None):
                    raise OSError

                self.disable_import_output = PatchInstanceMethod(
                    other_truth_problem, "import_output",
                    disable_import_output_method)
                self.disable_import_output.patch()

                def disable_export_output_method(self_,
                                                 folder=None,
                                                 filename=None,
                                                 output_over_time=None,
                                                 suffix=None):
                    pass

                self.disable_export_output = PatchInstanceMethod(
                    other_truth_problem, "export_output",
                    disable_export_output_method)
                self.disable_export_output.patch()
def _patch_truth_solve(self, force, **kwargs):
    if "with_respect_to" in kwargs:
        assert inspect.isfunction(kwargs["with_respect_to"])
        other_truth_problem = kwargs["with_respect_to"](self.truth_problem)

        def patched_truth_solve(self_, **kwargs_):
            other_truth_problem.solve(**kwargs_)
            assign(self.truth_problem._solution, other_truth_problem._solution)
            assign(self.truth_problem._solution_dot, other_truth_problem._solution_dot)
            assign(self.truth_problem._solution_over_time, other_truth_problem._solution_over_time)
            assign(self.truth_problem._solution_dot_over_time, other_truth_problem._solution_dot_over_time)
            return self.truth_problem._solution_over_time

        self.patch_truth_solve = PatchInstanceMethod(
            self.truth_problem,
            "solve",
            patched_truth_solve
        )
        self.patch_truth_solve.patch()

        # Initialize the affine expansion in the other truth problem
        other_truth_problem.init()
    else:
        other_truth_problem = self.truth_problem

    # Clean up solution caching and disable I/O
    if force:
        # Make sure to clean up problem and reduced problem solution cache to ensure that
        # solution and reduced solution are actually computed
        other_truth_problem._solution_cache.clear()
        other_truth_problem._solution_dot_cache.clear()
        other_truth_problem._solution_over_time_cache.clear()
        other_truth_problem._solution_dot_over_time_cache.clear()
        other_truth_problem._output_cache.clear()
        other_truth_problem._output_over_time_cache.clear()
        self.reduced_problem._solution_cache.clear()
        self.reduced_problem._solution_dot_cache.clear()
        self.reduced_problem._solution_over_time_cache.clear()
        self.reduced_problem._solution_dot_over_time_cache.clear()
        self.reduced_problem._output_cache.clear()
        self.reduced_problem._output_over_time_cache.clear()

        # Disable the capability of importing/exporting truth solutions
        self.disable_import_solution = PatchInstanceMethod(
            other_truth_problem,
            "import_solution",
            lambda self_, folder=None, filename=None, solution_over_time=None, component=None, suffix=None: False
        )
        self.disable_export_solution = PatchInstanceMethod(
            other_truth_problem,
            "export_solution",
            lambda self_, folder=None, filename=None, solution_over_time=None, component=None, suffix=None: None
        )
        self.disable_import_solution.patch()
        self.disable_export_solution.patch()
Example #22
def _init_basis_functions(self, current_stage="online"):
    # Initialize basis functions as in Parent class
    ExactParametrizedFunctionsDecoratedReducedProblem_DerivedClass._init_basis_functions(self, current_stage)

    # Populate basis functions to reduced problem map
    add_to_map_from_basis_functions_to_reduced_problem(self.basis_functions, self)

    # Add basis functions matrix obtained through sub components to the map as well, by patching
    # BasisFunctionsMatrix._precompute_sub_components
    if not hasattr(self.basis_functions, "_precompute_sub_components_patched"):
        original_precompute_sub_components = self.basis_functions._precompute_sub_components

        def patched_precompute_sub_components(self_, sub_components):
            output = original_precompute_sub_components(sub_components)
            add_to_map_from_basis_functions_to_reduced_problem(output, self)
            return output

        # Apply patch
        PatchInstanceMethod(self.basis_functions, "_precompute_sub_components", patched_precompute_sub_components).patch()
        self.basis_functions._precompute_sub_components_patched = True
Example #23
    def __setitem__(self, key, value):
        """
        Set key in both RAM and disk storage.
        """
        from rbnics.backends.abstract import TimeSeries
        from rbnics.utils.test import PatchInstanceMethod
        assert isinstance(value, TimeSeries)
        if self._filename_generator is not None:
            # Patch value's append method to save to file
            (args, kwargs, storage_key) = self._compute_storage_key(key)
            storage_filename = self._filename_generator(*args, **kwargs)
            original_append = value.append

            def patched_append(self_, item):
                self._export(storage_filename, item, len(self_))
                original_append(item)

            PatchInstanceMethod(value, "append", patched_append).patch()
        # Call standard setitem, disabling export
        bak_filename_generator = self._filename_generator
        self._filename_generator = None
        Cache.__setitem__(self, key, value)
        self._filename_generator = bak_filename_generator
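
For illustration, a minimal self-contained stand-in for the patched append above, showing that each item is exported together with its position before being stored in memory; the _ToyTimeSeries class, the exported list, and the "solution_mu0" filename are all made up:

import types


class _ToyTimeSeries(list):
    pass


exported = []
series = _ToyTimeSeries()
original_append = series.append  # keep the original (bound) append


def patched_append(self_, item):
    # Mimic self._export(storage_filename, item, len(self_)): record the item
    # with its index before appending it to the in-memory storage
    exported.append(("solution_mu0", item, len(self_)))
    original_append(item)


series.append = types.MethodType(patched_append, series)
series.append(1.0)
series.append(2.0)
assert list(series) == [1.0, 2.0]
assert exported == [("solution_mu0", 1.0, 0), ("solution_mu0", 2.0, 1)]
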
Example #24
    def patched_offline_method(self_):
        # Patch truth_problem's export_solution
        assert (hasattr(self_, "truth_problem")
                or hasattr(self_, "EIM_approximation"))
        if hasattr(self_, "truth_problem"):  # differential problem
            truth_problem = self_.truth_problem
        elif hasattr(self_, "EIM_approximation"):  # EIM
            truth_problem = self_.EIM_approximation
        else:
            raise AttributeError("Invalid truth problem attribute.")
        export_solution_patch = PatchInstanceMethod(
            truth_problem, "export_solution",
            patched_export_solution(truth_problem, self_.folder["snapshots"]))
        export_solution_patch.patch()

        # Call standard offline
        reduced_problem = offline_method(self_)

        # Disable patch
        export_solution_patch.unpatch()

        # Return generated reduced problem
        return reduced_problem
Example #25
class EIMApproximationReductionMethod(ReductionMethod):
    
    # Default initialization of members
    def __init__(self, EIM_approximation):
        # Call the parent initialization
        ReductionMethod.__init__(self, EIM_approximation.folder_prefix)
        
        # $$ OFFLINE DATA STRUCTURES $$ #
        # High fidelity problem
        self.EIM_approximation = EIM_approximation
        # Declare a new container to store the snapshots
        self.snapshots_container = self.EIM_approximation.parametrized_expression.create_snapshots_container()
        self._training_set_parameters_to_snapshots_container_index = dict()
        # I/O
        self.folder["snapshots"] = os.path.join(self.folder_prefix, "snapshots")
        self.folder["post_processing"] = os.path.join(self.folder_prefix, "post_processing")
        self.greedy_selected_parameters = GreedySelectedParametersList()
        self.greedy_errors = GreedyErrorEstimatorsList()
        # By default set a tolerance slightly larger than zero, so that greedy
        # iterations stop on their own in trivial cases
        self.tol = 1e-15
    
    def initialize_training_set(self, ntrain, enable_import=True, sampling=None, **kwargs):
        import_successful = ReductionMethod.initialize_training_set(self, self.EIM_approximation.mu_range, ntrain, enable_import, sampling, **kwargs)
        # Since exact evaluation is required, we cannot use a distributed training set
        self.training_set.distributed_max = False
        # Also initialize the map from parameter values to snapshots container index
        self._training_set_parameters_to_snapshots_container_index = dict((mu, mu_index) for (mu_index, mu) in enumerate(self.training_set))
        return import_successful
        
    def initialize_testing_set(self, ntest, enable_import=False, sampling=None, **kwargs):
        return ReductionMethod.initialize_testing_set(self, self.EIM_approximation.mu_range, ntest, enable_import, sampling, **kwargs)
    
    # Perform the offline phase of EIM
    def offline(self):
        need_to_do_offline_stage = self._init_offline()
        if need_to_do_offline_stage:
            self._offline()
        self._finalize_offline()
        return self.EIM_approximation
        
    # Initialize data structures required for the offline phase
    def _init_offline(self):
        # Prepare folders and init EIM approximation
        all_folders = Folders()
        all_folders.update(self.folder)
        all_folders.update(self.EIM_approximation.folder)
        all_folders.pop("testing_set") # this is required only in the error/speedup analysis
        all_folders.pop("error_analysis") # this is required only in the error analysis
        all_folders.pop("speedup_analysis") # this is required only in the speedup analysis
        at_least_one_folder_created = all_folders.create()
        if not at_least_one_folder_created:
            return False # offline construction should be skipped, since data are already available
        else:
            self.EIM_approximation.init("offline")
            return True # offline construction should be carried out
        
    def _offline(self):
        interpolation_method_name = self.EIM_approximation.parametrized_expression.interpolation_method_name()
        description = self.EIM_approximation.parametrized_expression.description()
        
        # Evaluate the parametrized expression for all parameters in the training set
        print(TextBox(interpolation_method_name + " preprocessing phase begins for" + "\n" + "\n".join(description), fill="="))
        print("")
        
        for (mu_index, mu) in enumerate(self.training_set):
            print(TextLine(interpolation_method_name + " " + str(mu_index), fill=":"))
            
            self.EIM_approximation.set_mu(mu)
            
            print("evaluate parametrized expression at mu =", mu)
            self.EIM_approximation.evaluate_parametrized_expression()
            self.EIM_approximation.export_solution(self.folder["snapshots"], "truth_" + str(mu_index))
            
            print("add to snapshots")
            self.add_to_snapshots(self.EIM_approximation.snapshot)

            print("")
            
        # If basis generation is POD, compute the first POD modes of the snapshots
        if self.EIM_approximation.basis_generation == "POD":
            print("compute basis")
            N_POD = self.compute_basis_POD()
            print("")
        
        print(TextBox(interpolation_method_name + " preprocessing phase ends for" + "\n" + "\n".join(description), fill="="))
        print("")
        
        print(TextBox(interpolation_method_name + " offline phase begins for" + "\n" + "\n".join(description), fill="="))
        print("")
        
        if self.EIM_approximation.basis_generation == "Greedy":
            # Arbitrarily start from the first parameter in the training set
            self.EIM_approximation.set_mu(self.training_set[0])
            
            # Carry out greedy selection
            relative_error_max = 2.*self.tol
            while self.EIM_approximation.N < self.Nmax and relative_error_max >= self.tol:
                print(TextLine(interpolation_method_name + " N = " + str(self.EIM_approximation.N), fill=":"))
                
                self._print_greedy_interpolation_solve_message()
                self.EIM_approximation.solve()
                
                print("compute and locate maximum interpolation error")
                self.EIM_approximation.snapshot = self.load_snapshot()
                (error, maximum_error, maximum_location) = self.EIM_approximation.compute_maximum_interpolation_error()
                
                print("update locations with", maximum_location)
                self.update_interpolation_locations(maximum_location)
                
                print("update basis")
                self.update_basis_greedy(error, maximum_error)
                
                print("update interpolation matrix")
                self.update_interpolation_matrix()
                
                (error_max, relative_error_max) = self.greedy()
                print("maximum interpolation error =", error_max)
                print("maximum interpolation relative error =", relative_error_max)
                
                print("")
        else:
            while self.EIM_approximation.N < N_POD:
                print(TextLine(interpolation_method_name + " N = " + str(self.EIM_approximation.N), fill=":"))
            
                print("solve interpolation for basis number", self.EIM_approximation.N)
                self.EIM_approximation._solve(self.EIM_approximation.basis_functions[self.EIM_approximation.N])
                
                print("compute and locate maximum interpolation error")
                self.EIM_approximation.snapshot = self.EIM_approximation.basis_functions[self.EIM_approximation.N]
                (error, maximum_error, maximum_location) = self.EIM_approximation.compute_maximum_interpolation_error()
                
                print("update locations with", maximum_location)
                self.update_interpolation_locations(maximum_location)
                
                self.EIM_approximation.N += 1
                
                print("update interpolation matrix")
                self.update_interpolation_matrix()
                
                print("")
                
        print(TextBox(interpolation_method_name + " offline phase ends for" + "\n" + "\n".join(description), fill="="))
        print("")
        
    # Finalize data structures required after the offline phase
    def _finalize_offline(self):
        self.EIM_approximation.init("online")
        
    def _print_greedy_interpolation_solve_message(self):
        print("solve interpolation for mu =", self.EIM_approximation.mu)
        
    # Update the snapshots container
    def add_to_snapshots(self, snapshot):
        self.snapshots_container.enrich(snapshot)
        
    # Update basis (greedy version)
    def update_basis_greedy(self, error, maximum_error):
        if abs(maximum_error) > 0.:
            self.EIM_approximation.basis_functions.enrich(error/maximum_error)
        else:
            # Trivial case, greedy will stop at the first iteration
            assert self.EIM_approximation.N == 0
            self.EIM_approximation.basis_functions.enrich(error) # error is actually zero
        self.EIM_approximation.basis_functions.save(self.EIM_approximation.folder["basis"], "basis")
        self.EIM_approximation.N += 1

    # Update basis (POD version)
    def compute_basis_POD(self):
        POD = self.EIM_approximation.parametrized_expression.create_POD_container()
        POD.store_snapshot(self.snapshots_container)
        (_, _, basis_functions, N) = POD.apply(self.Nmax, self.tol)
        self.EIM_approximation.basis_functions.enrich(basis_functions)
        self.EIM_approximation.basis_functions.save(self.EIM_approximation.folder["basis"], "basis")
        # do not increment self.EIM_approximation.N
        POD.print_eigenvalues(N)
        POD.save_eigenvalues_file(self.folder["post_processing"], "eigs")
        POD.save_retained_energy_file(self.folder["post_processing"], "retained_energy")
        return N
        
    def update_interpolation_locations(self, maximum_location):
        self.EIM_approximation.interpolation_locations.append(maximum_location)
        self.EIM_approximation.interpolation_locations.save(self.EIM_approximation.folder["reduced_operators"], "interpolation_locations")
    
    # Assemble the interpolation matrix
    def update_interpolation_matrix(self):
        self.EIM_approximation.interpolation_matrix[0] = evaluate(self.EIM_approximation.basis_functions[:self.EIM_approximation.N], self.EIM_approximation.interpolation_locations)
        self.EIM_approximation.interpolation_matrix.save(self.EIM_approximation.folder["reduced_operators"], "interpolation_matrix")
            
    # Load the precomputed snapshot
    def load_snapshot(self):
        assert self.EIM_approximation.basis_generation == "Greedy"
        mu = self.EIM_approximation.mu
        mu_index = self._training_set_parameters_to_snapshots_container_index[mu]
        assert mu == self.training_set[mu_index]
        return self.snapshots_container[mu_index]
        
    # Choose the next parameter in the offline stage in a greedy fashion
    def greedy(self):
        assert self.EIM_approximation.basis_generation == "Greedy"
        
        # Print some additional information on the consistency of the reduced basis
        self.EIM_approximation.solve()
        self.EIM_approximation.snapshot = self.load_snapshot()
        error = self.EIM_approximation.snapshot - self.EIM_approximation.basis_functions*self.EIM_approximation._interpolation_coefficients
        error_on_interpolation_locations = evaluate(error, self.EIM_approximation.interpolation_locations)
        (maximum_error, _) = max(abs(error))
        (maximum_error_on_interpolation_locations, _) = max(abs(error_on_interpolation_locations)) # for consistency check, should be zero
        print("interpolation error for current mu =", abs(maximum_error))
        print("interpolation error on interpolation locations for current mu =", abs(maximum_error_on_interpolation_locations))
        
        # Carry out the actual greedy search
        def solve_and_compute_error(mu):
            self.EIM_approximation.set_mu(mu)
            
            self.EIM_approximation.solve()
            self.EIM_approximation.snapshot = self.load_snapshot()
            (_, maximum_error, _) = self.EIM_approximation.compute_maximum_interpolation_error()
            return abs(maximum_error)
            
        print("find next mu")
        (error_max, error_argmax) = self.training_set.max(solve_and_compute_error)
        self.EIM_approximation.set_mu(self.training_set[error_argmax])
        self.greedy_selected_parameters.append(self.training_set[error_argmax])
        self.greedy_selected_parameters.save(self.folder["post_processing"], "mu_greedy")
        self.greedy_errors.append(error_max)
        self.greedy_errors.save(self.folder["post_processing"], "error_max")
        if abs(self.greedy_errors[0]) > 0.:
            return (abs(error_max), abs(error_max/self.greedy_errors[0]))
        else:
            # Trivial case, greedy will stop at the first iteration
            assert len(self.greedy_errors) == 1
            assert self.EIM_approximation.N == 1
            return (0., 0.)
    
    # Compute the error of the empirical interpolation approximation with respect to the
    # exact function over the testing set
    def error_analysis(self, N_generator=None, filename=None, **kwargs):
        assert len(kwargs) == 0 # not used in this method
            
        self._init_error_analysis(**kwargs)
        self._error_analysis(N_generator, filename, **kwargs)
        self._finalize_error_analysis(**kwargs)
        
    def _error_analysis(self, N_generator=None, filename=None, **kwargs):
        if N_generator is None:
            def N_generator(n):
                return n
                
        N = self.EIM_approximation.N
        interpolation_method_name = self.EIM_approximation.parametrized_expression.interpolation_method_name()
        description = self.EIM_approximation.parametrized_expression.description()
        
        print(TextBox(interpolation_method_name + " error analysis begins for" + "\n" + "\n".join(description), fill="="))
        print("")
        
        error_analysis_table = ErrorAnalysisTable(self.testing_set)
        error_analysis_table.set_Nmax(N)
        error_analysis_table.add_column("error", group_name="eim", operations=("mean", "max"))
        error_analysis_table.add_column("relative_error", group_name="eim", operations=("mean", "max"))
        
        for (mu_index, mu) in enumerate(self.testing_set):
            print(TextLine(interpolation_method_name + " " + str(mu_index), fill=":"))
            
            self.EIM_approximation.set_mu(mu)
            
            # Evaluate the exact function on the truth grid
            self.EIM_approximation.evaluate_parametrized_expression()
            
            for n in range(1, N + 1): # n = 1, ... N
                n_arg = N_generator(n)
                
                if n_arg is not None:
                    self.EIM_approximation.solve(n_arg)
                    (_, error, _) = self.EIM_approximation.compute_maximum_interpolation_error(n)
                    (_, relative_error, _) = self.EIM_approximation.compute_maximum_interpolation_relative_error(n)
                    error_analysis_table["error", n, mu_index] = abs(error)
                    error_analysis_table["relative_error", n, mu_index] = abs(relative_error)
                else:
                    error_analysis_table["error", n, mu_index] = NotImplemented
                    error_analysis_table["relative_error", n, mu_index] = NotImplemented
        
        # Print
        print("")
        print(error_analysis_table)
        
        print("")
        print(TextBox(interpolation_method_name + " error analysis ends for" + "\n" + "\n".join(description), fill="="))
        print("")
        
        # Export error analysis table
        error_analysis_table.save(self.folder["error_analysis"], "error_analysis" if filename is None else filename)
        
    # Compute the speedup of the empirical interpolation approximation with respect to the
    # exact function over the testing set
    def speedup_analysis(self, N_generator=None, filename=None, **kwargs):
        assert len(kwargs) == 0 # not used in this method
            
        self._init_speedup_analysis(**kwargs)
        self._speedup_analysis(N_generator, filename, **kwargs)
        self._finalize_speedup_analysis(**kwargs)
        
    # Initialize data structures required for the speedup analysis phase
    def _init_speedup_analysis(self, **kwargs):
        # Make sure to clean up snapshot cache to ensure that parametrized
        # expression evaluation is actually carried out
        self.EIM_approximation.snapshot_cache.clear()
        # ... and also disable the capability of importing/exporting truth solutions
        self.disable_import_solution = PatchInstanceMethod(self.EIM_approximation, "import_solution", lambda self_, folder, filename, solution=None: False)
        self.disable_export_solution = PatchInstanceMethod(self.EIM_approximation, "export_solution", lambda self_, folder, filename, solution=None: None)
        self.disable_import_solution.patch()
        self.disable_export_solution.patch()
        
    def _speedup_analysis(self, N_generator=None, filename=None, **kwargs):
        if N_generator is None:
            def N_generator(n):
                return n
                
        N = self.EIM_approximation.N
        interpolation_method_name = self.EIM_approximation.parametrized_expression.interpolation_method_name()
        description = self.EIM_approximation.parametrized_expression.description()
        
        print(TextBox(interpolation_method_name + " speedup analysis begins for" + "\n" + "\n".join(description), fill="="))
        print("")
        
        speedup_analysis_table = SpeedupAnalysisTable(self.testing_set)
        speedup_analysis_table.set_Nmax(N)
        speedup_analysis_table.add_column("speedup", group_name="speedup", operations=("min", "mean", "max"))
        
        evaluate_timer = Timer("parallel")
        EIM_timer = Timer("serial")
        
        for (mu_index, mu) in enumerate(self.testing_set):
            print(TextLine(interpolation_method_name + " " + str(mu_index), fill=":"))
            
            self.EIM_approximation.set_mu(mu)
            
            # Evaluate the exact function on the truth grid
            evaluate_timer.start()
            self.EIM_approximation.evaluate_parametrized_expression()
            elapsed_evaluate = evaluate_timer.stop()
            
            for n in range(1, N + 1): # n = 1, ... N
                n_arg = N_generator(n)
                
                if n_arg is not None:
                    EIM_timer.start()
                    self.EIM_approximation.solve(n_arg)
                    elapsed_EIM = EIM_timer.stop()
                    speedup_analysis_table["speedup", n, mu_index] = elapsed_evaluate/elapsed_EIM
                else:
                    speedup_analysis_table["speedup", n, mu_index] = NotImplemented
        
        # Print
        print("")
        print(speedup_analysis_table)
        
        print("")
        print(TextBox(interpolation_method_name + " speedup analysis ends for" + "\n" + "\n".join(description), fill="="))
        print("")
        
        # Export speedup analysis table
        speedup_analysis_table.save(self.folder["speedup_analysis"], "speedup_analysis" if filename is None else filename)
        
    # Finalize data structures required after the speedup analysis phase
    def _finalize_speedup_analysis(self, **kwargs):
        # Restore the capability to import/export truth solutions
        self.disable_import_solution.unpatch()
        self.disable_export_solution.unpatch()
        del self.disable_import_solution
        del self.disable_export_solution
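
As a side note on the greedy termination used in _offline() and greedy() above, here is a self-contained toy with an invented error sequence, showing that the loop stops as soon as the error relative to the first greedy error drops below self.tol, or once Nmax bases have been selected:

tol = 1e-15
Nmax = 4
greedy_errors = [1.0e-1, 1.0e-3, 1.0e-18, 1.0e-20]  # invented error sequence

N = 0
relative_error_max = 2. * tol  # as in _offline(), guarantees the first iteration
while N < Nmax and relative_error_max >= tol:
    error_max = greedy_errors[N]
    relative_error_max = abs(error_max / greedy_errors[0])
    N += 1

assert N == 3  # stopped early: 1e-18 / 1e-1 = 1e-17 < tol
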
 class _AlsoDecorateErrorEstimationOperators_Class(ParametrizedReducedDifferentialProblem_DecoratedClass):
     
     def init(self, current_stage="online"):
         # Call parent's method (enforcing an empty parent call to _init_error_estimation_operators)
         self.disable_init_error_estimation_operators = PatchInstanceMethod(self, "_init_error_estimation_operators", lambda self_, current_stage="online": None) # may be shared between DEIM and exact evaluation
         self.disable_init_error_estimation_operators.patch()
         ParametrizedReducedDifferentialProblem_DecoratedClass.init(self, current_stage)
         self.disable_init_error_estimation_operators.unpatch()
         del self.disable_init_error_estimation_operators
         # Then, initialize error estimation operators associated to DEIM operators
         self._init_error_estimation_operators_DEIM(current_stage)
         
     def _init_error_estimation_operators_DEIM(self, current_stage="online"):
         # Initialize offline/online switch storage only once (may be shared between DEIM and exact evaluation)
         OfflineOnlineExpansionStorage = self.offline_online_backend.OfflineOnlineExpansionStorage
         OfflineOnlineRieszSolver = self.offline_online_backend.OfflineOnlineRieszSolver
         OfflineOnlineSwitch = self.offline_online_backend.OfflineOnlineSwitch
         if not isinstance(self.riesz, OfflineOnlineSwitch):
             assert isinstance(self.riesz, dict)
             assert len(self.riesz) == 0
             self.riesz = OfflineOnlineExpansionStorage(self, "RieszExpansionStorage")
         if not isinstance(self.error_estimation_operator, OfflineOnlineSwitch):
             assert isinstance(self.error_estimation_operator, dict)
             assert len(self.error_estimation_operator) == 0
             self.error_estimation_operator = OfflineOnlineExpansionStorage(self, "ErrorEstimationOperatorExpansionStorage")
         if not isinstance(self.RieszSolver, OfflineOnlineSwitch):
             assert inspect.isclass(self.RieszSolver)
             self.RieszSolver = OfflineOnlineRieszSolver()
         # Setup offline/online operators storage with DEIM operators
         assert current_stage in ("online", "offline")
         apply_DEIM_at_stages = self.truth_problem._apply_DEIM_at_stages
         if current_stage == "online":
             apply_DEIM_at_stages = ("online", ) if "online" in apply_DEIM_at_stages else ()
         for stage_DEIM in apply_DEIM_at_stages:
             OfflineOnlineSwitch.set_current_stage(stage_DEIM)
             self.riesz.set_is_affine(True)
             self.error_estimation_operator.set_is_affine(True)
             self.RieszSolver.set_is_affine(True)
             self._init_error_estimation_operators(current_stage)
             self.riesz.unset_is_affine()
             self.error_estimation_operator.unset_is_affine()
             self.RieszSolver.unset_is_affine()
         # Update current stage in offline/online switch
         OfflineOnlineSwitch.set_current_stage(current_stage)
         
     def build_error_estimation_operators(self, current_stage="offline"):
         # Call parent's method (enforcing an empty parent call to _build_error_estimation_operators)
         self.disable_build_error_estimation_operators = PatchInstanceMethod(self, "_build_error_estimation_operators", lambda self_, current_stage="offline": None) # may be shared between DEIM and exact evaluation
         self.disable_build_error_estimation_operators.patch()
         ParametrizedReducedDifferentialProblem_DecoratedClass.build_error_estimation_operators(self, current_stage)
         self.disable_build_error_estimation_operators.unpatch()
         del self.disable_build_error_estimation_operators
         # Then, build error estimators associated to DEIM operators
         self._build_error_estimation_operators_DEIM(current_stage)
         
     def _build_error_estimation_operators_DEIM(self, current_stage="offline"):
         # Build offline/online error estimators storage from DEIM operators
         OfflineOnlineSwitch = self.offline_online_backend.OfflineOnlineSwitch
         assert current_stage == "offline"
         for stage_DEIM in self.truth_problem._apply_DEIM_at_stages:
             OfflineOnlineSwitch.set_current_stage(stage_DEIM)
             self._build_error_estimation_operators(current_stage)
             OfflineOnlineSwitch.set_current_stage(current_stage)
 class DEIMDecoratedReducedProblem_Class(ParametrizedReducedDifferentialProblem_DerivedClass):
     
     def __init__(self, truth_problem, **kwargs):
         # Call parent's method
         ParametrizedReducedDifferentialProblem_DerivedClass.__init__(self, truth_problem, **kwargs)
         
         # Copy offline online backend for current problem
         self.offline_online_backend = truth_problem.offline_online_backend
         
     def init(self, current_stage="online"):
         # Call parent's method (enforcing an empty parent call to _init_operators)
         self.disable_init_operators = PatchInstanceMethod(self, "_init_operators", lambda self_, current_stage="online": None) # may be shared between DEIM and exact evaluation
         self.disable_init_operators.patch()
         ParametrizedReducedDifferentialProblem_DerivedClass.init(self, current_stage)
         self.disable_init_operators.unpatch()
         del self.disable_init_operators
         # Then, initialize DEIM operators
         self._init_operators_DEIM(current_stage)
         
     def _init_operators_DEIM(self, current_stage="online"):
         # Initialize offline/online switch storage only once (may be shared between DEIM and exact evaluation)
         OfflineOnlineExpansionStorage = self.offline_online_backend.OfflineOnlineExpansionStorage
         OfflineOnlineExpansionStorageSize = self.offline_online_backend.OfflineOnlineExpansionStorageSize
         OfflineOnlineSwitch = self.offline_online_backend.OfflineOnlineSwitch
         if not isinstance(self.Q, OfflineOnlineSwitch):
             assert isinstance(self.Q, dict)
             assert len(self.Q) == 0
             self.Q = OfflineOnlineExpansionStorageSize()
         if not isinstance(self.operator, OfflineOnlineSwitch):
             assert isinstance(self.operator, dict)
             assert len(self.operator) == 0
             self.operator = OfflineOnlineExpansionStorage(self, "OperatorExpansionStorage")
         # Setup offline/online operators storage with DEIM operators
         assert current_stage in ("online", "offline")
         apply_DEIM_at_stages = self.truth_problem._apply_DEIM_at_stages
         if current_stage == "online":
             apply_DEIM_at_stages = ("online", ) if "online" in apply_DEIM_at_stages else ()
         for stage_DEIM in apply_DEIM_at_stages:
             OfflineOnlineSwitch.set_current_stage(stage_DEIM)
             self.operator.set_is_affine(True)
             self._init_operators(current_stage)
             self.operator.unset_is_affine()
         # Update current stage in offline/online switch
         OfflineOnlineSwitch.set_current_stage(current_stage)
         
     def _solve(self, N, **kwargs):
         self._update_N_DEIM(**kwargs)
         ParametrizedReducedDifferentialProblem_DerivedClass._solve(self, N, **kwargs)
         
     def _update_N_DEIM(self, **kwargs):
         self.truth_problem._update_N_DEIM(**kwargs)
         
     def build_reduced_operators(self, current_stage="offline"):
         # Call parent's method (enforcing an empty parent call to _build_reduced_operators)
         self.disable_build_reduced_operators = PatchInstanceMethod(self, "_build_reduced_operators", lambda self_, current_stage="offline": None) # may be shared between DEIM and exact evaluation
         self.disable_build_reduced_operators.patch()
         ParametrizedReducedDifferentialProblem_DerivedClass.build_reduced_operators(self, current_stage)
         self.disable_build_reduced_operators.unpatch()
         del self.disable_build_reduced_operators
         # Then, build DEIM operators
         self._build_reduced_operators_DEIM(current_stage)
         
     def _build_reduced_operators_DEIM(self, current_stage="offline"):
         # Build offline/online operators storage from DEIM operators
         OfflineOnlineSwitch = self.offline_online_backend.OfflineOnlineSwitch
         assert current_stage == "offline"
         for stage_DEIM in self.truth_problem._apply_DEIM_at_stages:
             OfflineOnlineSwitch.set_current_stage(stage_DEIM)
             self._build_reduced_operators(current_stage)
         # Update current stage in offline/online switch
         OfflineOnlineSwitch.set_current_stage(current_stage)
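
The init and build_reduced_operators overrides above all follow the same pattern: temporarily replace the parent's hook (_init_operators, _build_reduced_operators, ...) with a no-op via PatchInstanceMethod, call the parent, unpatch, and only then run the DEIM-specific step. A minimal, self-contained sketch of that control flow, using a hypothetical ToyParent/ToyChild pair and a stand-in patcher with the same patch()/unpatch() interface (not the RBniCS implementation):

import types

class StandInPatcher:
    # Stand-in exposing the same patch()/unpatch() interface used above (hypothetical).
    def __init__(self, instance, method_name, replacement):
        self._instance, self._name, self._replacement = instance, method_name, replacement
        self._original = None

    def patch(self):
        self._original = getattr(self._instance, self._name)
        setattr(self._instance, self._name, types.MethodType(self._replacement, self._instance))

    def unpatch(self):
        setattr(self._instance, self._name, self._original)

class ToyParent:
    def init(self):
        self._init_operators()  # hook that the derived class wants to skip during the parent call

    def _init_operators(self):
        print("standard operators")

class ToyChild(ToyParent):
    def init(self):
        disable = StandInPatcher(self, "_init_operators", lambda self_: None)
        disable.patch()              # parent's call to the hook becomes a no-op
        ToyParent.init(self)
        disable.unpatch()
        self._init_operators_DEIM()  # then perform the specialized initialization

    def _init_operators_DEIM(self):
        print("DEIM operators")

ToyChild().init()  # prints "DEIM operators" only
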
Example #28
        class ExactParametrizedFunctionsDecoratedProblem_Class(
                ParametrizedDifferentialProblem_DerivedClass):

            # Default initialization of members
            def __init__(self, V, **kwargs):
                # Call the parent initialization
                ParametrizedDifferentialProblem_DerivedClass.__init__(
                    self, V, **kwargs)
                # Storage for symbolic parameters
                self.mu_symbolic = None

                # Store values passed to decorator
                self._store_exact_evaluation_stages(stages)

                # Generate offline online backend for current problem
                self.offline_online_backend = OfflineOnlineBackend(self.name())

            @overload(str)
            def _store_exact_evaluation_stages(self, stages):
                assert stages != "online", "This choice does not make any sense because it requires an EIM/DEIM offline stage which then is not used online"
                assert stages == "offline"
                self._apply_exact_evaluation_at_stages = (stages, )

            @overload(tuple_of(str))
            def _store_exact_evaluation_stages(self, stages):
                assert len(stages) in (1, 2)
                assert stages[0] in ("offline", "online")
                if len(stages) > 1:
                    assert stages[1] in ("offline", "online")
                    assert stages[0] != stages[1]
                self._apply_exact_evaluation_at_stages = stages

            def init(self):
                has_disable_init_operators = hasattr(
                    self, "disable_init_operators"
                )  # may be shared between EIM/DEIM and exact evaluation
                # Call parent's method (enforcing an empty parent call to _init_operators)
                if not has_disable_init_operators:
                    self.disable_init_operators = PatchInstanceMethod(
                        self, "_init_operators", lambda self_: None)
                self.disable_init_operators.patch()
                ParametrizedDifferentialProblem_DerivedClass.init(self)
                self.disable_init_operators.unpatch()
                if not has_disable_init_operators:
                    del self.disable_init_operators
                # Then, initialize exact operators
                self._init_operators_exact()

            def _init_operators_exact(self):
                # Initialize symbolic parameters only once
                if self.mu_symbolic is None:
                    self.mu_symbolic = SymbolicParameters(
                        self, self.V, self.mu)
                # Initialize offline/online switch storage only once (may be shared between EIM/DEIM and exact evaluation)
                OfflineOnlineClassMethod = self.offline_online_backend.OfflineOnlineClassMethod
                OfflineOnlineExpansionStorage = self.offline_online_backend.OfflineOnlineExpansionStorage
                OfflineOnlineExpansionStorageSize = self.offline_online_backend.OfflineOnlineExpansionStorageSize
                OfflineOnlineSwitch = self.offline_online_backend.OfflineOnlineSwitch
                if not isinstance(self.Q, OfflineOnlineSwitch):
                    assert isinstance(self.Q, dict)
                    assert len(self.Q) == 0
                    self.Q = OfflineOnlineExpansionStorageSize()
                if not isinstance(self.operator, OfflineOnlineSwitch):
                    assert isinstance(self.operator, dict)
                    assert len(self.operator) == 0
                    self.operator = OfflineOnlineExpansionStorage(
                        self, "OperatorExpansionStorage")
                if not isinstance(self.assemble_operator, OfflineOnlineSwitch):
                    assert inspect.ismethod(self.assemble_operator)
                    self._assemble_operator_exact = self.assemble_operator
                    self.assemble_operator = OfflineOnlineClassMethod(
                        self, "assemble_operator")
                if not isinstance(self.compute_theta, OfflineOnlineSwitch):
                    assert inspect.ismethod(self.compute_theta)
                    self._compute_theta_exact = self.compute_theta
                    self.compute_theta = OfflineOnlineClassMethod(
                        self, "compute_theta")
                # Temporarily replace float parameters with symbols, so that the forms do not hardcode
                # the current value of the parameter while assembling.
                mu_float = self.mu
                self.mu = self.mu_symbolic
                # Setup offline/online switches
                former_stage = OfflineOnlineSwitch.get_current_stage()
                for stage_exact in self._apply_exact_evaluation_at_stages:
                    OfflineOnlineSwitch.set_current_stage(stage_exact)
                    # Enforce exact evaluation of assemble_operator and compute_theta
                    self.assemble_operator.attach(
                        self._assemble_operator_exact, lambda term: True)
                    self.compute_theta.attach(self._compute_theta_exact,
                                              lambda term: True)
                    # Setup offline/online operators storage with exact operators
                    self.operator.set_is_affine(False)
                    self._init_operators()
                    self.operator.unset_is_affine()
                # Restore former stage in offline/online switch storage
                OfflineOnlineSwitch.set_current_stage(former_stage)
                # Restore float parameters
                self.mu = mu_float

            def solve(self, **kwargs):
                # Exact operators should be used regardless of the current stage
                OfflineOnlineSwitch = self.offline_online_backend.OfflineOnlineSwitch
                former_stage = OfflineOnlineSwitch.get_current_stage()
                OfflineOnlineSwitch.set_current_stage("offline")
                # Call parent's method
                solution = ParametrizedDifferentialProblem_DerivedClass.solve(
                    self, **kwargs)
                # Restore former stage in offline/online switch storage
                OfflineOnlineSwitch.set_current_stage(former_stage)
                # Return
                return solution

            def compute_output(self):
                # Exact operators should be used regardless of the current stage
                OfflineOnlineSwitch = self.offline_online_backend.OfflineOnlineSwitch
                former_stage = OfflineOnlineSwitch.get_current_stage()
                OfflineOnlineSwitch.set_current_stage("offline")
                # Call parent's method
                output = ParametrizedDifferentialProblem_DerivedClass.compute_output(
                    self)
                # Restore former stage in offline/online switch storage
                OfflineOnlineSwitch.set_current_stage(former_stage)
                # Return
                return output

            def _cache_key_from_kwargs(self, **kwargs):
                cache_key = ParametrizedDifferentialProblem_DerivedClass._cache_key_from_kwargs(
                    self, **kwargs)
                # Change cache key depending on current stage
                OfflineOnlineSwitch = self.offline_online_backend.OfflineOnlineSwitch
                if OfflineOnlineSwitch.get_current_stage(
                ) in self._apply_exact_evaluation_at_stages:
                    # Append current stage to cache key
                    cache_key = cache_key + ("exact_evaluation", )
                # Return
                return cache_key
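
solve and compute_output above wrap the parent call between saving the current offline/online stage, forcing the "offline" (exact) operators, and restoring the former stage afterwards. A minimal sketch of that save/force/restore pattern with a hypothetical ToySwitch exposing the same get_current_stage()/set_current_stage() interface; a try/finally is used here so the restore also happens if the wrapped call raises:

class ToySwitch:
    # Hypothetical stand-in for the offline/online switch used above.
    _stage = "online"

    @classmethod
    def get_current_stage(cls):
        return cls._stage

    @classmethod
    def set_current_stage(cls, stage):
        assert stage in ("offline", "online")
        cls._stage = stage

def call_with_exact_operators(call):
    # Force the exact ("offline") operators regardless of the current stage,
    # then restore whatever stage was active before the call.
    former_stage = ToySwitch.get_current_stage()
    ToySwitch.set_current_stage("offline")
    try:
        return call()
    finally:
        ToySwitch.set_current_stage(former_stage)

print(call_with_exact_operators(ToySwitch.get_current_stage))  # offline
print(ToySwitch.get_current_stage())                           # online (restored)
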
Example #29
def ParametrizedExpression(truth_problem,
                           parametrized_expression_code=None,
                           *args,
                           **kwargs):
    if parametrized_expression_code is None:
        return None

    assert "mu" in kwargs
    mu = kwargs["mu"]
    assert mu is not None
    assert isinstance(mu, tuple)
    P = len(mu)
    for p in range(P):
        assert isinstance(parametrized_expression_code, (tuple, str))
        if isinstance(parametrized_expression_code, tuple):
            if isinstance(parametrized_expression_code[0], tuple):
                matrix_after_replacements = list()
                for row in parametrized_expression_code:
                    assert isinstance(row, tuple)
                    new_row = list()
                    for item in row:
                        assert isinstance(item, str)
                        new_row.append(
                            item.replace("mu[" + str(p) + "]", "mu_" + str(p)))
                    new_row = tuple(new_row)
                    matrix_after_replacements.append(new_row)
                parametrized_expression_code = tuple(matrix_after_replacements)
            else:
                vector_after_replacements = list()
                for item in parametrized_expression_code:
                    assert isinstance(item, str)
                    vector_after_replacements.append(
                        item.replace("mu[" + str(p) + "]", "mu_" + str(p)))
                parametrized_expression_code = tuple(vector_after_replacements)
        elif isinstance(parametrized_expression_code, str):
            parametrized_expression_code = parametrized_expression_code.replace(
                "mu[" + str(p) + "]", "mu_" + str(p))
        else:
            raise TypeError(
                "Invalid expression type in ParametrizedExpression")

    # Detect mesh
    if "domain" in kwargs:
        mesh = kwargs["domain"]
    else:
        mesh = truth_problem.V.mesh()

    # Prepare a dictionary of mu
    mu_dict = dict()
    for (p, mu_p) in enumerate(mu):
        assert isinstance(mu_p, (Expression, Number))
        if isinstance(mu_p, Number):
            mu_dict["mu_" + str(p)] = mu_p
        elif isinstance(mu_p, Expression):
            assert is_parametrized_constant(mu_p)
            mu_dict["mu_" + str(p)] = parametrized_constant_to_float(
                mu_p, point=mesh.coordinates()[0])
    del kwargs["mu"]
    kwargs.update(mu_dict)

    # Initialize expression
    expression = Expression(parametrized_expression_code, *args, **kwargs)
    expression._mu = mu  # to avoid repeated assignments
    expression.problem = truth_problem

    # Store mesh
    expression._mesh = mesh

    # Cache all problem -> expression relation
    first_parametrized_expression_for_truth_problem = (
        truth_problem not in _truth_problem_to_parametrized_expressions)
    if first_parametrized_expression_for_truth_problem:
        _truth_problem_to_parametrized_expressions[truth_problem] = list()
    _truth_problem_to_parametrized_expressions[truth_problem].append(
        expression)

    # Keep mu in sync
    if first_parametrized_expression_for_truth_problem:

        def generate_overridden_set_mu(standard_set_mu):
            def overridden_set_mu(self, mu):
                standard_set_mu(mu)
                for expression_ in _truth_problem_to_parametrized_expressions[
                        self]:
                    if expression_._mu is not mu:
                        expression_._set_mu(mu)

            return overridden_set_mu

        if (
                "set_mu" in _original_setters
                and truth_problem in _original_setters["set_mu"]
        ):  # truth_problem.set_mu was already patched by the decorator @sync_setters
            standard_set_mu = _original_setters["set_mu"][truth_problem]
            overridden_set_mu = generate_overridden_set_mu(standard_set_mu)
            _original_setters["set_mu"][truth_problem] = types.MethodType(
                overridden_set_mu, truth_problem)
        else:
            standard_set_mu = truth_problem.set_mu
            overridden_set_mu = generate_overridden_set_mu(standard_set_mu)
            PatchInstanceMethod(truth_problem, "set_mu",
                                overridden_set_mu).patch()

    def expression_set_mu(self, mu):
        assert isinstance(mu, tuple)
        assert len(mu) >= len(self._mu)
        mu = mu[:len(self._mu)]
        for (p, mu_p) in enumerate(mu):
            assert isinstance(mu_p, (Expression, Number))
            if isinstance(mu_p, Number):
                setattr(self, "mu_" + str(p), mu_p)
            elif isinstance(mu_p, Expression):
                assert is_parametrized_constant(mu_p)
                setattr(
                    self, "mu_" + str(p),
                    parametrized_constant_to_float(
                        mu_p, point=mesh.coordinates()[0]))
        self._mu = mu

    AttachInstanceMethod(expression, "_set_mu", expression_set_mu).attach()
    # Note that this override is different from the one that we use in decorated problems,
    # since (1) we do not want to define a new child class, (2) we have to execute some preprocessing
    # on the data, (3) it is a one-way propagation rather than a sync.
    # For these reasons, the decorator @sync_setters is not used; instead, part of its code is duplicated here.

    # Possibly also keep time in sync
    if hasattr(truth_problem, "set_time"):
        if first_parametrized_expression_for_truth_problem:

            def generate_overridden_set_time(standard_set_time):
                def overridden_set_time(self, t):
                    standard_set_time(t)
                    for expression_ in _truth_problem_to_parametrized_expressions[
                            self]:
                        if hasattr(expression_, "t"):
                            if expression_.t is not t:
                                assert isinstance(expression_.t, Number)
                                expression_.t = t

                return overridden_set_time

            if (
                    "set_time" in _original_setters
                    and truth_problem in _original_setters["set_time"]
            ):  # truth_problem.set_time was already patched by the decorator @sync_setters
                standard_set_time = _original_setters["set_time"][
                    truth_problem]
                overridden_set_time = generate_overridden_set_time(
                    standard_set_time)
                _original_setters["set_time"][
                    truth_problem] = types.MethodType(overridden_set_time,
                                                      truth_problem)
            else:
                standard_set_time = truth_problem.set_time
                overridden_set_time = generate_overridden_set_time(
                    standard_set_time)
                PatchInstanceMethod(truth_problem, "set_time",
                                    overridden_set_time).patch()

    return expression
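
ParametrizedExpression above rewrites every occurrence of mu[p] in the expression code to mu_p, so that the components of mu can be passed to the Expression constructor as keyword parameters (mu_dict). A self-contained sketch of just that string preprocessing, on a hypothetical expression and parameter tuple:

# Hypothetical inputs; only the string substitution performed above is shown.
mu = (1.0, 2.5)
code = "mu[0] * sin(x[0]) + mu[1] * x[1]"

for p in range(len(mu)):
    code = code.replace("mu[" + str(p) + "]", "mu_" + str(p))

mu_dict = {"mu_" + str(p): mu_p for (p, mu_p) in enumerate(mu)}

print(code)     # mu_0 * sin(x[0]) + mu_1 * x[1]
print(mu_dict)  # {'mu_0': 1.0, 'mu_1': 2.5}
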
    class TimeDependentReductionMethod_Class(
            DifferentialProblemReductionMethod_DerivedClass):

        # Default initialization of members
        def __init__(self, truth_problem, **kwargs):
            # Call to parent
            DifferentialProblemReductionMethod_DerivedClass.__init__(
                self, truth_problem, **kwargs)

            # Indices for undersampling snapshots, e.g. after a transient
            self.reduction_first_index = None  # keep temporal evolution from the beginning by default
            self.reduction_delta_index = None  # keep every time step by default
            self.reduction_last_index = None  # keep temporal evolution until the end by default

        # Set reduction initial time
        def set_reduction_initial_time(self, t0):
            assert isinstance(t0, Number)
            assert t0 >= self.truth_problem.t0
            self.reduction_first_index = int(t0 / self.truth_problem.dt)

        # Set reduction time step size
        def set_reduction_time_step_size(self, dt):
            assert isinstance(dt, Number)
            assert dt >= self.truth_problem.dt
            self.reduction_delta_index = int(dt / self.truth_problem.dt)
            assert isclose(
                self.reduction_delta_index * self.truth_problem.dt, dt
            ), "Reduction time step size should be a multiple of discretization time step size"

        # Set reduction final time
        def set_reduction_final_time(self, T):
            assert isinstance(T, Number)
            assert T <= self.truth_problem.T
            self.reduction_last_index = int(T / self.truth_problem.dt)

        def postprocess_snapshot(self, snapshot_over_time, snapshot_index):
            postprocessed_snapshot = list()
            for (k, snapshot_k) in enumerate(snapshot_over_time):
                self.reduced_problem.set_time(k * self.reduced_problem.dt)
                postprocessed_snapshot_k = DifferentialProblemReductionMethod_DerivedClass.postprocess_snapshot(
                    self, snapshot_k, snapshot_index)
                postprocessed_snapshot.append(postprocessed_snapshot_k)
            return postprocessed_snapshot

        def _patch_truth_solve(self, force, **kwargs):
            if "with_respect_to" in kwargs:
                assert inspect.isfunction(kwargs["with_respect_to"])
                other_truth_problem = kwargs["with_respect_to"](
                    self.truth_problem)

                def patched_truth_solve(self_, **kwargs_):
                    other_truth_problem.solve(**kwargs_)
                    assign(self.truth_problem._solution,
                           other_truth_problem._solution)
                    assign(self.truth_problem._solution_dot,
                           other_truth_problem._solution_dot)
                    assign(self.truth_problem._solution_over_time,
                           other_truth_problem._solution_over_time)
                    assign(self.truth_problem._solution_dot_over_time,
                           other_truth_problem._solution_dot_over_time)
                    return self.truth_problem._solution_over_time

                self.patch_truth_solve = PatchInstanceMethod(
                    self.truth_problem, "solve", patched_truth_solve)
                self.patch_truth_solve.patch()

                # Initialize the affine expansion in the other truth problem
                other_truth_problem.init()
            else:
                other_truth_problem = self.truth_problem

            # Clean up solution caching and disable I/O
            if force:
                # Make sure to clean up problem and reduced problem solution cache to ensure that
                # solution and reduced solution are actually computed
                other_truth_problem._solution_over_time_cache.clear()
                other_truth_problem._solution_dot_over_time_cache.clear()
                other_truth_problem._output_over_time_cache.clear()
                self.reduced_problem._solution_over_time_cache.clear()
                self.reduced_problem._solution_dot_over_time_cache.clear()
                self.reduced_problem._output_over_time_cache.clear()

                # Disable the capability of importing/exporting truth solutions
                def disable_import_solution_method(self_,
                                                   folder=None,
                                                   filename=None,
                                                   solution_over_time=None,
                                                   component=None,
                                                   suffix=None):
                    raise OSError

                self.disable_import_solution = PatchInstanceMethod(
                    other_truth_problem, "import_solution",
                    disable_import_solution_method)

                def disable_export_solution_method(self_,
                                                   folder=None,
                                                   filename=None,
                                                   solution_over_time=None,
                                                   component=None,
                                                   suffix=None):
                    pass

                self.disable_export_solution = PatchInstanceMethod(
                    other_truth_problem, "export_solution",
                    disable_export_solution_method)
                self.disable_import_solution.patch()
                self.disable_export_solution.patch()
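
The set_reduction_* helpers above turn times into snapshot indices by dividing by the truth time step and asserting that the requested reduction step is a multiple of it. A small worked example with hypothetical time values (math.isclose stands in here for the isclose used above):

from math import isclose

truth_dt = 0.25            # hypothetical truth-problem time step
dt, t0, T = 0.5, 1.0, 4.0  # hypothetical reduction time step and time window

reduction_delta_index = int(dt / truth_dt)  # 2: keep one snapshot every 2 time steps
assert isclose(reduction_delta_index * truth_dt, dt), (
    "Reduction time step size should be a multiple of discretization time step size")

reduction_first_index = int(t0 / truth_dt)  # 4: skip the initial transient up to t0
reduction_last_index = int(T / truth_dt)    # 16: keep the evolution up to T

print(reduction_first_index, reduction_delta_index, reduction_last_index)
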