Example #1
 def enrich(self, functions, component=None, weights=None, copy=True):
     # Append to storage
     self._enrich(functions, component, weights, copy)
     # Reset precomputed slices
     self._precomputed_slices = Cache()
     # Prepare trivial precomputed slice
     self._precomputed_slices[0, len(self._list)] = self
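The pattern above resets the slice cache on every enrichment and then immediately re-registers the whole range [0, len) as a trivial slice, so a later request for the full list returns the object itself instead of building a copy. A minimal self-contained sketch of the same idea, with a plain dict standing in for the library's dict-like Cache (an assumption made to keep the sketch dependency-free):

    class SliceableList:
        def __init__(self, items=None):
            self._list = list(items or [])
            self._precomputed_slices = {}  # from (start, stop) to SliceableList

        def enrich(self, functions):
            # Append to storage
            self._list.extend(functions)
            # Reset precomputed slices, then re-register the trivial full slice
            self._precomputed_slices = {}
            self._precomputed_slices[0, len(self._list)] = self

        def __getitem__(self, key):
            start = key.start if key.start is not None else 0
            stop = key.stop if key.stop is not None else len(self._list)
            if (start, stop) not in self._precomputed_slices:
                self._precomputed_slices[start, stop] = SliceableList(self._list[start:stop])
            return self._precomputed_slices[start, stop]

    functions = SliceableList()
    functions.enrich(["f0", "f1", "f2"])
    assert functions[0:3] is functions  # the trivial slice is the object itself
    assert functions[0:2]._list == ["f0", "f1"]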
Example #2
 def __init__(self, space):
     self.space = space
     self._components_name = list()
     self._component_name_to_basis_component_index = ComponentNameToBasisComponentIndexDict()
     self._component_name_to_basis_component_length = OnlineSizeDict()
     self._enrich_memory = Cache()
     self._precomputed_slices = Cache() # from tuple to FunctionsList
Example #3
    def __init__(self, reduced_problem, spectrum, eigensolver_parameters, folder_prefix):
        # Call the parent initialization
        ParametrizedProblem.__init__(self, folder_prefix)  # this class does not export anything
        self.reduced_problem = reduced_problem

        # Matrices/vectors resulting from the truth discretization
        self.operator = {
            "stability_factor_left_hand_matrix": None,
            # OnlineAffineExpansionStorage
            "stability_factor_right_hand_matrix": None
            # OnlineAffineExpansionStorage, even though it will contain only one matrix
        }
        self.spectrum = spectrum
        self.eigensolver_parameters = eigensolver_parameters

        # Solution
        self._eigenvalue = 0.
        self._eigenvector = None  # OnlineFunction

        # I/O
        def _eigenvalue_cache_key_generator(*args, **kwargs):
            return args

        self._eigenvalue_cache = Cache(
            "reduced problems",
            key_generator=_eigenvalue_cache_key_generator
        )

        def _eigenvector_cache_key_generator(*args, **kwargs):
            return args

        self._eigenvector_cache = Cache(
            "reduced problems",
            key_generator=_eigenvector_cache_key_generator
        )
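Both caches above are configured with a key_generator callable that maps the lookup arguments onto the actual dictionary key. A rough stand-in illustrating that calling convention (the real Cache in rbnics.utils.cache additionally takes a configuration category such as "reduced problems" as its first argument; the behavior sketched below is an assumption, not the library's documented contract):

    class KeyGeneratorCache(dict):
        def __init__(self, key_generator):
            super().__init__()
            self._key_generator = key_generator

        @staticmethod
        def _as_tuple(key):
            return key if isinstance(key, tuple) else (key, )

        def __getitem__(self, key):
            return super().__getitem__(self._key_generator(*self._as_tuple(key)))

        def __setitem__(self, key, value):
            super().__setitem__(self._key_generator(*self._as_tuple(key)), value)

    eigenvalue_cache = KeyGeneratorCache(key_generator=lambda *args, **kwargs: args)
    eigenvalue_cache[(0.5, 2.0)] = 1.23  # e.g. keyed by the parameter mu
    assert eigenvalue_cache[(0.5, 2.0)] == 1.23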
Example #4
 def __init__(self, space, component):
     if component is None:
         self.space = space
     else:
         self.space = wrapping.get_function_subspace(space, component)
     self.mpi_comm = wrapping.get_mpi_comm(space)
     self._list = list()  # of functions
     self._precomputed_slices = Cache()  # from tuple to FunctionsList
Example #5
 def __init__(self, truth_problem, term, multiply_by_theta, spectrum, eigensolver_parameters, folder_prefix):
     # Call the parent initialization
     ParametrizedProblem.__init__(self, folder_prefix) # this class does not export anything
     self.truth_problem = truth_problem
     
     # Matrices/vectors resulting from the truth discretization
     self.term = term
     assert isinstance(self.term, (tuple, str))
     if isinstance(self.term, tuple):
         assert len(self.term) == 2
         assert isinstance(self.term[0], str)
         assert isinstance(self.term[1], int)
     self.multiply_by_theta = multiply_by_theta
     assert isinstance(self.multiply_by_theta, bool)
     self.operator = None # AffineExpansionStorage
     self.inner_product = None # AffineExpansionStorage, even though it will contain only one matrix
     self.spectrum = spectrum
     self.eigensolver_parameters = eigensolver_parameters
     
     # Avoid useless computations
     self._eigenvalue = 0.
     self._eigenvector = Function(truth_problem.V)
     # I/O
     self.folder["cache"] = os.path.join(folder_prefix, "cache")
     def _eigenvalue_cache_key_generator(*args, **kwargs):
         return args
     def _eigenvalue_cache_import(filename):
         self.import_eigenvalue(self.folder["cache"], filename)
         return self._eigenvalue
     def _eigenvalue_cache_export(filename):
         self.export_eigenvalue(self.folder["cache"], filename)
     def _eigenvalue_cache_filename_generator(*args, **kwargs):
         return self._cache_file(args)
     self._eigenvalue_cache = Cache(
         "problems",
         key_generator=_eigenvalue_cache_key_generator,
         import_=_eigenvalue_cache_import,
         export=_eigenvalue_cache_export,
         filename_generator=_eigenvalue_cache_filename_generator
     )
     def _eigenvector_cache_key_generator(*args, **kwargs):
         return args
     def _eigenvector_cache_import(filename):
         self.import_eigenvector(self.folder["cache"], filename)
         return self._eigenvector
     def _eigenvector_cache_export(filename):
         self.export_eigenvector(self.folder["cache"], filename)
     def _eigenvector_cache_filename_generator(*args, **kwargs):
         return self._cache_file(args)
     self._eigenvector_cache = Cache(
         "problems",
         key_generator=_eigenvector_cache_key_generator,
         import_=_eigenvector_cache_import,
         export=_eigenvector_cache_export,
         filename_generator=_eigenvector_cache_filename_generator
     )
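On top of the in-memory key_generator, this cache also receives import_/export/filename_generator hooks, so that eigenpairs computed in a previous run can be read back from the "cache" folder instead of being recomputed. A hedged sketch of how such hooks could cooperate on a miss (the fallback-to-disk behavior below is an assumption about Cache, kept deliberately minimal):

    class DiskBackedCache(dict):
        def __init__(self, key_generator, import_, export, filename_generator):
            super().__init__()
            self._key_generator = key_generator
            self._import = import_                 # reads a value from file; raises on missing file
            self._export = export                  # writes the current value to file
            self._filename_generator = filename_generator

        def __getitem__(self, key):
            storage_key = self._key_generator(*key)
            try:
                return super().__getitem__(storage_key)
            except KeyError:
                # Memory miss: try to restore a value exported by a previous run
                value = self._import(self._filename_generator(*key))
                super().__setitem__(storage_key, value)
                return value

        def __setitem__(self, key, value):
            super().__setitem__(self._key_generator(*key), value)
            self._export(self._filename_generator(*key))  # persist for later runs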
Example #6
    def __init__(self, truth_problem, **kwargs):

        # Call to parent
        ParametrizedProblem.__init__(self, truth_problem.name())

        # $$ ONLINE DATA STRUCTURES $$ #
        # Online reduced space dimension
        self.N = None  # integer (for problems with one component) or dict of integers (for problems with several components)
        self.N_bc = None  # integer (for problems with one component) or dict of integers (for problems with several components)
        self.dirichlet_bc = None  # bool (for problems with one component) or dict of bools (for problems with several components)
        self.dirichlet_bc_are_homogeneous = None  # bool (for problems with one component) or dict of bools (for problems with several components)
        self._combined_and_homogenized_dirichlet_bc = None
        # Form names and order
        self.terms = truth_problem.terms
        self.terms_order = truth_problem.terms_order
        self.components = truth_problem.components
        # Number of terms in the affine expansion
        self.Q = dict()  # from string to integer
        # Reduced order operators
        self.OperatorExpansionStorage = OnlineAffineExpansionStorage
        self.operator = dict()  # from string to OperatorExpansionStorage
        self.inner_product = None  # AffineExpansionStorage (for problems with one component) or dict of AffineExpansionStorage (for problems with several components), even though it will contain only one matrix
        self._combined_inner_product = None
        self.projection_inner_product = None  # AffineExpansionStorage (for problems with one component) or dict of AffineExpansionStorage (for problems with several components), even though it will contain only one matrix
        self._combined_projection_inner_product = None
        # Solution
        self._solution = None  # OnlineFunction
        self._output = 0.

        # I/O
        def _solution_cache_key_generator(*args, **kwargs):
            assert len(args) == 2
            assert args[0] == self.mu
            return self._cache_key_from_N_and_kwargs(args[1], **kwargs)

        self._solution_cache = Cache(
            "reduced problems", key_generator=_solution_cache_key_generator)

        def _output_cache_key_generator(*args, **kwargs):
            assert len(args) == 2
            assert args[0] == self.mu
            return self._cache_key_from_N_and_kwargs(args[1], **kwargs)

        self._output_cache = Cache("reduced problems",
                                   key_generator=_output_cache_key_generator)

        # $$ OFFLINE DATA STRUCTURES $$ #
        # High fidelity problem
        self.truth_problem = truth_problem
        # Basis functions matrix
        self.basis_functions = None  # BasisFunctionsMatrix
        # I/O
        self.folder["basis"] = os.path.join(self.folder_prefix, "basis")
        self.folder["reduced_operators"] = os.path.join(
            self.folder_prefix, "reduced_operators")
Example #7
 def __init__(self, arg1, arg2):
     self._content = None
     self._precomputed_slices = Cache()  # from tuple to AffineExpansionStorage
     self._smallest_key = None
     self._previous_key = None
     self._largest_key = None
     # Auxiliary storage for __getitem__ slicing
     self._component_name_to_basis_component_index = None  # will be filled in in __setitem__, if required
     self._component_name_to_basis_component_length = None  # will be filled in in __setitem__, if required
     # Initialize arguments from inputs
     self._init(arg1, arg2)
Example #8
 def __init__(self, *shape):
     self._shape = shape
     self._type = "empty"
     self._content = dict()
     self._precomputed_slices = Cache() # from tuple to NonAffineExpansionStorage
     assert len(shape) in (1, 2)
     if len(shape) == 1:
         self._smallest_key = 0
         self._largest_key = shape[0] - 1
     else:
         self._smallest_key = (0, 0)
         self._largest_key = (shape[0] - 1, shape[1] - 1)
Example #9
        def __init__(self, truth_problem, **kwargs):
            # Call the parent initialization
            ParametrizedReducedDifferentialProblem_DerivedClass.__init__(
                self, truth_problem, **kwargs)
            # Store quantities related to the time discretization
            assert truth_problem.t == 0.
            self.t = 0.
            self.t0 = truth_problem.t0
            assert truth_problem.dt is not None
            self.dt = truth_problem.dt
            assert truth_problem.T is not None
            self.T = truth_problem.T
            # Additional options for time stepping may be stored in the following dict
            self._time_stepping_parameters = dict()
            self._time_stepping_parameters["initial_time"] = self.t0
            self._time_stepping_parameters["time_step_size"] = self.dt
            self._time_stepping_parameters["final_time"] = self.T
            # Online reduced space dimension
            self.initial_condition = None  # bool (for problems with one component) or dict of bools (for problems with several components)
            self.initial_condition_is_homogeneous = None  # bool (for problems with one component) or dict of bools (for problems with several components)
            # Number of terms in the affine expansion
            self.Q_ic = None  # integer (for problems with one component) or dict of integers (for problems with several components)
            # Time derivative of the solution, at the current time
            self._solution_dot = None  # OnlineFunction
            # Solution and output over time
            self._solution_over_time = list()  # of Functions
            self._solution_dot_over_time = list()  # of Functions
            self._output_over_time = list()  # of numbers

            # I/O
            def _solution_cache_key_generator(*args, **kwargs):
                assert len(args) == 2
                assert args[0] == self.mu
                return self._cache_key_from_N_and_kwargs(args[1], **kwargs)

            self._solution_over_time_cache = Cache(
                "reduced problems",
                key_generator=_solution_cache_key_generator)
            self._solution_dot_over_time_cache = Cache(
                "reduced problems",
                key_generator=_solution_cache_key_generator)
            del self._solution_cache

            def _output_cache_key_generator(*args, **kwargs):
                assert len(args) == 2
                assert args[0] == self.mu
                return self._cache_key_from_N_and_kwargs(args[1], **kwargs)

            self._output_over_time_cache = Cache(
                "reduced problems", key_generator=_output_cache_key_generator)
            del self._output_cache
Example #10
 def __init__(self, space, component=None):
     if component is not None:
         self.space = wrapping.get_function_subspace(space, component)
     else:
         self.space = space
     self.mpi_comm = wrapping.get_mpi_comm(space)
     self._components = dict()  # of FunctionsList
     self._precomputed_sub_components = Cache()  # from tuple to FunctionsList
     self._precomputed_slices = Cache()  # from tuple to FunctionsList
     self._components_name = list()  # filled in by init
     self._component_name_to_basis_component_index = ComponentNameToBasisComponentIndexDict()  # filled in by init
     self._component_name_to_basis_component_length = OnlineSizeDict()
Example #11
 def __init__(self, truth_problem, parametrized_expression, folder_prefix, basis_generation):
     # Call the parent initialization
     EIMApproximation.__init__(self, truth_problem, parametrized_expression, folder_prefix, basis_generation)
     
     # Store quantities related to the time discretization
     self.t0 = 0.
     self.t = 0.
     self.dt = None
     self.T = None
     
     # I/O
     def _snapshot_cache_key_generator(*args, **kwargs):
         assert len(args) == 2
         assert args[0] == self.mu
         assert args[1] == self.t
         assert len(kwargs) == 0
         return self._cache_key()
     def _snapshot_cache_import(filename):
         self.import_solution(self.folder["cache"], filename)
         return self.snapshot
     def _snapshot_cache_export(filename):
         self.export_solution(self.folder["cache"], filename)
     def _snapshot_cache_filename_generator(*args, **kwargs):
         assert len(args) == 2
         assert args[0] == self.mu
         assert args[1] == self.t
         assert len(kwargs) == 0
         return self._cache_file()
     self._snapshot_cache = Cache(
         "EIM",
         key_generator=_snapshot_cache_key_generator,
         import_=_snapshot_cache_import,
         export=_snapshot_cache_export,
         filename_generator=_snapshot_cache_filename_generator
     )
Example #12
    def __init__(self, truth_problem, parametrized_expression, folder_prefix,
                 basis_generation):
        # Call the parent initialization
        ParametrizedProblem.__init__(self, folder_prefix)
        # Store the parametrized expression
        self.parametrized_expression = parametrized_expression
        self.truth_problem = truth_problem
        assert basis_generation in ("Greedy", "POD")
        self.basis_generation = basis_generation

        # $$ ONLINE DATA STRUCTURES $$ #
        # Online reduced space dimension
        self.N = 0
        # Define additional storage for EIM:
        # Interpolation locations selected by the greedy algorithm (either a ReducedVertices or a ReducedMesh)
        self.interpolation_locations = parametrized_expression.create_interpolation_locations_container()
        # Interpolation matrix
        self.interpolation_matrix = OnlineAffineExpansionStorage(1)
        # Solution
        self._interpolation_coefficients = None  # OnlineFunction

        # $$ OFFLINE DATA STRUCTURES $$ #
        self.snapshot = parametrized_expression.create_empty_snapshot()
        # Basis functions container
        self.basis_functions = parametrized_expression.create_basis_container()
        # I/O
        self.folder["basis"] = os.path.join(self.folder_prefix, "basis")
        self.folder["cache"] = os.path.join(self.folder_prefix, "cache")
        self.folder["reduced_operators"] = os.path.join(
            self.folder_prefix, "reduced_operators")

        def _snapshot_cache_key_generator(*args, **kwargs):
            assert args == self.mu
            assert len(kwargs) == 0
            return self._cache_key()

        def _snapshot_cache_import(filename):
            snapshot = copy(self.snapshot)
            self.import_solution(self.folder["cache"], filename, snapshot)
            return snapshot

        def _snapshot_cache_export(filename):
            self.export_solution(self.folder["cache"], filename)

        def _snapshot_cache_filename_generator(*args, **kwargs):
            assert args == self.mu
            assert len(kwargs) == 0
            return self._cache_file()

        self._snapshot_cache = Cache(
            "EIM",
            key_generator=_snapshot_cache_key_generator,
            import_=_snapshot_cache_import,
            export=_snapshot_cache_export,
            filename_generator=_snapshot_cache_filename_generator)
Example #13
    def __init__(self, V, **kwargs):
        # Call to parent
        StokesProblem_Base.__init__(self, V, **kwargs)

        # Form names for saddle point problems
        self.terms = [
            "a",
            "b",
            "bt",
            "f",
            "g",
            # Auxiliary terms for supremizer enrichment
            "bt_restricted"
        ]
        self.terms_order = {
            "a": 2,
            "b": 2,
            "bt": 2,
            "f": 1,
            "g": 1,
            # Auxiliary terms for supremizer enrichment
            "bt_restricted": 2
        }
        self.components = ["u", "s", "p"]

        # Auxiliary storage for supremizer enrichment, using a subspace of V
        self._supremizer = Function(V, "s")

        # I/O
        def _supremizer_cache_key_generator(*args, **kwargs):
            assert len(args) == 1
            assert args[0] == self.mu
            return self._supremizer_cache_key_from_kwargs(**kwargs)

        def _supremizer_cache_import(filename):
            supremizer = copy(self._supremizer)
            self.import_supremizer(self.folder["cache"], filename, supremizer)
            return supremizer

        def _supremizer_cache_export(filename):
            self.export_supremizer(self.folder["cache"], filename)

        def _supremizer_cache_filename_generator(*args, **kwargs):
            assert len(args) == 1
            assert args[0] == self.mu
            return self._supremizer_cache_file_from_kwargs(**kwargs)

        self._supremizer_cache = Cache(
            "problems",
            key_generator=_supremizer_cache_key_generator,
            import_=_supremizer_cache_import,
            export=_supremizer_cache_export,
            filename_generator=_supremizer_cache_filename_generator)
Example #14
        def __init__(self, truth_problem, **kwargs):
            StokesReducedProblem_Base.__init__(self, truth_problem, **kwargs)
            # Auxiliary storage for solution of reduced order supremizer problem (if requested through solve_supremizer)
            self._supremizer = None  # OnlineFunction

            # I/O
            def _supremizer_cache_key_generator(*args, **kwargs):
                assert len(args) == 2
                assert args[0] == self.mu
                return self._supremizer_cache_key_from_N_and_kwargs(
                    args[1], **kwargs)

            self._supremizer_cache = Cache(
                "reduced problems",
                key_generator=_supremizer_cache_key_generator)
Example #15
class DelayedBasisFunctionsMatrix(object):
    def __init__(self, space):
        self.space = space
        self._components_name = list()
        self._component_name_to_basis_component_index = ComponentNameToBasisComponentIndexDict()
        self._component_name_to_basis_component_length = OnlineSizeDict()
        self._enrich_memory = Cache()
        self._precomputed_slices = Cache()  # from tuple to FunctionsList

    def init(self, components_name):

        # Patch DelayedFunctionsList.enrich() to update internal attributes
        def patch_delayed_functions_list_enrich(component_name, memory):
            original_delayed_functions_list_enrich = memory.enrich

            def patched_delayed_functions_list_enrich(self_,
                                                      functions,
                                                      component=None,
                                                      weights=None,
                                                      copy=True):
                # Append to storage
                original_delayed_functions_list_enrich(functions, component,
                                                       weights, copy)
                # Update component name to basis component length
                if component is not None:
                    if isinstance(component, dict):
                        assert len(component) == 1
                        component_to = next(iter(component.values()))
                        assert component_name == component_to
                    else:
                        assert component_name == component
                self._update_component_name_to_basis_component_length(
                    component_name)
                # Reset precomputed slices
                self._precomputed_slices.clear()
                # Prepare trivial precomputed slice
                self._prepare_trivial_precomputed_slice()

            memory.enrich_patch = PatchInstanceMethod(
                memory, "enrich", patched_delayed_functions_list_enrich)
            memory.enrich_patch.patch()

        assert len(self._components_name) == 0
        self._components_name = components_name
        for (basis_component_index,
             component_name) in enumerate(components_name):
            self._component_name_to_basis_component_index[
                component_name] = basis_component_index
            self._component_name_to_basis_component_length[component_name] = 0
            self._enrich_memory[component_name] = DelayedFunctionsList(
                self.space)
            patch_delayed_functions_list_enrich(
                component_name, self._enrich_memory[component_name])

    def enrich(self, function, component=None, weight=None, copy=True):
        assert isinstance(function, DelayedLinearSolver)
        assert component is None
        assert weight is None
        assert copy is True
        assert len(self._components_name) == 1
        assert len(self._enrich_memory) == 1
        component_0 = self._components_name[0]
        # Append to storage
        self._enrich_memory[component_0].enrich(function, component, weight,
                                                copy)

    @overload(None)
    def _update_component_name_to_basis_component_length(self, component):
        assert len(self._enrich_memory) == 1
        assert len(self._components_name) == 1
        component_0 = self._components_name[0]
        self._component_name_to_basis_component_length[component_0] = len(
            self._enrich_memory[component_0])

    @overload(str)
    def _update_component_name_to_basis_component_length(self, component):
        self._component_name_to_basis_component_length[component] = len(
            self._enrich_memory[component])

    def _prepare_trivial_precomputed_slice(self):
        if len(self._enrich_memory) == 1:
            assert len(self._components_name) == 1
            component_0 = self._components_name[0]
            precomputed_slice_key_start = 0
            precomputed_slice_key_stop = self._component_name_to_basis_component_length[
                component_0]
        else:
            precomputed_slice_key_start = list()
            precomputed_slice_key_stop = list()
            for component_name in self._components_name:
                precomputed_slice_key_start.append(0)
                precomputed_slice_key_stop.append(
                    self._component_name_to_basis_component_length[component_name])
            precomputed_slice_key_start = tuple(precomputed_slice_key_start)
            precomputed_slice_key_stop = tuple(precomputed_slice_key_stop)
        self._precomputed_slices[precomputed_slice_key_start,
                                 precomputed_slice_key_stop] = self

    @overload(slice)  # e.g. key = :N, return the first N functions
    def __getitem__(self, key):
        assert key.step is None
        return self._precompute_slice(key.start, key.stop)

    @overload(str)
    def __getitem__(self, key):
        return self._enrich_memory[key]

    def __len__(self):
        assert len(self._components_name) == 1
        assert len(self._enrich_memory) == 1
        component_0 = self._components_name[0]
        return self._component_name_to_basis_component_length[component_0]

    @overload(None, int)
    def _precompute_slice(self, _, N_stop):
        return self._precompute_slice(0, N_stop)

    @overload(int, None)
    def _precompute_slice(self, N_start, _):
        return self._precompute_slice(N_start, len(self))

    @overload(int, int)
    def _precompute_slice(self, N_start, N_stop):
        if (N_start, N_stop) not in self._precomputed_slices:
            assert len(self._enrich_memory) == 1
            output = DelayedBasisFunctionsMatrix(self.space)
            output.init(self._components_name)
            for component_name in self._components_name:
                output._enrich_memory[component_name].enrich(
                    self._enrich_memory[component_name][N_start:N_stop])
            self._precomputed_slices[N_start, N_stop] = output
        return self._precomputed_slices[N_start, N_stop]

    @overload(None, OnlineSizeDict)
    def _precompute_slice(self, _, N_stop):
        N_start = OnlineSizeDict()
        for component_name in self._components_name:
            N_start[component_name] = 0
        return self._precompute_slice(N_start, N_stop)

    @overload(OnlineSizeDict, None)
    def _precompute_slice(self, N_start, _):
        N_stop = OnlineSizeDict()
        for component_name in self._components_name:
            N_stop[component_name] = self._component_name_to_basis_component_length[component_name]
        return self._precompute_slice(N_start, N_stop)

    @overload(OnlineSizeDict, OnlineSizeDict)
    def _precompute_slice(self, N_start, N_stop):
        assert set(N_start.keys()) == set(self._components_name)
        assert set(N_stop.keys()) == set(self._components_name)
        N_start_key = tuple(N_start[component_name]
                            for component_name in self._components_name)
        N_stop_key = tuple(N_stop[component_name]
                           for component_name in self._components_name)
        if (N_start_key, N_stop_key) not in self._precomputed_slices:
            output = DelayedBasisFunctionsMatrix(self.space)
            output.init(self._components_name)
            for component_name in self._components_name:
                output._enrich_memory[component_name].enrich(
                    self._enrich_memory[component_name]
                    [N_start[component_name]:N_stop[component_name]])
            self._precomputed_slices[N_start_key, N_stop_key] = output
        return self._precomputed_slices[N_start_key, N_stop_key]

    def save(self, directory, filename):
        for (component, memory) in self._enrich_memory.items():
            memory.save(directory, filename + "_" + component)

    def load(self, directory, filename):
        return_value = True
        for (component, memory) in self._enrich_memory.items():
            # Skip updating internal attributes while reading in basis functions, we will do that
            # only once at the end
            assert hasattr(memory, "enrich_patch")
            memory.enrich_patch.unpatch()
            # Load each component
            return_value_component = memory.load(directory,
                                                 filename + "_" + component)
            return_value = return_value and return_value_component
            # Populate component length
            self._update_component_name_to_basis_component_length(component)
            # Restore patched enrich method
            memory.enrich_patch.patch()
        # Reset precomputed slices
        self._precomputed_slices.clear()
        # Prepare trivial precomputed slice
        self._prepare_trivial_precomputed_slice()
        return return_value

    def get_problem_name(self):
        problem_name = None
        for (_, memory) in self._enrich_memory.items():
            if problem_name is None:
                problem_name = memory.get_problem_name()
            else:
                assert memory.get_problem_name() == problem_name
        return problem_name
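The @overload decorator used throughout this class comes from RBniCS's own dispatch machinery and picks an implementation based on the runtime types of the arguments (None, int, str, slice, OnlineSizeDict, ...). The standard library offers a comparable single-argument mechanism; the sketch below uses functools.singledispatchmethod, which is a different tool but illustrates the same type-driven __getitem__ dispatch:

    from functools import singledispatchmethod

    class SliceStore:
        @singledispatchmethod
        def __getitem__(self, key):
            raise TypeError("unsupported key type: %s" % type(key))

        @__getitem__.register
        def _(self, key: slice):
            # e.g. store[:N] requests the first N functions
            return ("slice", key.start, key.stop)

        @__getitem__.register
        def _(self, key: str):
            # e.g. store["u"] requests a single component
            return ("component", key)

    store = SliceStore()
    assert store[:3] == ("slice", None, 3)
    assert store["u"] == ("component", "u")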
Example #16
    if V.num_sub_spaces() == 0:
        if index_V is not None:
            sub_elements[tuple(index_V)] = V.ufl_element()
        return sub_elements
    else:
        for i in range(V.num_sub_spaces()):
            index_V_comma_i = list()
            if index_V is not None:
                index_V_comma_i.extend(index_V)
            index_V_comma_i.append(i)
            sub_elements_i = _get_sub_elements__recursive(
                V.sub(i), index_V_comma_i)
            sub_elements.update(sub_elements_i)
            sub_elements[tuple(index_V_comma_i)] = V.ufl_element()
        return sub_elements


def _split_from_tuple(input_, index_as_tuple):
    assert isinstance(index_as_tuple, tuple)
    assert len(index_as_tuple) > 0
    if len(index_as_tuple) == 1 and index_as_tuple[0] is None:
        return input_
    else:
        for i in index_as_tuple:
            input_ = split(input_)[i]
        return input_


_solution_split_to_component = Cache()
_solution_split_to_solution = Cache()
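_split_from_tuple drills into nested sub-functions one index at a time: an index tuple such as (0, 1) means "take sub-component 0, then its sub-component 1", while (None, ) returns the input unchanged. The same traversal over plain nested tuples, with tuple indexing standing in for UFL's split (an assumption made only to keep the sketch self-contained):

    def split_from_tuple(value, index_as_tuple):
        assert isinstance(index_as_tuple, tuple)
        assert len(index_as_tuple) > 0
        if len(index_as_tuple) == 1 and index_as_tuple[0] is None:
            return value  # (None, ) selects the whole function
        for i in index_as_tuple:
            value = value[i]  # descend one component per index
        return value

    velocity_pressure = (("u_x", "u_y"), "p")
    assert split_from_tuple(velocity_pressure, (0, 1)) == "u_y"
    assert split_from_tuple(velocity_pressure, (None, )) == velocity_pressure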
Example #17
                    min_global_cell_index = current_global_cell_index
                    min_cell_dof = current_cell_dof
            return (min_global_cell_index, min_cell_dof)

        if local_dofmap is None:
            local_dofmap = _get_local_dofmap(V)
        dof_map_writer_mapping_original = _build_dof_map_writer_mapping(
            V, local_dofmap)
        dof_map_writer_mapping_storage = dict()
        for (key, value) in dof_map_writer_mapping_original.items():
            dof_map_writer_mapping_storage[key] = extract_first_cell(value)
        _dof_map_writer_mapping_cache[V] = dof_map_writer_mapping_storage
        return _dof_map_writer_mapping_cache[V]


_dof_map_writer_mapping_cache = Cache()


def build_dof_map_reader_mapping(V, local_dofmap=None):
    try:
        return _dof_map_reader_mapping_cache[V]
    except KeyError:
        if local_dofmap is None:
            local_dofmap = _get_local_dofmap(V)
        _dof_map_reader_mapping_cache[V] = _build_dof_map_reader_mapping(
            V, local_dofmap)
        return _dof_map_reader_mapping_cache[V]


_dof_map_reader_mapping_cache = Cache()
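Both builders above follow the same memoization idiom: a module-level Cache keyed by the function space V, with the KeyError raised by the first lookup signalling a miss. Stripped to its essentials, with a plain dict and a hypothetical compute function:

    _result_cache = {}

    def _compute_result(V):
        # Hypothetical stand-in for an expensive builder such as
        # _build_dof_map_reader_mapping(V, local_dofmap)
        return {"space": V}

    def build_result(V):
        try:
            return _result_cache[V]
        except KeyError:
            _result_cache[V] = _compute_result(V)
            return _result_cache[V]

    assert build_result("V1") is build_result("V1")  # computed once, then reused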
Example #18
    class _AffineExpansionStorage(AbstractAffineExpansionStorage):
        def __init__(self, arg1, arg2):
            self._content = None
            self._precomputed_slices = Cache()  # from tuple to AffineExpansionStorage
            self._smallest_key = None
            self._previous_key = None
            self._largest_key = None
            # Auxiliary storage for __getitem__ slicing
            self._component_name_to_basis_component_index = None  # will be filled in in __setitem__, if required
            self._component_name_to_basis_component_length = None  # will be filled in in __setitem__, if required
            # Initialize arguments from inputs
            self._init(arg1, arg2)

        @overload(
            (tuple_of(backend.Matrix.Type()), tuple_of(backend.Vector.Type())),
            None)
        def _init(self, arg1, arg2):
            self._content = AffineExpansionStorageContent_Base((len(arg1), ),
                                                               dtype=object)
            self._smallest_key = 0
            self._largest_key = len(arg1) - 1
            for (i, arg1i) in enumerate(arg1):
                self[i] = arg1i

        @overload(int, None)
        def _init(self, arg1, arg2):
            self._content = AffineExpansionStorageContent_Base((arg1, ),
                                                               dtype=object)
            self._smallest_key = 0
            self._largest_key = arg1 - 1

        @overload(int, int)
        def _init(self, arg1, arg2):
            self._content = AffineExpansionStorageContent_Base((arg1, arg2),
                                                               dtype=object)
            self._smallest_key = (0, 0)
            self._largest_key = (arg1 - 1, arg2 - 1)

        def save(self, directory, filename):
            # Get full directory name
            full_directory = Folders.Folder(
                os.path.join(str(directory), filename))
            full_directory.create()
            # Exit in the trivial case of empty affine expansion
            if self._content.size == 0:
                return
            # Initialize iterator
            it = AffineExpansionStorageContent_Iterator(
                self._content,
                flags=["c_index", "multi_index", "refs_ok"],
                op_flags=["readonly"])
            # Save content item type and shape
            self._save_content_item_type_shape(self._content[it.multi_index],
                                               it, full_directory)
            # Save content
            self._save_content(self._content[it.multi_index], it,
                               full_directory)
            # Save dicts
            self._save_dicts(full_directory)

        @overload(backend.Matrix.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content_item_type_shape(self, item, it, full_directory):
            ContentItemTypeIO.save_file("matrix", full_directory,
                                        "content_item_type")
            ContentItemShapeIO.save_file((item.M, item.N), full_directory,
                                         "content_item_shape")

        @overload(backend.Vector.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content_item_type_shape(self, item, it, full_directory):
            ContentItemTypeIO.save_file("vector", full_directory,
                                        "content_item_type")
            ContentItemShapeIO.save_file(item.N, full_directory,
                                         "content_item_shape")

        @overload(backend.Function.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content_item_type_shape(self, item, it, full_directory):
            ContentItemTypeIO.save_file("function", full_directory,
                                        "content_item_type")
            ContentItemShapeIO.save_file(item.N, full_directory,
                                         "content_item_shape")

        @overload(Number, AffineExpansionStorageContent_Iterator,
                  Folders.Folder)
        def _save_content_item_type_shape(self, item, it, full_directory):
            ContentItemTypeIO.save_file("scalar", full_directory,
                                        "content_item_type")
            ContentItemShapeIO.save_file(None, full_directory,
                                         "content_item_shape")

        @overload(AbstractFunctionsList,
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content_item_type_shape(self, item, it, full_directory):
            ContentItemTypeIO.save_file("functions_list", full_directory,
                                        "content_item_type")
            ContentItemShapeIO.save_file(None, full_directory,
                                         "content_item_shape")

        @overload(AbstractBasisFunctionsMatrix,
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content_item_type_shape(self, item, it, full_directory):
            ContentItemTypeIO.save_file("basis_functions_matrix",
                                        full_directory, "content_item_type")
            ContentItemShapeIO.save_file(None, full_directory,
                                         "content_item_shape")

        @overload(None, AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content_item_type_shape(self, item, it, full_directory):
            ContentItemTypeIO.save_file("empty", full_directory,
                                        "content_item_type")
            ContentItemShapeIO.save_file(None, full_directory,
                                         "content_item_shape")

        @overload(backend.Matrix.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content(self, item, it, full_directory):
            while not it.finished:
                wrapping.tensor_save(self._content[it.multi_index],
                                     full_directory,
                                     "content_item_" + str(it.index))
                it.iternext()

        @overload(backend.Vector.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content(self, item, it, full_directory):
            while not it.finished:
                wrapping.tensor_save(self._content[it.multi_index],
                                     full_directory,
                                     "content_item_" + str(it.index))
                it.iternext()

        @overload(backend.Function.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content(self, item, it, full_directory):
            while not it.finished:
                wrapping.function_save(self._content[it.multi_index],
                                       full_directory,
                                       "content_item_" + str(it.index))
                it.iternext()

        @overload(Number, AffineExpansionStorageContent_Iterator,
                  Folders.Folder)
        def _save_content(self, item, it, full_directory):
            while not it.finished:
                ScalarContentIO.save_file(self._content[it.multi_index],
                                          full_directory,
                                          "content_item_" + str(it.index))
                it.iternext()

        @overload(AbstractFunctionsList,
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content(self, item, it, full_directory):
            while not it.finished:
                self._content[it.multi_index].save(
                    full_directory, "content_item_" + str(it.index))
                it.iternext()

        @overload(AbstractBasisFunctionsMatrix,
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content(self, item, it, full_directory):
            while not it.finished:
                self._content[it.multi_index].save(
                    full_directory, "content_item_" + str(it.index))
                it.iternext()

        @overload(None, AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content(self, item, it, full_directory):
            pass

        def _save_dicts(self, full_directory):
            DictIO.save_file(self._component_name_to_basis_component_index,
                             full_directory,
                             "component_name_to_basis_component_index")
            DictIO.save_file(self._component_name_to_basis_component_length,
                             full_directory,
                             "component_name_to_basis_component_length")

        def load(self, directory, filename):
            if self._content is not None:  # avoid loading multiple times
                if self._content.size > 0:
                    it = AffineExpansionStorageContent_Iterator(
                        self._content,
                        flags=["multi_index", "refs_ok"],
                        op_flags=["readonly"])
                    while not it.finished:
                        # ... but only if there is at least one element different from None
                        if self._content[it.multi_index] is not None:
                            if isinstance(self._content[it.multi_index], AbstractFunctionsList):
                                # ... unless it is an empty FunctionsList
                                if len(self._content[it.multi_index]) > 0:
                                    return False
                            elif isinstance(self._content[it.multi_index], AbstractBasisFunctionsMatrix):
                                # ... unless it is an empty BasisFunctionsMatrix
                                if sum(self._content[it.multi_index]._component_name_to_basis_component_length.values()) > 0:
                                    return False
                            else:
                                return False
                        it.iternext()
            # Get full directory name
            full_directory = Folders.Folder(
                os.path.join(str(directory), filename))
            # Exit in the trivial case of empty affine expansion
            if self._content.size == 0:
                return True
            # Load content item type and shape
            reference_item = self._load_content_item_type_shape(full_directory)
            # Initialize iterator
            it = AffineExpansionStorageContent_Iterator(
                self._content, flags=["c_index", "multi_index", "refs_ok"])
            # Load content
            self._load_content(reference_item, it, full_directory)
            # Load dicts
            self._load_dicts(full_directory)
            # Reset precomputed slices
            self._precomputed_slices.clear()
            self._prepare_trivial_precomputed_slice(reference_item)
            # Return
            return True

        def _load_content_item_type_shape(self, full_directory):
            assert ContentItemTypeIO.exists_file(full_directory,
                                                 "content_item_type")
            content_item_type = ContentItemTypeIO.load_file(
                full_directory, "content_item_type")
            assert ContentItemShapeIO.exists_file(full_directory,
                                                  "content_item_shape")
            assert content_item_type in ("matrix", "vector", "function",
                                         "scalar", "functions_list",
                                         "basis_functions_matrix", "empty")
            if content_item_type == "matrix":
                (M, N) = ContentItemShapeIO.load_file(
                    full_directory,
                    "content_item_shape",
                    globals={"OnlineSizeDict": OnlineSizeDict})
                return backend.Matrix(M, N)
            elif content_item_type == "vector":
                N = ContentItemShapeIO.load_file(
                    full_directory,
                    "content_item_shape",
                    globals={"OnlineSizeDict": OnlineSizeDict})
                return backend.Vector(N)
            elif content_item_type == "function":
                N = ContentItemShapeIO.load_file(
                    full_directory,
                    "content_item_shape",
                    globals={"OnlineSizeDict": OnlineSizeDict})
                return backend.Function(N)
            elif content_item_type == "scalar":
                return 0.
            elif content_item_type == "functions_list":  # self._content has already been populated with empty items
                assert isinstance(self._content[self._smallest_key],
                                  AbstractFunctionsList)
                return self._content[self._smallest_key]
            elif content_item_type == "basis_functions_matrix":  # self._content has already been populated with empty items
                assert isinstance(self._content[self._smallest_key],
                                  AbstractBasisFunctionsMatrix)
                return self._content[self._smallest_key]
            elif content_item_type == "empty":
                return None
            else:  # impossible to arrive here anyway thanks to the assert
                raise ValueError("Invalid content item type.")

        @overload(backend.Matrix.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _load_content(self, item, it, full_directory):
            while not it.finished:
                self._content[it.multi_index] = wrapping.tensor_copy(item)
                wrapping.tensor_load(self._content[it.multi_index],
                                     full_directory,
                                     "content_item_" + str(it.index))
                it.iternext()

        @overload(backend.Vector.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _load_content(self, item, it, full_directory):
            while not it.finished:
                self._content[it.multi_index] = wrapping.tensor_copy(item)
                wrapping.tensor_load(self._content[it.multi_index],
                                     full_directory,
                                     "content_item_" + str(it.index))
                it.iternext()

        @overload(backend.Function.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _load_content(self, item, it, full_directory):
            while not it.finished:
                self._content[it.multi_index] = wrapping.function_copy(item)
                wrapping.function_load(self._content[it.multi_index],
                                       full_directory,
                                       "content_item_" + str(it.index))
                it.iternext()

        @overload(Number, AffineExpansionStorageContent_Iterator,
                  Folders.Folder)
        def _load_content(self, item, it, full_directory):
            while not it.finished:
                self._content[it.multi_index] = ScalarContentIO.load_file(
                    full_directory, "content_item_" + str(it.index))
                it.iternext()

        @overload(AbstractFunctionsList,
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _load_content(self, item, it, full_directory):
            while not it.finished:
                self._content[it.multi_index].load(
                    full_directory, "content_item_" + str(it.index))
                it.iternext()

        @overload(AbstractBasisFunctionsMatrix,
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _load_content(self, item, it, full_directory):
            while not it.finished:
                self._content[it.multi_index].load(
                    full_directory, "content_item_" + str(it.index))
                it.iternext()

        @overload(None, AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _load_content(self, item, it, full_directory):
            pass

        def _load_dicts(self, full_directory):
            assert DictIO.exists_file(
                full_directory, "component_name_to_basis_component_index")
            self._component_name_to_basis_component_index = DictIO.load_file(
                full_directory,
                "component_name_to_basis_component_index",
                globals={
                    "ComponentNameToBasisComponentIndexDict":
                    ComponentNameToBasisComponentIndexDict
                })
            assert DictIO.exists_file(
                full_directory, "component_name_to_basis_component_length")
            self._component_name_to_basis_component_length = DictIO.load_file(
                full_directory,
                "component_name_to_basis_component_length",
                globals={"OnlineSizeDict": OnlineSizeDict})
            it = AffineExpansionStorageContent_Iterator(
                self._content,
                flags=["multi_index", "refs_ok"],
                op_flags=["readonly"])
            while not it.finished:
                if self._component_name_to_basis_component_index is not None:
                    self._content[it.multi_index]._component_name_to_basis_component_index = \
                        self._component_name_to_basis_component_index
                if self._component_name_to_basis_component_length is not None:
                    self._content[it.multi_index]._component_name_to_basis_component_length = \
                        self._component_name_to_basis_component_length
                it.iternext()

        @overload(backend.Matrix.Type())
        def _prepare_trivial_precomputed_slice(self, item):
            empty_slice = slice(None)
            slices = slice_to_array(
                item, (empty_slice, empty_slice),
                self._component_name_to_basis_component_length,
                self._component_name_to_basis_component_index)
            self._precomputed_slices[slices] = self

        @overload(backend.Vector.Type())
        def _prepare_trivial_precomputed_slice(self, item):
            empty_slice = slice(None)
            slices = slice_to_array(
                item, empty_slice,
                self._component_name_to_basis_component_length,
                self._component_name_to_basis_component_index)
            self._precomputed_slices[slices] = self

        @overload(backend.Function.Type())
        def _prepare_trivial_precomputed_slice(self, item):
            empty_slice = slice(None)
            slices = slice_to_array(
                item.vector, empty_slice,
                self._component_name_to_basis_component_length,
                self._component_name_to_basis_component_index)
            self._precomputed_slices[slices] = self

        @overload(Number)
        def _prepare_trivial_precomputed_slice(self, item):
            pass

        @overload(AbstractFunctionsList)
        def _prepare_trivial_precomputed_slice(self, item):
            pass

        @overload(AbstractBasisFunctionsMatrix)
        def _prepare_trivial_precomputed_slice(self, item):
            pass

        @overload(None)
        def _prepare_trivial_precomputed_slice(self, item):
            pass

        @overload((slice, tuple_of(slice)))
        def __getitem__(self, key):
            """
            Return the subtensors of size "key" for every element in content (e.g. submatrices [1:5, 1:5] of the affine expansion of A).
            """
            it = AffineExpansionStorageContent_Iterator(
                self._content,
                flags=["multi_index", "refs_ok"],
                op_flags=["readonly"])
            slices = slice_to_array(
                self._content[it.multi_index], key,
                self._component_name_to_basis_component_length,
                self._component_name_to_basis_component_index)

            if slices in self._precomputed_slices:
                return self._precomputed_slices[slices]
            else:
                output = _AffineExpansionStorage.__new__(
                    type(self), *self._content.shape)
                output.__init__(*self._content.shape)
                while not it.finished:
                    # Slice content and assign
                    output[it.multi_index] = self._do_slicing(
                        self._content[it.multi_index], key)
                    # Increment
                    it.iternext()
                self._precomputed_slices[slices] = output
                return output

        @overload((int, tuple_of(int)))
        def __getitem__(self, key):
            """
            Return the element at position "key" in the storage (e.g. the q-th matrix in the affine expansion of A, q = 1 ... Qa).
            """
            return self._content[key]

        @overload(backend.Matrix.Type(), (slice, tuple_of(slice)))
        def _do_slicing(self, item, key):
            return item[key]

        @overload(backend.Vector.Type(), (slice, tuple_of(slice)))
        def _do_slicing(self, item, key):
            return item[key]

        @overload(backend.Function.Type(), (slice, tuple_of(slice)))
        def _do_slicing(self, item, key):
            return backend.Function(item.vector()[key])

        def __setitem__(self, key, item):
            assert not isinstance(
                key, slice
            )  # only able to set the element at position "key" in the storage
            # Check that __setitem__ is not random access but is called with increasing keys, and store the current key
            self._assert_setitem_order(key)
            self._update_previous_key(key)
            # Store item
            self._content[key] = item
            # Reset attributes related to basis functions matrix if the size has changed
            if key == self._smallest_key:  # this assumes that __setitem__ is not random access but called for increasing keys
                self._component_name_to_basis_component_index = None
                self._component_name_to_basis_component_length = None
            # Also store attributes related to basis functions matrix for __getitem__ slicing
            assert isinstance(
                item,
                (
                    backend.Matrix.Type(),  # output e.g. of Z^T*A*Z
                    backend.Vector.Type(),  # output e.g. of Z^T*F
                    backend.Function.Type(),  # for initial conditions of unsteady problems
                    Number,  # output of Riesz_F^T*X*Riesz_F
                    AbstractFunctionsList,  # auxiliary storage of Riesz representors
                    AbstractBasisFunctionsMatrix  # auxiliary storage of Riesz representors
                ))
            if isinstance(item, backend.Function.Type()):
                item = item.vector()
            if isinstance(item, (backend.Matrix.Type(), backend.Vector.Type(),
                                 AbstractBasisFunctionsMatrix)):
                assert (
                    self._component_name_to_basis_component_index is None) == (
                        self._component_name_to_basis_component_length is None)
                if self._component_name_to_basis_component_index is None:
                    self._component_name_to_basis_component_index = item._component_name_to_basis_component_index
                    self._component_name_to_basis_component_length = item._component_name_to_basis_component_length
                else:
                    assert self._component_name_to_basis_component_index == item._component_name_to_basis_component_index
                    assert self._component_name_to_basis_component_length == item._component_name_to_basis_component_length
            else:
                assert self._component_name_to_basis_component_index is None
                assert self._component_name_to_basis_component_length is None
            # Reset and prepare precomputed slices
            if key == self._largest_key:  # this assumes that __setitem__ is not random access but called for increasing keys
                self._precomputed_slices.clear()
                self._prepare_trivial_precomputed_slice(item)

        @overload(int)
        def _assert_setitem_order(self, current_key):
            if self._previous_key is None:
                assert current_key == 0
            else:
                assert current_key == (self._previous_key +
                                       1) % (self._largest_key + 1)

        @overload(int, int)
        def _assert_setitem_order(self, current_key_0, current_key_1):
            if self._previous_key is None:
                assert current_key_0 == 0
                assert current_key_1 == 0
            else:
                expected_key_1 = (self._previous_key[1] +
                                  1) % (self._largest_key[1] + 1)
                if expected_key_1 == 0:
                    expected_key_0 = (self._previous_key[0] +
                                      1) % (self._largest_key[0] + 1)
                else:
                    expected_key_0 = self._previous_key[0]
                assert current_key_0 == expected_key_0
                assert current_key_1 == expected_key_1

        @overload(tuple_of(int))
        def _assert_setitem_order(self, current_key):
            self._assert_setitem_order(*current_key)

        @overload(int)
        def _update_previous_key(self, current_key):
            self._previous_key = current_key

        @overload(int, int)
        def _update_previous_key(self, current_key_0, current_key_1):
            self._previous_key = (current_key_0, current_key_1)

        @overload(tuple_of(int))
        def _update_previous_key(self, current_key):
            self._update_previous_key(*current_key)

        def __iter__(self):
            return AffineExpansionStorageContent_Iterator(
                self._content, flags=["refs_ok"], op_flags=["readonly"])

        def __len__(self):
            assert self.order() == 1
            return self._content.size

        def order(self):
            assert self._content is not None
            return len(self._content.shape)
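
A minimal standalone sketch of the ordering contract that _assert_setitem_order enforces for two-dimensional storage: keys must arrive in row-major order, wrapping modulo the shape. The helper below is illustrative, not part of the library.

def next_expected_key(previous_key, largest_key):
    # row-major successor on a (largest_key[0] + 1) x (largest_key[1] + 1) grid
    expected_1 = (previous_key[1] + 1) % (largest_key[1] + 1)
    if expected_1 == 0:
        expected_0 = (previous_key[0] + 1) % (largest_key[0] + 1)
    else:
        expected_0 = previous_key[0]
    return (expected_0, expected_1)

assert next_expected_key((0, 0), (1, 2)) == (0, 1)
assert next_expected_key((0, 2), (1, 2)) == (1, 0)  # wraps to the next row
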
Example #19
        funs = fun.split(deepcopy=True)
        for (i, fun_i) in enumerate(funs):
            if components is not None:
                filename_i = filename + "_subcomponent_" + str(i)
            else:
                filename_i = filename + "_component_" + str(i)
            _write_to_file(fun_i, directory, filename_i, suffix, None)
    else:
        if suffix is not None:
            if suffix == 0:
                # Remove existing files if any, as new functions should not be appended, but rather overwrite existing functions
                SolutionFile.remove_files(directory, filename)
                # Remove from storage and re-create
                try:
                    del _all_solution_files[(directory, filename)]
                except KeyError:
                    pass
                _all_solution_files[(directory, filename)] = SolutionFile(
                    directory, filename)
            file_ = _all_solution_files[(directory, filename)]
            file_.write(fun, function_name, suffix)
        else:
            # Remove existing files if any, as new functions should not be appended, but rather overwrite existing functions
            SolutionFile.remove_files(directory, filename)
            # Write function to file
            file_ = SolutionFile(directory, filename)
            file_.write(fun, function_name, 0)


_all_solution_files = Cache()
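
A hedged usage sketch of the suffix convention implemented above (the call shape mirrors the recursive call in the component branch; u is an assumed FEniCS-style Function, and all names here are illustrative):

_write_to_file(u, "output", "velocity", 0, None)           # suffix 0: truncate and start a new time series
_write_to_file(u, "output", "velocity", 1, None)           # later suffixes: append the next time step
_write_to_file(u, "output", "velocity_final", None, None)  # suffix None: one-off overwrite
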
Example #20
        def __init__(self, V, **kwargs):
            # Call the parent initialization
            ParametrizedReducedDifferentialProblem_DerivedClass.__init__(
                self, V, **kwargs)

            # Populate problem name to problem map
            add_to_map_from_problem_name_to_problem(self.name(), self)

    # return value (a class) for the decorator
    return StoreMapFromProblemNameToProblem_Class


def add_to_map_from_problem_name_to_problem(problem_name, problem):
    if hasattr(type(problem), "__is_exact__"):
        assert type(problem).__is_exact__ is True
        problem_name = problem.__decorated_problem__.name()
        assert problem_name in _problem_name_to_problem_map
    else:
        if problem_name not in _problem_name_to_problem_map:
            _problem_name_to_problem_map[problem_name] = problem
        else:
            assert _problem_name_to_problem_map[problem_name] is problem


def get_problem_from_problem_name(problem_name):
    assert problem_name in _problem_name_to_problem_map
    return _problem_name_to_problem_map[problem_name]


_problem_name_to_problem_map = Cache()
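
A small usage sketch of this name registry (HeatProblem is a hypothetical problem class): registration is idempotent for the same instance, while registering a different instance under an existing name would trip the assertion.

problem = HeatProblem(V)  # hypothetical parametrized problem on a space V
add_to_map_from_problem_name_to_problem(problem.name(), problem)
assert get_problem_from_problem_name(problem.name()) is problem
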
    @PreserveClassName
    class StoreMapFromProblemToReducedProblem_Class(
            ParametrizedReducedDifferentialProblem_DerivedClass):
        def __init__(self, truth_problem, **kwargs):
            # Call the parent initialization
            ParametrizedReducedDifferentialProblem_DerivedClass.__init__(
                self, truth_problem, **kwargs)

            # Populate problem to reduced problem map
            add_to_map_from_problem_to_reduced_problem(truth_problem, self)

    # return value (a class) for the decorator
    return StoreMapFromProblemToReducedProblem_Class


def add_to_map_from_problem_to_reduced_problem(problem, reduced_problem):
    if problem not in _problem_to_reduced_problem_map:
        if hasattr(type(problem), "__is_exact__"):
            problem = problem.__decorated_problem__
        _problem_to_reduced_problem_map[problem] = reduced_problem
    else:
        assert _problem_to_reduced_problem_map[problem] is reduced_problem


def get_reduced_problem_from_problem(problem):
    assert problem in _problem_to_reduced_problem_map
    return _problem_to_reduced_problem_map[problem]


_problem_to_reduced_problem_map = Cache()
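
Usage sketch (the problem/reduced-problem pair is hypothetical): the map is keyed by the truth problem instance, and exact-decorated problems are first unwrapped to their underlying problem via __decorated_problem__ before being stored.

add_to_map_from_problem_to_reduced_problem(truth_problem, reduced_problem)
assert get_reduced_problem_from_problem(truth_problem) is reduced_problem
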
Example #22
 def __init__(self, space):
     self.space = space
     self._enrich_memory = list()
     self._precomputed_slices = Cache()  # from tuple to DelayedFunctionsList
            ExactParametrizedFunctionsDecoratedProblem_DerivedClass._init_operators(
                self)

            # Populate map from parametrized operators to (this) problem
            for (term, operator) in self.operator.items():
                if operator is not None:  # raised by assemble_operator if output computation is optional
                    for (q, operator_q) in enumerate(operator):
                        add_to_map_from_parametrized_operator_to_term_and_index(
                            operator_q, term, q)

    # return value (a class) for the decorator
    return StoreMapFromParametrizedOperatorsToTermAndIndex_Class


def add_to_map_from_parametrized_operator_to_term_and_index(
        operator, term, index):
    if operator not in _parametrized_operator_to_term_and_index_map:
        _parametrized_operator_to_term_and_index_map[operator] = (term, index)
    else:
        # for simple problems the same operator may correspond to more than one term, we only care about one
        # of them anyway since we are going to use this function to only export the term name
        pass


def get_term_and_index_from_parametrized_operator(operator):
    assert operator in _parametrized_operator_to_term_and_index_map
    return _parametrized_operator_to_term_and_index_map[operator]


_parametrized_operator_to_term_and_index_map = Cache()
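
Usage sketch: once a problem's assembled operators have been registered, each operator instance can be traced back to its (term, index) pair, e.g. when exporting it under a term name. The operator lookup below is illustrative.

operator_q = truth_problem.operator["a"][0]  # assumed AffineExpansionStorage entry
add_to_map_from_parametrized_operator_to_term_and_index(operator_q, "a", 0)
assert get_term_and_index_from_parametrized_operator(operator_q) == ("a", 0)
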
Example #24
    def __init__(self, V, **kwargs):
        # Call to parent
        StokesOptimalControlProblem_Base.__init__(self, V, **kwargs)

        # Form names for saddle point problems
        self.terms = [
            "a",
            "a*",
            "b",
            "b*",
            "bt",
            "bt*",
            "c",
            "c*",
            "m",
            "n",
            "f",
            "g",
            "h",
            "l",
            # Auxiliary terms for supremizer enrichment
            "bt_restricted",
            "bt*_restricted"
        ]
        self.terms_order = {
            "a": 2,
            "a*": 2,
            "b": 2,
            "b*": 2,
            "bt": 2,
            "bt*": 2,
            "c": 2,
            "c*": 2,
            "m": 2,
            "n": 2,
            "f": 1,
            "g": 1,
            "l": 1,
            "h": 0,
            # Auxiliary terms for supremizer enrichment
            "bt_restricted": 2,
            "bt*_restricted": 2
        }
        self.components = ["v", "s", "p", "u", "w", "r", "q"]

        # Auxiliary storage for supremizer enrichment, using a subspace of V
        self._supremizer = {"s": Function(V, "s"), "r": Function(V, "r")}

        # I/O
        def _supremizer_cache_key_generator(*args, **kwargs):
            assert len(args) == 1
            assert args[0] == self.mu
            return self._supremizer_cache_key_from_kwargs(**kwargs)

        def _supremizer_cache_import(component):
            def _supremizer_cache_import_impl(filename):
                supremizer = copy(self._supremizer[component])
                self.import_supremizer(self.folder["cache"],
                                       filename,
                                       supremizer,
                                       component=component)
                return supremizer

            return _supremizer_cache_import_impl

        def _supremizer_cache_export(component):
            def _supremizer_cache_export_impl(filename):
                self.export_supremizer(self.folder["cache"],
                                       filename,
                                       component=component)

            return _supremizer_cache_export_impl

        def _supremizer_cache_filename_generator(*args, **kwargs):
            assert len(args) == 1
            assert args[0] == self.mu
            return self._supremizer_cache_file_from_kwargs(**kwargs)

        self._supremizer_cache = {
            "s":
            Cache("problems",
                  key_generator=_supremizer_cache_key_generator,
                  import_=_supremizer_cache_import("s"),
                  export=_supremizer_cache_export("s"),
                  filename_generator=_supremizer_cache_filename_generator),
            "r":
            Cache("problems",
                  key_generator=_supremizer_cache_key_generator,
                  import_=_supremizer_cache_import("r"),
                  export=_supremizer_cache_export("r"),
                  filename_generator=_supremizer_cache_filename_generator)
        }
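
A hedged sketch of how these per-component caches are typically consulted (the solve helper and dict-style indexing by mu are assumptions; only the dict-of-Cache layout above is taken from the code): a hit imports the supremizer through the callbacks, a miss recomputes and stores it.

    def _solve_supremizer_with_cache(self, component):
        try:
            self._supremizer[component] = self._supremizer_cache[component][self.mu]
        except KeyError:
            self._solve_supremizer(component)  # hypothetical truth-level solve
            self._supremizer_cache[component][self.mu] = copy(self._supremizer[component])
        return self._supremizer[component]
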
    def __init__(self,
                 truth_problem,
                 spectrum,
                 eigensolver_parameters,
                 folder_prefix,
                 expansion_index=None):
        # Call the parent initialization
        ParametrizedProblem.__init__(self, folder_prefix)
        self.truth_problem = truth_problem

        # Matrices/vectors resulting from the truth discretization
        self.expansion_index = expansion_index
        self.operator = {
            "stability_factor_left_hand_matrix":
            None,  # AffineExpansionStorage
            "stability_factor_right_hand_matrix":
            None  # AffineExpansionStorage, even though it will contain only one matrix
        }
        self.dirichlet_bc = None  # AffineExpansionStorage
        self.spectrum = spectrum
        self.eigensolver_parameters = eigensolver_parameters

        # Solution
        self._eigenvalue = 0.
        self._eigenvector = Function(truth_problem.stability_factor_V)
        # I/O
        self.folder["cache"] = os.path.join(folder_prefix, "cache")

        def _eigenvalue_cache_key_generator(*args, **kwargs):
            return args

        def _eigenvalue_cache_import(filename):
            self.import_eigenvalue(self.folder["cache"], filename)
            return self._eigenvalue

        def _eigenvalue_cache_export(filename):
            self.export_eigenvalue(self.folder["cache"], filename)

        def _eigenvalue_cache_filename_generator(*args, **kwargs):
            return self._cache_file(args)

        self._eigenvalue_cache = Cache(
            "problems",
            key_generator=_eigenvalue_cache_key_generator,
            import_=_eigenvalue_cache_import,
            export=_eigenvalue_cache_export,
            filename_generator=_eigenvalue_cache_filename_generator)

        def _eigenvector_cache_key_generator(*args, **kwargs):
            return args

        def _eigenvector_cache_import(filename):
            self.import_eigenvector(self.folder["cache"], filename)
            return self._eigenvector

        def _eigenvector_cache_export(filename):
            self.export_eigenvector(self.folder["cache"], filename)

        def _eigenvector_cache_filename_generator(*args, **kwargs):
            return self._cache_file(args)

        self._eigenvector_cache = Cache(
            "problems",
            key_generator=_eigenvector_cache_key_generator,
            import_=_eigenvector_cache_import,
            export=_eigenvector_cache_export,
            filename_generator=_eigenvector_cache_filename_generator)
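
A minimal standalone model of the callback protocol these caches appear to follow (a sketch, not the library's Cache implementation): values are memoized in memory, exported to disk when stored, and re-imported through import_ on an in-memory miss.

class SketchCache:
    def __init__(self, key_generator, import_, export, filename_generator):
        self._storage = {}
        self._key, self._import = key_generator, import_
        self._export, self._filename = export, filename_generator

    def __getitem__(self, args):
        key = self._key(*args)
        if key not in self._storage:
            # in-memory miss: fall back to a previously exported value on disk
            self._storage[key] = self._import(self._filename(*args))
        return self._storage[key]

    def __setitem__(self, args, value):
        self._storage[self._key(*args)] = value
        self._export(self._filename(*args))

cache = SketchCache(lambda *args: args, lambda filename: 0.0,
                    lambda filename: None, lambda *args: "eigenvalue")
cache[(0.5, 1.0)] = 2.3
assert cache[(0.5, 1.0)] == 2.3
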
Example #26
def basic_form_on_truth_function_space(backend, wrapping):
    def _basic_form_on_truth_function_space(form_wrapper, tensor=None):
        form = form_wrapper._form
        form_name = form_wrapper.name()
        mu = get_problem_from_parametrized_operator(form_wrapper).mu

        if form_name not in form_on_truth_function_space__reduced_problem_to_truth_solution_cache:
            visited = set()
            truth_problems = list()
            truth_problem_to_components = dict()
            truth_problem_to_exact_truth_problem = dict()
            truth_problem_to_truth_solution = dict()
            reduced_problem_to_components = dict()
            reduced_problem_to_truth_solution = dict()

            # Look for terminals on truth mesh
            for node in wrapping.form_iterator(form):
                if node in visited:
                    continue
                # ... problem solutions related to nonlinear terms
                elif wrapping.is_problem_solution_or_problem_solution_component_type(
                        node):
                    if wrapping.is_problem_solution_or_problem_solution_component(
                            node):
                        (preprocessed_node, component, truth_solution
                         ) = wrapping.solution_identify_component(node)
                        truth_problem = get_problem_from_solution(
                            truth_solution)
                        truth_problems.append(truth_problem)
                        # Store the solution
                        truth_problem_to_truth_solution[
                            truth_problem] = truth_solution
                        # Store the component
                        if truth_problem not in truth_problem_to_components:
                            truth_problem_to_components[truth_problem] = list()
                        truth_problem_to_components[truth_problem].append(
                            component)
                    else:
                        preprocessed_node = node
                    # Make sure to skip any parent solution related to this one
                    visited.add(node)
                    visited.add(preprocessed_node)
                    for parent_node in wrapping.solution_iterator(
                            preprocessed_node):
                        visited.add(parent_node)

            # Cache the resulting dicts
            form_on_truth_function_space__truth_problems_cache[
                form_name] = truth_problems
            form_on_truth_function_space__truth_problem_to_components_cache[
                form_name] = truth_problem_to_components
            form_on_truth_function_space__truth_problem_to_exact_truth_problem_cache[
                form_name] = truth_problem_to_exact_truth_problem
            form_on_truth_function_space__truth_problem_to_truth_solution_cache[
                form_name] = truth_problem_to_truth_solution
            form_on_truth_function_space__reduced_problem_to_components_cache[
                form_name] = reduced_problem_to_components
            form_on_truth_function_space__reduced_problem_to_truth_solution_cache[
                form_name] = reduced_problem_to_truth_solution

        # Extract from cache
        truth_problems = form_on_truth_function_space__truth_problems_cache[
            form_name]
        truth_problem_to_components = form_on_truth_function_space__truth_problem_to_components_cache[
            form_name]
        truth_problem_to_exact_truth_problem = form_on_truth_function_space__truth_problem_to_exact_truth_problem_cache[
            form_name]
        truth_problem_to_truth_solution = form_on_truth_function_space__truth_problem_to_truth_solution_cache[
            form_name]
        reduced_problem_to_components = form_on_truth_function_space__reduced_problem_to_components_cache[
            form_name]
        reduced_problem_to_truth_solution = form_on_truth_function_space__reduced_problem_to_truth_solution_cache[
            form_name]

        # Get list of truth and reduced problems that need to be solved, possibly updating cache
        required_truth_problems = list()
        required_reduced_problems = list()
        for truth_problem in truth_problems:
            truth_problem_is_solving = hasattr(truth_problem, "_is_solving")
            if is_training_started(truth_problem):
                reduced_problem = get_reduced_problem_from_problem(
                    truth_problem)
                reduced_problem_is_solving = hasattr(reduced_problem,
                                                     "_is_solving")
            else:
                reduced_problem = None
                reduced_problem_is_solving = False
            if not truth_problem_is_solving:
                if is_training_finished(truth_problem):
                    # Store the component
                    if reduced_problem not in reduced_problem_to_components:
                        reduced_problem_to_components[
                            reduced_problem] = truth_problem_to_components[
                                truth_problem]
                    # Store the solution
                    if reduced_problem not in reduced_problem_to_truth_solution:
                        reduced_problem_to_truth_solution[
                            reduced_problem] = truth_problem_to_truth_solution[
                                truth_problem]
                    # Append to list of required reduced problems
                    required_reduced_problems.append(
                        (reduced_problem, reduced_problem_is_solving))
                else:
                    if (hasattr(truth_problem,
                                "_apply_exact_evaluation_at_stages") and
                            not hasattr(truth_problem, "_apply_EIM_at_stages")
                            and not hasattr(truth_problem,
                                            "_apply_DEIM_at_stages")):
                        # Init truth problem (if required), as it may not have been initialized
                        truth_problem.init()
                        # Append to list of required truth problems which are not currently solving
                        required_truth_problems.append(
                            (truth_problem, False, reduced_problem_is_solving))
                    else:
                        # Store the corresponding exact truth problem
                        if truth_problem not in truth_problem_to_exact_truth_problem:
                            exact_truth_problem = exact_problem(truth_problem)
                            truth_problem_to_exact_truth_problem[
                                truth_problem] = exact_truth_problem
                            # Init exact truth problem (if required), as it may not have been initialized
                            exact_truth_problem.init()
                        else:
                            exact_truth_problem = truth_problem_to_exact_truth_problem[
                                truth_problem]
                        # Store the component
                        if exact_truth_problem not in truth_problem_to_components:
                            truth_problem_to_components[
                                exact_truth_problem] = truth_problem_to_components[
                                    truth_problem]
                        # Store the solution
                        if exact_truth_problem not in truth_problem_to_truth_solution:
                            truth_problem_to_truth_solution[
                                exact_truth_problem] = truth_problem_to_truth_solution[
                                    truth_problem]
                        # Append to list of required truth problems which are not currently solving
                        required_truth_problems.append(
                            (exact_truth_problem, False,
                             reduced_problem_is_solving))
            else:
                assert not reduced_problem_is_solving
                # Append to list of required truth problems which are currently solving
                required_truth_problems.append((truth_problem, True, False))

        # Solve truth problems (which have not been reduced yet) associated to nonlinear terms
        truth_problem_to_truth_solution_copy = dict()
        for (truth_problem, truth_problem_is_solving,
             reduced_problem_is_solving) in required_truth_problems:
            if not reduced_problem_is_solving:
                # Solve (if necessary) ...
                truth_problem.set_mu(mu)
                if not truth_problem_is_solving:
                    log(
                        PROGRESS,
                        "In form_on_truth_function_space, requiring truth problem solve for problem "
                        + truth_problem.name())
                    truth_problem.solve()
                else:
                    log(
                        PROGRESS,
                        "In form_on_truth_function_space, loading current truth problem solution for problem "
                        + truth_problem.name())
            else:
                reduced_problem = get_reduced_problem_from_problem(
                    truth_problem)
                log(
                    PROGRESS,
                    "In form_on_truth_function_space, replacing current truth problem solution with reduced solution for problem "
                    + reduced_problem.truth_problem.name())
            # ... and assign to truth_solution
            truth_solution = truth_problem_to_truth_solution[truth_problem]
            truth_problem_to_truth_solution_copy[truth_problem] = backend.copy(
                truth_solution)
            for component in truth_problem_to_components[truth_problem]:
                solution_to = _sub_from_tuple(truth_solution, component)
                if not reduced_problem_is_solving:
                    solution_from = _sub_from_tuple(truth_problem._solution,
                                                    component)
                else:
                    solution_from = _sub_from_tuple(
                        reduced_problem.basis_functions[:reduced_problem.
                                                        _solution.N] *
                        reduced_problem._solution, component)
                backend.assign(solution_to, solution_from)

        # Solve reduced problems associated to nonlinear terms
        reduced_problem_to_truth_solution_copy = dict()
        for (reduced_problem, is_solving) in required_reduced_problems:
            # Solve (if necessary) ...
            reduced_problem.set_mu(mu)
            if not is_solving:
                log(
                    PROGRESS,
                    "In form_on_truth_function_space, requiring reduced problem solve for problem "
                    + reduced_problem.truth_problem.name())
                reduced_problem.solve()
            else:
                log(
                    PROGRESS,
                    "In form_on_truth_function_space, loading current reduced problem solution for problem "
                    + reduced_problem.truth_problem.name())
            # ... and assign to truth_solution
            truth_solution = reduced_problem_to_truth_solution[reduced_problem]
            reduced_problem_to_truth_solution_copy[
                reduced_problem] = backend.copy(truth_solution)
            for component in reduced_problem_to_components[reduced_problem]:
                solution_to = _sub_from_tuple(truth_solution, component)
                solution_from = _sub_from_tuple(
                    reduced_problem.basis_functions[:reduced_problem._solution.
                                                    N] *
                    reduced_problem._solution, component)
                backend.assign(solution_to, solution_from)

        # Assemble
        assembled_form = wrapping.assemble(form, tensor)
        assembled_form.generator = form_wrapper  # for I/O
        form_rank = assembled_form.rank()

        # Undo any side effect of truth problem solves
        for (truth_problem, _, _) in required_truth_problems:
            truth_solution = truth_problem_to_truth_solution[truth_problem]
            truth_solution_copy = truth_problem_to_truth_solution_copy[
                truth_problem]
            for component in truth_problem_to_components[truth_problem]:
                solution_to = _sub_from_tuple(truth_solution, component)
                solution_from = _sub_from_tuple(truth_solution_copy, component)
                backend.assign(solution_to, solution_from)

        # Undo any side effect of reduced problem solves
        for (reduced_problem, _) in required_reduced_problems:
            truth_solution = reduced_problem_to_truth_solution[reduced_problem]
            truth_solution_copy = reduced_problem_to_truth_solution_copy[
                reduced_problem]
            for component in reduced_problem_to_components[reduced_problem]:
                solution_to = _sub_from_tuple(truth_solution, component)
                solution_from = _sub_from_tuple(truth_solution_copy, component)
                backend.assign(solution_to, solution_from)

        # Return
        return (assembled_form, form_rank)

    form_on_truth_function_space__truth_problems_cache = Cache()
    form_on_truth_function_space__truth_problem_to_components_cache = Cache()
    form_on_truth_function_space__truth_problem_to_exact_truth_problem_cache = Cache()
    form_on_truth_function_space__truth_problem_to_truth_solution_cache = Cache()
    form_on_truth_function_space__reduced_problem_to_components_cache = Cache()
    form_on_truth_function_space__reduced_problem_to_truth_solution_cache = Cache()

    return _basic_form_on_truth_function_space
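
The factory above keeps its per-form caches in the closure of the returned function, so each (backend, wrapping) pair gets an independent cache set shared across all calls. A minimal sketch of the same idiom (names are illustrative; Cache is the class used throughout these examples, though a plain dict would do for the sketch):

def basic_assemble(backend, wrapping):
    def _assemble(form_wrapper, tensor=None):
        form_name = form_wrapper.name()
        if form_name not in _assembled_cache:
            _assembled_cache[form_name] = wrapping.assemble(form_wrapper._form, tensor)
        return _assembled_cache[form_name]

    _assembled_cache = Cache()  # lives in the closure, one per factory invocation
    return _assemble
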
Example #27
class NonAffineExpansionStorage(AbstractNonAffineExpansionStorage):
    def __init__(self, *shape):
        self._shape = shape
        self._type = "empty"
        self._content = dict()
        self._precomputed_slices = Cache()  # from tuple to NonAffineExpansionStorage
        assert len(shape) in (1, 2)
        if len(shape) == 1:
            self._smallest_key = 0
            self._largest_key = shape[0] - 1
        else:
            self._smallest_key = (0, 0)
            self._largest_key = (shape[0] - 1, shape[1] - 1)
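
    # Note (illustrative, not from the library): the smallest/largest keys computed
    # above delimit one in-order filling pass; __setitem__ resets auxiliary state at
    # self._smallest_key and finalizes precomputed slices at self._largest_key, e.g.
    #     storage_1d = NonAffineExpansionStorage(4)     # keys 0 .. 3
    #     assert (storage_1d._smallest_key, storage_1d._largest_key) == (0, 3)
    #     storage_2d = NonAffineExpansionStorage(2, 3)  # keys (0, 0) .. (1, 2)
    #     assert (storage_2d._smallest_key, storage_2d._largest_key) == ((0, 0), (1, 2))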

    def save(self, directory, filename):
        # Get full directory name
        full_directory = Folders.Folder(os.path.join(str(directory), filename))
        full_directory.create()
        # Export depending on type
        TypeIO.save_file(self._type, full_directory, "type")
        assert self._type in ("basis_functions_matrix", "empty",
                              "error_estimation_operators_11",
                              "error_estimation_operators_21",
                              "error_estimation_operators_22",
                              "functions_list", "operators")
        if self._type in ("basis_functions_matrix", "functions_list"):
            # Save delayed functions
            delayed_functions = self._content[self._type]
            it = NonAffineExpansionStorageContent_Iterator(
                delayed_functions,
                flags=["c_index", "multi_index", "refs_ok"],
                op_flags=["readonly"])
            while not it.finished:
                delayed_function = delayed_functions[it.multi_index]
                delayed_function.save(full_directory,
                                      "delayed_functions_" + str(it.index))
                it.iternext()
        elif self._type == "empty":
            pass
        elif self._type in ("error_estimation_operators_11",
                            "error_estimation_operators_21",
                            "error_estimation_operators_22"):
            # Save delayed functions
            delayed_function_type = {
                DelayedBasisFunctionsMatrix: "DelayedBasisFunctionsMatrix",
                DelayedLinearSolver: "DelayedLinearSolver"
            }
            assert len(self._content["delayed_functions"]) is 2
            for (index, delayed_functions) in enumerate(
                    self._content["delayed_functions"]):
                it = NonAffineExpansionStorageContent_Iterator(
                    delayed_functions,
                    flags=["c_index", "refs_ok"],
                    op_flags=["readonly"])
                while not it.finished:
                    delayed_function = delayed_functions[it.index]
                    DelayedFunctionsTypeIO.save_file(
                        delayed_function_type[type(delayed_function)],
                        full_directory, "delayed_functions_" + str(index) +
                        "_" + str(it.index) + "_type")
                    DelayedFunctionsProblemNameIO.save_file(
                        delayed_function.get_problem_name(), full_directory,
                        "delayed_functions_" + str(index) + "_" +
                        str(it.index) + "_problem_name")
                    delayed_function.save(
                        full_directory, "delayed_functions_" + str(index) +
                        "_" + str(it.index) + "_content")
                    it.iternext()
            ErrorEstimationInnerProductIO.save_file(
                get_reduced_problem_from_error_estimation_inner_product(
                    self._content["inner_product_matrix"]).truth_problem.name(
                    ), full_directory, "inner_product_matrix_problem_name")
        elif self._type == "operators":
            # Save truth content
            it = NonAffineExpansionStorageContent_Iterator(
                self._content["truth_operators"],
                flags=["c_index", "multi_index", "refs_ok"],
                op_flags=["readonly"])
            while not it.finished:
                operator = self._content["truth_operators"][it.multi_index]
                assert isinstance(
                    operator, (AbstractParametrizedTensorFactory, NumericForm))
                if isinstance(operator, AbstractParametrizedTensorFactory):
                    problem_name = get_problem_from_parametrized_operator(
                        operator).name()
                    (term,
                     index) = get_term_and_index_from_parametrized_operator(
                         operator)
                    TruthContentItemIO.save_file(
                        "ParametrizedTensorFactory", full_directory,
                        "truth_operator_" + str(it.index) + "_type")
                    TruthContentItemIO.save_file(
                        (problem_name, term, index), full_directory,
                        "truth_operator_" + str(it.index))
                elif isinstance(operator, NumericForm):
                    TruthContentItemIO.save_file(
                        "NumericForm", full_directory,
                        "truth_operator_" + str(it.index) + "_type")
                    TruthContentItemIO.save_file(
                        operator, full_directory,
                        "truth_operator_" + str(it.index))
                else:
                    raise TypeError("Invalid operator type")
                it.iternext()
            assert "truth_operators_as_expansion_storage" in self._content
            # Save basis functions content
            assert len(self._content["basis_functions"]) in (0, 1, 2)
            BasisFunctionsContentLengthIO.save_file(
                len(self._content["basis_functions"]), full_directory,
                "basis_functions_length")
            for (index, basis_functions) in enumerate(
                    self._content["basis_functions"]):
                BasisFunctionsProblemNameIO.save_file(
                    get_reduced_problem_from_basis_functions(
                        basis_functions).truth_problem.name(), full_directory,
                    "basis_functions_" + str(index) + "_problem_name")
                BasisFunctionsProblemNameIO.save_file(
                    basis_functions._components_name, full_directory,
                    "basis_functions_" + str(index) + "_components_name")
        else:
            raise ValueError("Invalid type")

    def load(self, directory, filename):
        if self._type != "empty":  # avoid loading multiple times
            if self._type in ("basis_functions_matrix", "functions_list"):
                delayed_functions = self._content[self._type]
                it = NonAffineExpansionStorageContent_Iterator(
                    delayed_functions,
                    flags=["c_index", "multi_index", "refs_ok"],
                    op_flags=["readonly"])
                while not it.finished:
                    if isinstance(delayed_functions[it.multi_index],
                                  DelayedFunctionsList):
                        assert self._type == "functions_list"
                        if len(
                                delayed_functions[it.multi_index]
                        ) > 0:  # ... unless it is an empty FunctionsList
                            return False
                    elif isinstance(delayed_functions[it.multi_index],
                                    DelayedBasisFunctionsMatrix):
                        assert self._type == "basis_functions_matrix"
                        if sum(
                                delayed_functions[it.multi_index].
                                _component_name_to_basis_component_length.
                                values()
                        ) > 0:  # ... unless it is an empty BasisFunctionsMatrix
                            return False
                    else:
                        raise TypeError("Invalid delayed functions")
                    it.iternext()
            else:
                return False
        # Get full directory name
        full_directory = Folders.Folder(os.path.join(str(directory), filename))
        # Detect trivial case
        assert TypeIO.exists_file(full_directory, "type")
        imported_type = TypeIO.load_file(full_directory, "type")
        self._type = imported_type
        assert self._type in ("basis_functions_matrix", "empty",
                              "error_estimation_operators_11",
                              "error_estimation_operators_21",
                              "error_estimation_operators_22",
                              "functions_list", "operators")
        if self._type in ("basis_functions_matrix", "functions_list"):
            # Load delayed functions
            assert self._type in self._content
            delayed_functions = self._content[self._type]
            it = NonAffineExpansionStorageContent_Iterator(
                delayed_functions, flags=["c_index", "multi_index", "refs_ok"])
            while not it.finished:
                delayed_function = delayed_functions[it.multi_index]
                delayed_function.load(full_directory,
                                      "delayed_functions_" + str(it.index))
                it.iternext()
        elif self._type == "empty":
            pass
        elif self._type in ("error_estimation_operators_11",
                            "error_estimation_operators_21",
                            "error_estimation_operators_22"):
            # Load delayed functions
            assert "delayed_functions" not in self._content
            self._content["delayed_functions"] = [
                NonAffineExpansionStorageContent_Base(self._shape[0],
                                                      dtype=object),
                NonAffineExpansionStorageContent_Base(self._shape[1],
                                                      dtype=object)
            ]
            for (index, delayed_functions) in enumerate(
                    self._content["delayed_functions"]):
                it = NonAffineExpansionStorageContent_Iterator(
                    delayed_functions, flags=["c_index", "refs_ok"])
                while not it.finished:
                    assert DelayedFunctionsTypeIO.exists_file(
                        full_directory, "delayed_functions_" + str(index) +
                        "_" + str(it.index) + "_type")
                    delayed_function_type = DelayedFunctionsTypeIO.load_file(
                        full_directory, "delayed_functions_" + str(index) +
                        "_" + str(it.index) + "_type")
                    assert DelayedFunctionsProblemNameIO.exists_file(
                        full_directory, "delayed_functions_" + str(index) +
                        "_" + str(it.index) + "_problem_name")
                    delayed_function_problem_name = DelayedFunctionsProblemNameIO.load_file(
                        full_directory, "delayed_functions_" + str(index) +
                        "_" + str(it.index) + "_problem_name")
                    delayed_function_problem = get_problem_from_problem_name(
                        delayed_function_problem_name)
                    assert delayed_function_type in (
                        "DelayedBasisFunctionsMatrix", "DelayedLinearSolver")
                    if delayed_function_type == "DelayedBasisFunctionsMatrix":
                        delayed_function = DelayedBasisFunctionsMatrix(
                            delayed_function_problem.V)
                        delayed_function.init(
                            delayed_function_problem.components)
                    elif delayed_function_type == "DelayedLinearSolver":
                        delayed_function = DelayedLinearSolver()
                    else:
                        raise ValueError("Invalid delayed function")
                    delayed_function.load(
                        full_directory, "delayed_functions_" + str(index) +
                        "_" + str(it.index) + "_content")
                    delayed_functions[it.index] = delayed_function
                    it.iternext()
            # Load inner product
            assert ErrorEstimationInnerProductIO.exists_file(
                full_directory, "inner_product_matrix_problem_name")
            inner_product_matrix_problem_name = ErrorEstimationInnerProductIO.load_file(
                full_directory, "inner_product_matrix_problem_name")
            inner_product_matrix_problem = get_problem_from_problem_name(
                inner_product_matrix_problem_name)
            inner_product_matrix_reduced_problem = get_reduced_problem_from_problem(
                inner_product_matrix_problem)
            self._content[
                "inner_product_matrix"] = inner_product_matrix_reduced_problem._error_estimation_inner_product
            # Recompute shape
            assert "delayed_functions_shape" not in self._content
            self._content["delayed_functions_shape"] = DelayedTransposeShape(
                (self._content["delayed_functions"][0][0],
                 self._content["delayed_functions"][1][0]))
            # Prepare precomputed slices
            self._precomputed_slices.clear()
            self._prepare_trivial_precomputed_slice()
        elif self._type == "empty":
            pass
        elif self._type == "operators":
            # Load truth content
            assert "truth_operators" not in self._content
            self._content[
                "truth_operators"] = NonAffineExpansionStorageContent_Base(
                    self._shape, dtype=object)
            it = NonAffineExpansionStorageContent_Iterator(
                self._content["truth_operators"],
                flags=["c_index", "multi_index", "refs_ok"])
            while not it.finished:
                assert TruthContentItemIO.exists_file(
                    full_directory,
                    "truth_operator_" + str(it.index) + "_type")
                operator_type = TruthContentItemIO.load_file(
                    full_directory,
                    "truth_operator_" + str(it.index) + "_type")
                assert operator_type in ("NumericForm",
                                         "ParametrizedTensorFactory")
                if operator_type == "NumericForm":
                    assert TruthContentItemIO.exists_file(
                        full_directory, "truth_operator_" + str(it.index))
                    value = TruthContentItemIO.load_file(
                        full_directory, "truth_operator_" + str(it.index))
                    self._content["truth_operators"][
                        it.multi_index] = NumericForm(value)
                elif operator_type == "ParametrizedTensorFactory":
                    assert TruthContentItemIO.exists_file(
                        full_directory, "truth_operator_" + str(it.index))
                    (problem_name, term, index) = TruthContentItemIO.load_file(
                        full_directory, "truth_operator_" + str(it.index))
                    truth_problem = get_problem_from_problem_name(problem_name)
                    self._content["truth_operators"][
                        it.multi_index] = truth_problem.operator[term][index]
                else:
                    raise ValueError("Invalid operator type")
                it.iternext()
            assert "truth_operators_as_expansion_storage" not in self._content
            self._prepare_truth_operators_as_expansion_storage()
            # Load basis functions content
            assert BasisFunctionsContentLengthIO.exists_file(
                full_directory, "basis_functions_length")
            basis_functions_length = BasisFunctionsContentLengthIO.load_file(
                full_directory, "basis_functions_length")
            assert basis_functions_length in (0, 1, 2)
            assert "basis_functions" not in self._content
            self._content["basis_functions"] = list()
            for index in range(basis_functions_length):
                assert BasisFunctionsProblemNameIO.exists_file(
                    full_directory,
                    "basis_functions_" + str(index) + "_problem_name")
                basis_functions_problem_name = BasisFunctionsProblemNameIO.load_file(
                    full_directory,
                    "basis_functions_" + str(index) + "_problem_name")
                assert BasisFunctionsProblemNameIO.exists_file(
                    full_directory,
                    "basis_functions_" + str(index) + "_components_name")
                basis_functions_components_name = BasisFunctionsProblemNameIO.load_file(
                    full_directory,
                    "basis_functions_" + str(index) + "_components_name")
                basis_functions_problem = get_problem_from_problem_name(
                    basis_functions_problem_name)
                basis_functions_reduced_problem = get_reduced_problem_from_problem(
                    basis_functions_problem)
                basis_functions = basis_functions_reduced_problem.basis_functions
                if basis_functions_components_name != basis_functions_problem.components:
                    basis_functions = basis_functions[
                        basis_functions_components_name]
                self._content["basis_functions"].append(basis_functions)
            # Recompute shape
            self._content["basis_functions_shape"] = DelayedTransposeShape(
                self._content["basis_functions"])
            # Reset precomputed slices
            self._precomputed_slices.clear()
            self._prepare_trivial_precomputed_slice()
        else:
            raise ValueError("Invalid type")
        return True
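
    # Usage note (a sketch, not from the library): load() returns False when the
    # storage is already populated in memory and True after an actual import, so
    # callers can treat it as a populate-once guard:
    #     loaded_now = storage.load(directory, "operators")
    #     # True: content was just read from disk; False: it was already in memory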

    def _prepare_trivial_precomputed_slice(self):
        empty_slice = slice(None)
        assert self._type in ("error_estimation_operators_11",
                              "error_estimation_operators_21",
                              "error_estimation_operators_22", "operators")
        if self._type == "error_estimation_operators_11":
            pass  # nothing to be done (scalar content)
        elif self._type == "error_estimation_operators_21":
            assert "delayed_functions" in self._content
            assert len(self._content["delayed_functions"]) is 2
            assert "delayed_functions_shape" in self._content

            slice_ = slice_to_array(
                self._content["delayed_functions_shape"], empty_slice,
                self._content["delayed_functions_shape"].
                _component_name_to_basis_component_length,
                self._content["delayed_functions_shape"].
                _component_name_to_basis_component_index)
            self._precomputed_slices[slice_] = self
        elif self._type == "error_estimation_operators_22":
            assert "delayed_functions" in self._content
            assert len(self._content["delayed_functions"]) is 2
            assert "delayed_functions_shape" in self._content

            slice_ = slice_to_array(
                self._content["delayed_functions_shape"],
                (empty_slice, empty_slice),
                self._content["delayed_functions_shape"].
                _component_name_to_basis_component_length,
                self._content["delayed_functions_shape"].
                _component_name_to_basis_component_index)
            self._precomputed_slices[slice_] = self
        elif self._type == "operators":
            assert len(self._content["basis_functions"]) in (0, 1, 2)
            assert "basis_functions_shape" in self._content

            if len(self._content["basis_functions"]) is 0:
                pass  # nothing to be done (scalar content)
            elif len(self._content["basis_functions"]) is 1:
                slice_ = slice_to_array(
                    self._content["basis_functions_shape"], empty_slice,
                    self._content["basis_functions_shape"].
                    _component_name_to_basis_component_length,
                    self._content["basis_functions_shape"].
                    _component_name_to_basis_component_index)
                self._precomputed_slices[slice_] = self
            elif len(self._content["basis_functions"]) is 2:
                slices = slice_to_array(
                    self._content["basis_functions_shape"],
                    (empty_slice, empty_slice),
                    self._content["basis_functions_shape"].
                    _component_name_to_basis_component_length,
                    self._content["basis_functions_shape"].
                    _component_name_to_basis_component_index)
                self._precomputed_slices[slices] = self
            else:
                raise ValueError("Invalid length")
        else:
            raise ValueError("Invalid type")

    @overload(slice)
    def __getitem__(self, key):
        assert self._type in ("error_estimation_operators_21", "operators")
        if self._type == "error_estimation_operators_21":
            assert "delayed_functions" in self._content
            assert len(self._content["delayed_functions"]) is 2
            assert "delayed_functions_shape" in self._content

            slice_ = slice_to_array(
                self._content["delayed_functions_shape"], key,
                self._content["delayed_functions_shape"].
                _component_name_to_basis_component_length,
                self._content["delayed_functions_shape"].
                _component_name_to_basis_component_index)

            if slice_ in self._precomputed_slices:
                return self._precomputed_slices[slice_]
            else:
                output = NonAffineExpansionStorage.__new__(
                    type(self), *self._shape)
                output.__init__(*self._shape)
                output._type = self._type
                output._content["inner_product_matrix"] = self._content[
                    "inner_product_matrix"]
                output._content["delayed_functions"] = [
                    NonAffineExpansionStorageContent_Base(self._shape[0],
                                                          dtype=object),
                    NonAffineExpansionStorageContent_Base(self._shape[1],
                                                          dtype=object)
                ]
                for q in range(self._shape[0]):
                    output._content["delayed_functions"][0][q] = self._content[
                        "delayed_functions"][0][q][key]
                for q in range(self._shape[1]):
                    output._content["delayed_functions"][1][q] = self._content[
                        "delayed_functions"][1][q]
                output._content[
                    "delayed_functions_shape"] = DelayedTransposeShape(
                        (output._content["delayed_functions"][0][0],
                         output._content["delayed_functions"][1][0]))
                self._precomputed_slices[slice_] = output
                return output
        elif self._type == "operators":
            assert "basis_functions" in self._content
            assert len(self._content["basis_functions"]) is 1
            assert "basis_functions_shape" in self._content

            slice_ = slice_to_array(
                self._content["basis_functions_shape"], key,
                self._content["basis_functions_shape"].
                _component_name_to_basis_component_length,
                self._content["basis_functions_shape"].
                _component_name_to_basis_component_index)

            if slice_ in self._precomputed_slices:
                return self._precomputed_slices[slice_]
            else:
                output = NonAffineExpansionStorage.__new__(
                    type(self), *self._shape)
                output.__init__(*self._shape)
                output._type = self._type
                output._content["truth_operators"] = self._content[
                    "truth_operators"]
                output._content[
                    "truth_operators_as_expansion_storage"] = self._content[
                        "truth_operators_as_expansion_storage"]
                output._content["basis_functions"] = list()
                output._content["basis_functions"].append(
                    self._content["basis_functions"][0][key])
                output._content[
                    "basis_functions_shape"] = DelayedTransposeShape(
                        output._content["basis_functions"])
                self._precomputed_slices[slice_] = output
                return output
        else:
            raise ValueError("Invalid type")

    @overload(tuple_of(slice))
    def __getitem__(self, key):
        assert self._type in ("error_estimation_operators_22", "operators")
        if self._type == "error_estimation_operators_22":
            assert len(key) == 2
            assert "delayed_functions" in self._content
            assert len(self._content["delayed_functions"]) is 2
            assert "delayed_functions_shape" in self._content

            slice_ = slice_to_array(
                self._content["delayed_functions_shape"], key,
                self._content["delayed_functions_shape"].
                _component_name_to_basis_component_length,
                self._content["delayed_functions_shape"].
                _component_name_to_basis_component_index)

            if slice_ in self._precomputed_slices:
                return self._precomputed_slices[slice_]
            else:
                output = NonAffineExpansionStorage.__new__(
                    type(self), *self._shape)
                output.__init__(*self._shape)
                output._type = self._type
                output._content["inner_product_matrix"] = self._content[
                    "inner_product_matrix"]
                output._content["delayed_functions"] = [
                    NonAffineExpansionStorageContent_Base(self._shape[0],
                                                          dtype=object),
                    NonAffineExpansionStorageContent_Base(self._shape[1],
                                                          dtype=object)
                ]
                for q in range(self._shape[0]):
                    output._content["delayed_functions"][0][q] = self._content[
                        "delayed_functions"][0][q][key[0]]
                for q in range(self._shape[1]):
                    output._content["delayed_functions"][1][q] = self._content[
                        "delayed_functions"][1][q][key[1]]
                output._content[
                    "delayed_functions_shape"] = DelayedTransposeShape(
                        (output._content["delayed_functions"][0][0],
                         output._content["delayed_functions"][1][0]))
                self._precomputed_slices[slice_] = output
                return output
        elif self._type == "operators":
            assert len(key) == 2
            assert "basis_functions" in self._content
            assert len(self._content["basis_functions"]) is 2
            assert "basis_functions_shape" in self._content

            slices = slice_to_array(
                self._content["basis_functions_shape"], key,
                self._content["basis_functions_shape"].
                _component_name_to_basis_component_length,
                self._content["basis_functions_shape"].
                _component_name_to_basis_component_index)

            if slices in self._precomputed_slices:
                return self._precomputed_slices[slices]
            else:
                output = NonAffineExpansionStorage.__new__(
                    type(self), *self._shape)
                output.__init__(*self._shape)
                output._type = self._type
                output._content["truth_operators"] = self._content[
                    "truth_operators"]
                output._content[
                    "truth_operators_as_expansion_storage"] = self._content[
                        "truth_operators_as_expansion_storage"]
                output._content["basis_functions"] = list()
                output._content["basis_functions"].append(
                    self._content["basis_functions"][0][key[0]])
                output._content["basis_functions"].append(
                    self._content["basis_functions"][1][key[1]])
                output._content[
                    "basis_functions_shape"] = DelayedTransposeShape(
                        output._content["basis_functions"])
                self._precomputed_slices[slices] = output
                return output
        else:
            raise ValueError("Invalid type")

    @overload(int)
    def __getitem__(self, key):
        assert self._type in ("basis_functions_matrix", "functions_list",
                              "operators")
        if self._type in ("basis_functions_matrix", "functions_list"):
            return self._content[self._type][key]
        elif self._type == "operators":
            return self._delay_transpose(self._content["basis_functions"],
                                         self._content["truth_operators"][key])
        else:
            raise ValueError("Invalid type")

    @overload(tuple_of(int))
    def __getitem__(self, key):
        assert self._type in ("error_estimation_operators_11",
                              "error_estimation_operators_21",
                              "error_estimation_operators_22")
        return self._delay_transpose(
            (self._content["delayed_functions"][0][key[0]],
             self._content["delayed_functions"][1][key[1]]),
            self._content["inner_product_matrix"])

    def __iter__(self):
        assert self._type in ("basis_functions_matrix", "functions_list",
                              "operators")
        if self._type in ("basis_functions_matrix", "functions_list"):
            return self._content[self._type].__iter__()
        elif self._type == "operators":
            return (self._delay_transpose(self._content["basis_functions"], op)
                    for op in self._content["truth_operators"].__iter__())
        else:
            raise ValueError("Invalid type")

    @overload((int, tuple_of(int)), AbstractBasisFunctionsMatrix)
    def __setitem__(self, key, item):
        if self._type != "empty":
            assert self._type == "basis_functions_matrix"
        else:
            self._type = "basis_functions_matrix"
            self._content[self._type] = NonAffineExpansionStorageContent_Base(
                self._shape, dtype=object)
        self._content[self._type][key] = DelayedBasisFunctionsMatrix(
            item.space)
        self._content[self._type][key].init(item._components_name)

    @overload((int, tuple_of(int)), AbstractFunctionsList)
    def __setitem__(self, key, item):
        if self._type != "empty":
            assert self._type == "functions_list"
        else:
            self._type = "functions_list"
            self._content[self._type] = NonAffineExpansionStorageContent_Base(
                self._shape, dtype=object)
        self._content[self._type][key] = DelayedFunctionsList(item.space)

    @overload((int, tuple_of(int)), DelayedTranspose)
    def __setitem__(self, key, item):
        assert isinstance(item._args[0],
                          (AbstractBasisFunctionsMatrix,
                           DelayedBasisFunctionsMatrix, DelayedLinearSolver))
        if isinstance(item._args[0], AbstractBasisFunctionsMatrix):
            if self._type != "empty":
                assert self._type == "operators"
            else:
                self._type = "operators"
            # Reset attributes if size has changed
            if key == self._smallest_key:  # this assumes that __setitem__ is not random access but called for increasing key
                self._content.pop("truth_operators_as_expansion_storage", None)
                self._content[
                    "truth_operators"] = NonAffineExpansionStorageContent_Base(
                        self._shape, dtype=object)
                self._content["basis_functions"] = list()
                self._content.pop("basis_functions_shape", None)
            # Store
            assert len(item._args) in (2, 3)
            if len(self._content["basis_functions"]) == 0:
                assert isinstance(item._args[0], AbstractBasisFunctionsMatrix)
                self._content["basis_functions"].append(item._args[0])
            else:
                assert item._args[0] is self._content["basis_functions"][0]
            self._content["truth_operators"][key] = item._args[1]
            if len(item._args) > 2:
                if len(self._content["basis_functions"]) == 1:
                    assert isinstance(item._args[2],
                                      AbstractBasisFunctionsMatrix)
                    self._content["basis_functions"].append(item._args[2])
                else:
                    assert item._args[2] is self._content["basis_functions"][1]
            # Recompute shape
            if "basis_functions_shape" not in self._content:
                self._content["basis_functions_shape"] = DelayedTransposeShape(
                    self._content["basis_functions"])
            # Compute truth expansion storage and prepare precomputed slices
            if key == self._largest_key:  # this assumes that __setitem__ is not random access but called for increasing key
                self._prepare_truth_operators_as_expansion_storage()
                self._precomputed_slices.clear()
                self._prepare_trivial_precomputed_slice()
        elif isinstance(item._args[0],
                        (DelayedBasisFunctionsMatrix, DelayedLinearSolver)):
            assert len(item._args) == 3
            assert isinstance(
                item._args[2],
                (DelayedBasisFunctionsMatrix, DelayedLinearSolver))
            if isinstance(item._args[0], DelayedLinearSolver):
                assert isinstance(item._args[2], DelayedLinearSolver)
                if self._type != "empty":
                    assert self._type == "error_estimation_operators_11"
                else:
                    self._type = "error_estimation_operators_11"
            elif isinstance(item._args[0], DelayedBasisFunctionsMatrix):
                if isinstance(item._args[2], DelayedLinearSolver):
                    if self._type != "empty":
                        assert self._type == "error_estimation_operators_21"
                    else:
                        self._type = "error_estimation_operators_21"
                elif isinstance(item._args[2], DelayedBasisFunctionsMatrix):
                    if self._type != "empty":
                        assert self._type == "error_estimation_operators_22"
                    else:
                        self._type = "error_estimation_operators_22"
                else:
                    raise TypeError(
                        "Invalid arguments to NonAffineExpansionStorage")
            else:
                raise TypeError(
                    "Invalid arguments to NonAffineExpansionStorage")
            # Reset attributes if size has changed
            if key == self._smallest_key:  # this assumes that __setitem__ is not random access but called for increasing key
                self._content["delayed_functions"] = [
                    NonAffineExpansionStorageContent_Base(self._shape[0],
                                                          dtype=object),
                    NonAffineExpansionStorageContent_Base(self._shape[1],
                                                          dtype=object)
                ]
                self._content.pop("delayed_functions_shape", None)
                self._content.pop("inner_product_matrix", None)
            # Store
            if key[1] == self._smallest_key[
                    1]:  # this assumes that __setitem__ is not random access but called for increasing key
                self._content["delayed_functions"][0][key[0]] = item._args[0]
            else:
                assert item._args[0] is self._content["delayed_functions"][0][
                    key[0]]
            if "inner_product_matrix" not in self._content:
                self._content["inner_product_matrix"] = item._args[1]
            else:
                assert item._args[1] is self._content["inner_product_matrix"]
            if key[0] == self._smallest_key[
                    0]:  # this assumes that __setitem__ is not random access but called for increasing key
                self._content["delayed_functions"][1][key[1]] = item._args[2]
            else:
                assert item._args[2] is self._content["delayed_functions"][1][
                    key[1]]
            # Recompute shape
            if "delayed_functions_shape" not in self._content:
                self._content[
                    "delayed_functions_shape"] = DelayedTransposeShape(
                        (item._args[0], item._args[2]))
            else:
                assert DelayedTransposeShape((
                    item._args[0],
                    item._args[2])) == self._content["delayed_functions_shape"]
            # Prepare precomputed slices
            if key == self._largest_key:  # this assumes that __setitem__ is not random access but called for increasing key
                self._precomputed_slices.clear()
                self._prepare_trivial_precomputed_slice()
        else:
            raise TypeError("Invalid arguments to NonAffineExpansionStorage")

    @overload((int, tuple_of(int)),
              (AbstractParametrizedTensorFactory, Number))
    def __setitem__(self, key, item):
        if self._type != "empty":
            assert self._type == "operators"
        else:
            self._type = "operators"
        # Reset attributes, similarly to what is done for Vector and Matrix operators
        if key == self._smallest_key:  # this assumes that __setitem__ is not random access but called for increasing key
            self._content.pop("truth_operators_as_expansion_storage", None)
            self._content[
                "truth_operators"] = NonAffineExpansionStorageContent_Base(
                    self._shape, dtype=object)
            self._content["basis_functions"] = list()  # will stay empty
            self._content.pop("basis_functions_shape", None)
        # Store
        if isinstance(item, Number):
            self._content["truth_operators"][key] = NumericForm(item)
        else:
            assert isinstance(item, AbstractParametrizedTensorFactory)
            assert len(item._spaces) == 0
            self._content["truth_operators"][key] = item
        # Recompute (trivial) shape
        if "basis_functions_shape" not in self._content:
            self._content["basis_functions_shape"] = DelayedTransposeShape(
                self._content["basis_functions"])
        # Compute truth expansion storage and prepare precomputed slices
        if key == self._largest_key:  # this assumes that __setitem__ is not random access but called for increasing key
            self._prepare_truth_operators_as_expansion_storage()

    def _prepare_truth_operators_as_expansion_storage(self):
        from rbnics.backends import NonAffineExpansionStorage
        assert self._type == "operators"
        assert self.order() == 1
        extracted_operators = tuple(op._form
                                    for op in self._content["truth_operators"])
        assert "truth_operators_as_expansion_storage" not in self._content
        self._content[
            "truth_operators_as_expansion_storage"] = NonAffineExpansionStorage(
                extracted_operators)
        if not all(isinstance(op, Number) for op in extracted_operators):
            problems = [
                get_problem_from_parametrized_operator(op)
                for op in self._content["truth_operators"]
            ]
            assert all(problem is problems[0] for problem in problems)
            for extracted_operator in self._content[
                    "truth_operators_as_expansion_storage"]:
                add_to_map_from_parametrized_operator_to_problem(
                    extracted_operator, problems[0])

    def __len__(self):
        assert self._type == "operators"
        assert self.order() == 1
        return self._shape[0]

    def order(self):
        assert self._type in ("error_estimation_operators_11",
                              "error_estimation_operators_21",
                              "error_estimation_operators_22", "operators")
        return len(self._shape)

    def _delay_transpose(self, pre_post, op):
        assert len(pre_post) in (0, 1, 2)
        if len(pre_post) == 0:
            return op
        elif len(pre_post) == 1:
            return DelayedTranspose(pre_post[0]) * op
        else:
            return DelayedTranspose(pre_post[0]) * op * pre_post[1]
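
For reference, _delay_transpose only composes the stored truth operator with the (delayed) transpose of the surrounding basis objects: with two basis objects the result represents the Galerkin-style product Z1^T * A * Z2. Below is a minimal self-contained sketch of that composition, using a stand-in MockDelayedTranspose class rather than the real RBniCS DelayedTranspose:

class MockDelayedTranspose:
    # Stand-in for DelayedTranspose: it records the factors of the delayed
    # product, the leftmost factor being the transposed one.
    def __init__(self, arg):
        self._args = (arg, )

    def __mul__(self, other):
        self._args = self._args + (other, )
        return self


def delay_transpose(pre_post, op):
    # Same branching as NonAffineExpansionStorage._delay_transpose above
    assert len(pre_post) in (0, 1, 2)
    if len(pre_post) == 0:
        return op
    elif len(pre_post) == 1:
        return MockDelayedTranspose(pre_post[0]) * op
    else:
        return MockDelayedTranspose(pre_post[0]) * op * pre_post[1]


# With two basis objects the result represents Z1^T * A * Z2
result = delay_transpose(("Z1", "Z2"), "A")
assert result._args == ("Z1", "A", "Z2")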
Example #28
0
            def generate_overridden_set_time(standard_set_time):
                def overridden_set_time(self, t):
                    standard_set_time(t)
                    # Keep the time attribute of every parametrized expression
                    # generated for this problem in sync with the new time
                    for expression_ in _truth_problem_to_parametrized_expressions[
                            self]:
                        if hasattr(expression_, "t"):
                            if expression_.t is not t:
                                assert isinstance(expression_.t, Number)
                                expression_.t = t

                return overridden_set_time

            if (
                    "set_time" in _original_setters
                    and truth_problem in _original_setters["set_time"]
            ):  # truth_problem.set_time was already patched by the decorator @sync_setters
                standard_set_time = _original_setters["set_time"][
                    truth_problem]
                overridden_set_time = generate_overridden_set_time(
                    standard_set_time)
                _original_setters["set_time"][
                    truth_problem] = types.MethodType(overridden_set_time,
                                                      truth_problem)
            else:
                standard_set_time = truth_problem.set_time
                overridden_set_time = generate_overridden_set_time(
                    standard_set_time)
                PatchInstanceMethod(truth_problem, "set_time",
                                    overridden_set_time).patch()

    return expression


_truth_problem_to_parametrized_expressions = Cache()
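
The patching above follows a small monkey-patching pattern: the problem's original set_time is wrapped so that, after the standard update, every parametrized expression registered for that problem has its t attribute synchronized. A minimal self-contained sketch of the same pattern, with illustrative stand-in classes instead of the real RBniCS problem and expression types:

import types

# Stand-in registry for _truth_problem_to_parametrized_expressions
_registered_expressions = {}


class MockExpression:
    def __init__(self):
        self.t = 0.


class MockProblem:
    def __init__(self):
        self.t = 0.

    def set_time(self, t):
        self.t = t


def patch_set_time(problem):
    standard_set_time = problem.set_time  # keep a reference to the bound method

    def overridden_set_time(self_, t):
        standard_set_time(t)
        # Propagate the new time to every expression registered for this problem
        for expression_ in _registered_expressions[self_]:
            if expression_.t != t:
                expression_.t = t

    problem.set_time = types.MethodType(overridden_set_time, problem)


problem = MockProblem()
expression = MockExpression()
_registered_expressions[problem] = [expression]
patch_set_time(problem)
problem.set_time(1.5)
assert problem.t == 1.5 and expression.t == 1.5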
Example #29
0
# Copyright (C) 2015-2020 by the RBniCS authors
#
# This file is part of RBniCS.
#
# SPDX-License-Identifier: LGPL-3.0-or-later

import inspect
from rbnics.utils.cache import Cache
from rbnics.utils.jupyter import is_jupyter


def CustomizeReductionMethodFor(Problem):
    assert inspect.isabstract(Problem), (
        "It is suggested to use this customizer for abstract classes (e.g., before specifying theta terms"
        + " and operators, or decorating with EIM or SCM), because otherwise the customization would not"
        + " be preserved with a call to exact_problem.")

    def CustomizeReductionMethodFor_Decorator(customizer):
        if not is_jupyter():
            assert Problem not in _cache
        _cache[Problem] = customizer
        return customizer

    return CustomizeReductionMethodFor_Decorator


_cache = Cache()  # from Problem to decorator
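
For illustration, a customizer registered through this decorator is simply stored in _cache, keyed by the abstract problem class, and returned unchanged. The following hypothetical usage (class and function names are illustrative only) would run in the same module as the definitions above, assuming RBniCS is installed:

from abc import ABC, abstractmethod


# Hypothetical abstract problem class standing in for an RBniCS problem
class MyAbstractProblem(ABC):
    @abstractmethod
    def solve(self):
        pass


@CustomizeReductionMethodFor(MyAbstractProblem)
def customize_reduction_method(ReductionMethod_Base):
    # Derive a customized reduction method from the provided base class
    class CustomizedReductionMethod(ReductionMethod_Base):
        pass  # overrides and extensions would go here

    return CustomizedReductionMethod


# The decorator stores the customizer and returns it unchanged
assert _cache[MyAbstractProblem] is customize_reduction_method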
Example #30
0
class DelayedFunctionsList(object):
    def __init__(self, space):
        self.space = space
        self._enrich_memory = list()
        self._precomputed_slices = Cache()  # from (start, stop) tuple to DelayedFunctionsList

    def enrich(self, function, component=None, weight=None, copy=True):
        assert component is None
        assert weight is None
        assert copy is True
        # Append to storage
        self._enrich(function)
        # Reset precomputed slices
        self._precomputed_slices.clear()
        # Prepare trivial precomputed slice
        self._precomputed_slices[0, len(self._enrich_memory)] = self

    @overload(DelayedLinearSolver)
    def _enrich(self, function):
        self._enrich_memory.append(function)

    @overload(lambda cls: cls)
    def _enrich(self, other):
        assert self.space is other.space
        self._enrich_memory.extend(other._enrich_memory)

    @overload(int)
    def __getitem__(self, key):
        return self._enrich_memory[key]

    @overload(slice)  # e.g. key = :N, return the first N functions
    def __getitem__(self, key):
        if key.start is not None:
            start = key.start
        else:
            start = 0
        assert key.step is None
        if key.stop is not None:
            stop = key.stop
        else:
            stop = len(self._enrich_memory)

        assert start <= stop
        if start < stop:
            assert start >= 0
            assert start < len(self._enrich_memory)
            assert stop > 0
            assert stop <= len(self._enrich_memory)
        # elif start == stop:
        #     trivial case, which will result in an empty DelayedFunctionsList

        if (start, stop) not in self._precomputed_slices:
            output = DelayedFunctionsList(self.space)
            if start < stop:
                output._enrich_memory = self._enrich_memory[key]
            self._precomputed_slices[start, stop] = output
        return self._precomputed_slices[start, stop]

    def __len__(self):
        return len(self._enrich_memory)

    def save(self, directory, filename):
        LengthIO.save_file(len(self._enrich_memory), directory,
                           filename + "_length")
        for (index, memory) in enumerate(self._enrich_memory):
            memory.save(directory, filename + "_" + str(index))

    def load(self, directory, filename):
        if len(self._enrich_memory) > 0:  # avoid loading multiple times
            return False
        else:
            assert LengthIO.exists_file(directory, filename + "_length")
            len_memory = LengthIO.load_file(directory, filename + "_length")
            for index in range(len_memory):
                memory = DelayedLinearSolver()
                memory.load(directory, filename + "_" + str(index))
                self.enrich(memory)
            return True

    def get_problem_name(self):
        problem_name = None
        for memory in self._enrich_memory:
            if problem_name is None:
                problem_name = memory.get_problem_name()
            else:
                assert memory.get_problem_name() == problem_name
        return problem_name
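
The _precomputed_slices cache above implements a straightforward memoization of slices: each distinct (start, stop) pair is materialized once and reused on later lookups, and enrichment invalidates the cache. A stripped-down, self-contained sketch of that pattern, using a plain dict and list instead of Cache and delayed functions:

class SlicedList:
    # Stripped-down sketch of the slice memoization in DelayedFunctionsList
    def __init__(self, items=None):
        self._items = list(items or [])
        self._precomputed_slices = {}  # from (start, stop) to SlicedList

    def enrich(self, item):
        self._items.append(item)
        # Cached slices may now be stale: reset them and re-register the
        # trivial full slice, exactly as enrich() does above
        self._precomputed_slices.clear()
        self._precomputed_slices[0, len(self._items)] = self

    def __getitem__(self, key):
        assert isinstance(key, slice) and key.step is None
        start = key.start if key.start is not None else 0
        stop = key.stop if key.stop is not None else len(self._items)
        if (start, stop) not in self._precomputed_slices:
            output = SlicedList(self._items[start:stop])
            self._precomputed_slices[start, stop] = output
        return self._precomputed_slices[start, stop]


functions = SlicedList()
for f in ("f0", "f1", "f2"):
    functions.enrich(f)
assert functions[:2] is functions[:2]  # second lookup hits the cache
assert functions[:3] is functions  # the trivial full slice returns self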