class DelayedBasisFunctionsMatrix(object):
    """Component-wise storage of delayed basis functions.

    One DelayedFunctionsList is stored per component name. This class keeps
    the component-name -> (basis component index, length) bookkeeping in sync
    whenever a list is enriched, and caches slices of itself in
    self._precomputed_slices (keyed by (start, stop) tuples).
    """

    def __init__(self, space):
        self.space = space
        self._components_name = list()
        self._component_name_to_basis_component_index = ComponentNameToBasisComponentIndexDict()
        self._component_name_to_basis_component_length = OnlineSizeDict()
        self._enrich_memory = Cache()
        self._precomputed_slices = Cache()  # from tuple to FunctionsList

    def init(self, components_name):
        """Allocate one DelayedFunctionsList per component; must be called only once."""

        # Patch DelayedFunctionsList.enrich() to update internal attributes
        def patch_delayed_functions_list_enrich(component_name, memory):
            original_delayed_functions_list_enrich = memory.enrich

            def patched_delayed_functions_list_enrich(self_, functions, component=None,
                                                      weights=None, copy=True):
                # Append to storage
                original_delayed_functions_list_enrich(functions, component, weights, copy)
                # Sanity check: the (single) component being enriched must be the
                # one this list was created for
                if component is not None:
                    if isinstance(component, dict):
                        assert len(component) == 1
                        for (_, component_to) in component.items():
                            break
                        assert component_name == component_to
                    else:
                        assert component_name == component
                # Update component name to basis component length
                self._update_component_name_to_basis_component_length(component_name)
                # Reset precomputed slices
                self._precomputed_slices.clear()
                # Prepare trivial precomputed slice
                self._prepare_trivial_precomputed_slice()

            memory.enrich_patch = PatchInstanceMethod(memory, "enrich",
                                                      patched_delayed_functions_list_enrich)
            memory.enrich_patch.patch()

        assert len(self._components_name) == 0  # init() must not be called twice
        self._components_name = components_name
        for (basis_component_index, component_name) in enumerate(components_name):
            self._component_name_to_basis_component_index[component_name] = basis_component_index
            self._component_name_to_basis_component_length[component_name] = 0
            self._enrich_memory[component_name] = DelayedFunctionsList(self.space)
            patch_delayed_functions_list_enrich(component_name,
                                                self._enrich_memory[component_name])

    def enrich(self, function, component=None, weight=None, copy=True):
        """Append a DelayedLinearSolver to the (single-component) storage."""
        assert isinstance(function, DelayedLinearSolver)
        assert component is None
        assert weight is None
        assert copy is True
        assert len(self._components_name) == 1
        assert len(self._enrich_memory) == 1
        component_0 = self._components_name[0]
        # Append to storage (the patched DelayedFunctionsList.enrich updates lengths
        # and precomputed slices)
        self._enrich_memory[component_0].enrich(function, component, weight, copy)

    @overload(None)
    def _update_component_name_to_basis_component_length(self, component):
        # No component given: only valid for single-component storage
        assert len(self._enrich_memory) == 1
        assert len(self._components_name) == 1
        component_0 = self._components_name[0]
        self._component_name_to_basis_component_length[component_0] = len(
            self._enrich_memory[component_0])

    @overload(str)
    def _update_component_name_to_basis_component_length(self, component):
        self._component_name_to_basis_component_length[component] = len(
            self._enrich_memory[component])

    def _prepare_trivial_precomputed_slice(self):
        """Cache the slice spanning the whole storage under its (start, stop) key."""
        if len(self._enrich_memory) == 1:
            # Single component: plain integer keys
            assert len(self._components_name) == 1
            component_0 = self._components_name[0]
            precomputed_slice_key_start = 0
            precomputed_slice_key_stop = self._component_name_to_basis_component_length[
                component_0]
        else:
            # Multiple components: component-ordered tuples of integers
            precomputed_slice_key_start = list()
            precomputed_slice_key_stop = list()
            for component_name in self._components_name:
                precomputed_slice_key_start.append(0)
                precomputed_slice_key_stop.append(
                    self._component_name_to_basis_component_length[component_name])
            precomputed_slice_key_start = tuple(precomputed_slice_key_start)
            precomputed_slice_key_stop = tuple(precomputed_slice_key_stop)
        self._precomputed_slices[precomputed_slice_key_start,
                                 precomputed_slice_key_stop] = self

    @overload(slice)  # e.g. key = :N, return the first N functions
    def __getitem__(self, key):
        assert key.step is None
        return self._precompute_slice(key.start, key.stop)

    @overload(str)
    def __getitem__(self, key):
        # Access the storage of a single component by name
        return self._enrich_memory[key]

    def __len__(self):
        # Length is well-defined only for single-component storage
        assert len(self._components_name) == 1
        assert len(self._enrich_memory) == 1
        component_0 = self._components_name[0]
        return self._component_name_to_basis_component_length[component_0]

    @overload(None, int)
    def _precompute_slice(self, _, N_stop):
        return self._precompute_slice(0, N_stop)

    @overload(int, None)
    def _precompute_slice(self, N_start, _):
        return self._precompute_slice(N_start, len(self))

    @overload(int, int)
    def _precompute_slice(self, N_start, N_stop):
        if (N_start, N_stop) not in self._precomputed_slices:
            assert len(self._enrich_memory) == 1
            output = DelayedBasisFunctionsMatrix(self.space)
            output.init(self._components_name)
            for component_name in self._components_name:
                output._enrich_memory[component_name].enrich(
                    self._enrich_memory[component_name][N_start:N_stop])
            self._precomputed_slices[N_start, N_stop] = output
        return self._precomputed_slices[N_start, N_stop]

    @overload(None, OnlineSizeDict)
    def _precompute_slice(self, _, N_stop):
        # Start from zero for every component
        N_start = OnlineSizeDict()
        for component_name in self._components_name:
            N_start[component_name] = 0
        return self._precompute_slice(N_start, N_stop)

    @overload(OnlineSizeDict, None)
    def _precompute_slice(self, N_start, _):
        # Stop at the full length of every component
        N_stop = OnlineSizeDict()
        for component_name in self._components_name:
            N_stop[component_name] = self._component_name_to_basis_component_length[
                component_name]
        # BUG FIX: forward the N_stop dict built above. The previous code passed
        # len(self), which matches no (OnlineSizeDict, int) overload and asserts
        # on multi-component storage.
        return self._precompute_slice(N_start, N_stop)

    @overload(OnlineSizeDict, OnlineSizeDict)
    def _precompute_slice(self, N_start, N_stop):
        assert set(N_start.keys()) == set(self._components_name)
        assert set(N_stop.keys()) == set(self._components_name)
        # Cache key: component-ordered tuples of start/stop indices
        N_start_key = tuple(N_start[component_name]
                            for component_name in self._components_name)
        N_stop_key = tuple(N_stop[component_name]
                           for component_name in self._components_name)
        if (N_start_key, N_stop_key) not in self._precomputed_slices:
            output = DelayedBasisFunctionsMatrix(self.space)
            output.init(self._components_name)
            for component_name in self._components_name:
                output._enrich_memory[component_name].enrich(
                    self._enrich_memory[component_name]
                    [N_start[component_name]:N_stop[component_name]])
            self._precomputed_slices[N_start_key, N_stop_key] = output
        return self._precomputed_slices[N_start_key, N_stop_key]

    def save(self, directory, filename):
        """Save each component's delayed functions, suffixing filename with the component name."""
        for (component, memory) in self._enrich_memory.items():
            memory.save(directory, filename + "_" + component)

    def load(self, directory, filename):
        """Load each component's delayed functions; return True only if every load succeeds."""
        return_value = True
        for (component, memory) in self._enrich_memory.items():
            # Skip updating internal attributes while reading in basis functions, we will do that
            # only once at the end
            assert hasattr(memory, "enrich_patch")
            memory.enrich_patch.unpatch()
            # Load each component
            return_value_component = memory.load(directory, filename + "_" + component)
            return_value = return_value and return_value_component
            # Populate component length
            self._update_component_name_to_basis_component_length(component)
            # Restore patched enrich method
            memory.enrich_patch.patch()
        # Reset precomputed slices
        self._precomputed_slices.clear()
        # Prepare trivial precomputed slice
        self._prepare_trivial_precomputed_slice()
        return return_value

    def get_problem_name(self):
        """Return the problem name shared by all stored components (asserts consistency)."""
        problem_name = None
        for (_, memory) in self._enrich_memory.items():
            if problem_name is None:
                problem_name = memory.get_problem_name()
            else:
                assert memory.get_problem_name() == problem_name
        return problem_name
class NonAffineExpansionStorage(AbstractNonAffineExpansionStorage):
    def __init__(self, *shape):
        """Initialize empty storage for a 1-D or 2-D expansion of the given shape."""
        self._shape = shape
        self._type = "empty"
        self._content = dict()
        self._precomputed_slices = Cache(
        )  # from tuple to NonAffineExpansionStorage
        assert len(shape) in (1, 2)
        # Compare integers with ==, not the identity operator "is": identity of
        # int literals is a CPython caching artifact and warns on Python >= 3.8
        if len(shape) == 1:
            self._smallest_key = 0
            self._largest_key = shape[0] - 1
        else:
            self._smallest_key = (0, 0)
            self._largest_key = (shape[0] - 1, shape[1] - 1)

    def save(self, directory, filename):
        """Export the storage content to directory/filename, dispatching on self._type."""
        # Get full directory name
        full_directory = Folders.Folder(os.path.join(str(directory), filename))
        full_directory.create()
        # Export depending on type
        TypeIO.save_file(self._type, full_directory, "type")
        assert self._type in ("basis_functions_matrix", "empty",
                              "error_estimation_operators_11",
                              "error_estimation_operators_21",
                              "error_estimation_operators_22",
                              "functions_list", "operators")
        if self._type in ("basis_functions_matrix", "functions_list"):
            # Save delayed functions, one file per entry
            delayed_functions = self._content[self._type]
            it = NonAffineExpansionStorageContent_Iterator(
                delayed_functions,
                flags=["c_index", "multi_index", "refs_ok"],
                op_flags=["readonly"])
            while not it.finished:
                delayed_function = delayed_functions[it.multi_index]
                delayed_function.save(full_directory,
                                      "delayed_functions_" + str(it.index))
                it.iternext()
        elif self._type == "empty":
            pass
        elif self._type in ("error_estimation_operators_11",
                            "error_estimation_operators_21",
                            "error_estimation_operators_22"):
            # Save delayed functions: for each of the two sides, store the type,
            # problem name and content of every entry
            delayed_function_type = {
                DelayedBasisFunctionsMatrix: "DelayedBasisFunctionsMatrix",
                DelayedLinearSolver: "DelayedLinearSolver"
            }
            # Compare integers with ==, not the identity operator "is"
            assert len(self._content["delayed_functions"]) == 2
            for (index, delayed_functions) in enumerate(
                    self._content["delayed_functions"]):
                it = NonAffineExpansionStorageContent_Iterator(
                    delayed_functions,
                    flags=["c_index", "refs_ok"],
                    op_flags=["readonly"])
                while not it.finished:
                    delayed_function = delayed_functions[it.index]
                    DelayedFunctionsTypeIO.save_file(
                        delayed_function_type[type(delayed_function)],
                        full_directory, "delayed_functions_" + str(index) +
                        "_" + str(it.index) + "_type")
                    DelayedFunctionsProblemNameIO.save_file(
                        delayed_function.get_problem_name(), full_directory,
                        "delayed_functions_" + str(index) + "_" +
                        str(it.index) + "_problem_name")
                    delayed_function.save(
                        full_directory, "delayed_functions_" + str(index) +
                        "_" + str(it.index) + "_content")
                    it.iternext()
            # The inner product matrix is saved by reference (problem name only)
            ErrorEstimationInnerProductIO.save_file(
                get_reduced_problem_from_error_estimation_inner_product(
                    self._content["inner_product_matrix"]).truth_problem.name(
                    ), full_directory, "inner_product_matrix_problem_name")
        elif self._type == "operators":
            # Save truth content
            it = NonAffineExpansionStorageContent_Iterator(
                self._content["truth_operators"],
                flags=["c_index", "multi_index", "refs_ok"],
                op_flags=["readonly"])
            while not it.finished:
                operator = self._content["truth_operators"][it.multi_index]
                assert isinstance(
                    operator, (AbstractParametrizedTensorFactory, NumericForm))
                if isinstance(operator, AbstractParametrizedTensorFactory):
                    # Parametrized operators are saved by reference:
                    # (problem name, term, index), not by value
                    problem_name = get_problem_from_parametrized_operator(
                        operator).name()
                    (term,
                     index) = get_term_and_index_from_parametrized_operator(
                         operator)
                    TruthContentItemIO.save_file(
                        "ParametrizedTensorFactory", full_directory,
                        "truth_operator_" + str(it.index) + "_type")
                    TruthContentItemIO.save_file(
                        (problem_name, term, index), full_directory,
                        "truth_operator_" + str(it.index))
                elif isinstance(operator, NumericForm):
                    # Numeric forms are saved by value
                    TruthContentItemIO.save_file(
                        "NumericForm", full_directory,
                        "truth_operator_" + str(it.index) + "_type")
                    TruthContentItemIO.save_file(
                        operator, full_directory,
                        "truth_operator_" + str(it.index))
                else:
                    raise TypeError("Invalid operator type")
                it.iternext()
            assert "truth_operators_as_expansion_storage" in self._content
            # Save basis functions content, again by reference
            assert len(self._content["basis_functions"]) in (0, 1, 2)
            BasisFunctionsContentLengthIO.save_file(
                len(self._content["basis_functions"]), full_directory,
                "basis_functions_length")
            for (index, basis_functions) in enumerate(
                    self._content["basis_functions"]):
                BasisFunctionsProblemNameIO.save_file(
                    get_reduced_problem_from_basis_functions(
                        basis_functions).truth_problem.name(), full_directory,
                    "basis_functions_" + str(index) + "_problem_name")
                BasisFunctionsProblemNameIO.save_file(
                    basis_functions._components_name, full_directory,
                    "basis_functions_" + str(index) + "_components_name")
        else:
            raise ValueError("Invalid type")

    def load(self, directory, filename):
        """Import storage content from directory/filename.

        Returns False (without touching disk) if this storage already holds
        loaded content; otherwise reads the "type" marker and restores the
        content accordingly, returning True.
        """
        if self._type != "empty":  # avoid loading multiple times
            if self._type in ("basis_functions_matrix", "functions_list"):
                # Loading is still allowed as long as every entry is empty
                delayed_functions = self._content[self._type]
                it = NonAffineExpansionStorageContent_Iterator(
                    delayed_functions,
                    flags=["c_index", "multi_index", "refs_ok"],
                    op_flags=["readonly"])
                while not it.finished:
                    if isinstance(delayed_functions[it.multi_index],
                                  DelayedFunctionsList):
                        assert self._type == "functions_list"
                        if len(
                                delayed_functions[it.multi_index]
                        ) > 0:  # ... unless it is an empty FunctionsList
                            return False
                    elif isinstance(delayed_functions[it.multi_index],
                                    DelayedBasisFunctionsMatrix):
                        assert self._type == "basis_functions_matrix"
                        if sum(
                                delayed_functions[it.multi_index].
                                _component_name_to_basis_component_length.
                                values()
                        ) > 0:  # ... unless it is an empty BasisFunctionsMatrix
                            return False
                    else:
                        raise TypeError("Invalid delayed functions")
                    it.iternext()
            else:
                return False
        # Get full directory name
        full_directory = Folders.Folder(os.path.join(str(directory), filename))
        # Detect trivial case
        assert TypeIO.exists_file(full_directory, "type")
        imported_type = TypeIO.load_file(full_directory, "type")
        self._type = imported_type
        assert self._type in ("basis_functions_matrix", "empty",
                              "error_estimation_operators_11",
                              "error_estimation_operators_21",
                              "error_estimation_operators_22",
                              "functions_list", "operators")
        if self._type in ("basis_functions_matrix", "functions_list"):
            # Load delayed functions, one file per entry (mirrors save())
            assert self._type in self._content
            delayed_functions = self._content[self._type]
            it = NonAffineExpansionStorageContent_Iterator(
                delayed_functions, flags=["c_index", "multi_index", "refs_ok"])
            while not it.finished:
                delayed_function = delayed_functions[it.multi_index]
                delayed_function.load(full_directory,
                                      "delayed_functions_" + str(it.index))
                it.iternext()
        elif self._type == "empty":
            pass
        elif self._type in ("error_estimation_operators_11",
                            "error_estimation_operators_21",
                            "error_estimation_operators_22"):
            # Load delayed functions: for each of the two sides, restore type,
            # problem name and content of every entry (mirrors save())
            assert "delayed_functions" not in self._content
            self._content["delayed_functions"] = [
                NonAffineExpansionStorageContent_Base(self._shape[0],
                                                      dtype=object),
                NonAffineExpansionStorageContent_Base(self._shape[1],
                                                      dtype=object)
            ]
            for (index, delayed_functions) in enumerate(
                    self._content["delayed_functions"]):
                it = NonAffineExpansionStorageContent_Iterator(
                    delayed_functions, flags=["c_index", "refs_ok"])
                while not it.finished:
                    assert DelayedFunctionsTypeIO.exists_file(
                        full_directory, "delayed_functions_" + str(index) +
                        "_" + str(it.index) + "_type")
                    delayed_function_type = DelayedFunctionsTypeIO.load_file(
                        full_directory, "delayed_functions_" + str(index) +
                        "_" + str(it.index) + "_type")
                    assert DelayedFunctionsProblemNameIO.exists_file(
                        full_directory, "delayed_functions_" + str(index) +
                        "_" + str(it.index) + "_problem_name")
                    delayed_function_problem_name = DelayedFunctionsProblemNameIO.load_file(
                        full_directory, "delayed_functions_" + str(index) +
                        "_" + str(it.index) + "_problem_name")
                    delayed_function_problem = get_problem_from_problem_name(
                        delayed_function_problem_name)
                    assert delayed_function_type in (
                        "DelayedBasisFunctionsMatrix", "DelayedLinearSolver")
                    # Reconstruct an empty instance of the saved type, then fill
                    # it from the corresponding content file
                    if delayed_function_type == "DelayedBasisFunctionsMatrix":
                        delayed_function = DelayedBasisFunctionsMatrix(
                            delayed_function_problem.V)
                        delayed_function.init(
                            delayed_function_problem.components)
                    elif delayed_function_type == "DelayedLinearSolver":
                        delayed_function = DelayedLinearSolver()
                    else:
                        raise ValueError("Invalid delayed function")
                    delayed_function.load(
                        full_directory, "delayed_functions_" + str(index) +
                        "_" + str(it.index) + "_content")
                    delayed_functions[it.index] = delayed_function
                    it.iternext()
            # Load inner product (saved by problem name; resolve back to the
            # reduced problem's error estimation inner product)
            assert ErrorEstimationInnerProductIO.exists_file(
                full_directory, "inner_product_matrix_problem_name")
            inner_product_matrix_problem_name = ErrorEstimationInnerProductIO.load_file(
                full_directory, "inner_product_matrix_problem_name")
            inner_product_matrix_problem = get_problem_from_problem_name(
                inner_product_matrix_problem_name)
            inner_product_matrix_reduced_problem = get_reduced_problem_from_problem(
                inner_product_matrix_problem)
            self._content[
                "inner_product_matrix"] = inner_product_matrix_reduced_problem._error_estimation_inner_product
            # Recompute shape
            assert "delayed_functions_shape" not in self._content
            self._content["delayed_functions_shape"] = DelayedTransposeShape(
                (self._content["delayed_functions"][0][0],
                 self._content["delayed_functions"][1][0]))
            # Prepare precomputed slices
            self._precomputed_slices.clear()
            self._prepare_trivial_precomputed_slice()
        elif self._type == "empty":
            # NOTE(review): unreachable — the "empty" type is already handled by
            # the identical branch above; consider removing this duplicate.
            pass
        elif self._type == "operators":
            # Load truth content (operators saved by reference or by value,
            # mirroring save())
            assert "truth_operators" not in self._content
            self._content[
                "truth_operators"] = NonAffineExpansionStorageContent_Base(
                    self._shape, dtype=object)
            it = NonAffineExpansionStorageContent_Iterator(
                self._content["truth_operators"],
                flags=["c_index", "multi_index", "refs_ok"])
            while not it.finished:
                assert TruthContentItemIO.exists_file(
                    full_directory,
                    "truth_operator_" + str(it.index) + "_type")
                operator_type = TruthContentItemIO.load_file(
                    full_directory,
                    "truth_operator_" + str(it.index) + "_type")
                assert operator_type in ("NumericForm",
                                         "ParametrizedTensorFactory")
                if operator_type == "NumericForm":
                    # Saved by value
                    assert TruthContentItemIO.exists_file(
                        full_directory, "truth_operator_" + str(it.index))
                    value = TruthContentItemIO.load_file(
                        full_directory, "truth_operator_" + str(it.index))
                    self._content["truth_operators"][
                        it.multi_index] = NumericForm(value)
                elif operator_type == "ParametrizedTensorFactory":
                    # Saved by reference: (problem name, term, index)
                    assert TruthContentItemIO.exists_file(
                        full_directory, "truth_operator_" + str(it.index))
                    (problem_name, term, index) = TruthContentItemIO.load_file(
                        full_directory, "truth_operator_" + str(it.index))
                    truth_problem = get_problem_from_problem_name(problem_name)
                    self._content["truth_operators"][
                        it.multi_index] = truth_problem.operator[term][index]
                else:
                    raise ValueError("Invalid operator type")
                it.iternext()
            assert "truth_operators_as_expansion_storage" not in self._content
            self._prepare_truth_operators_as_expansion_storage()
            # Load basis functions content (saved by problem/component names)
            assert BasisFunctionsContentLengthIO.exists_file(
                full_directory, "basis_functions_length")
            basis_functions_length = BasisFunctionsContentLengthIO.load_file(
                full_directory, "basis_functions_length")
            assert basis_functions_length in (0, 1, 2)
            assert "basis_functions" not in self._content
            self._content["basis_functions"] = list()
            for index in range(basis_functions_length):
                assert BasisFunctionsProblemNameIO.exists_file(
                    full_directory,
                    "basis_functions_" + str(index) + "_problem_name")
                basis_functions_problem_name = BasisFunctionsProblemNameIO.load_file(
                    full_directory,
                    "basis_functions_" + str(index) + "_problem_name")
                assert BasisFunctionsProblemNameIO.exists_file(
                    full_directory,
                    "basis_functions_" + str(index) + "_components_name")
                basis_functions_components_name = BasisFunctionsProblemNameIO.load_file(
                    full_directory,
                    "basis_functions_" + str(index) + "_components_name")
                basis_functions_problem = get_problem_from_problem_name(
                    basis_functions_problem_name)
                basis_functions_reduced_problem = get_reduced_problem_from_problem(
                    basis_functions_problem)
                basis_functions = basis_functions_reduced_problem.basis_functions
                # Restrict to the saved components when they differ from the
                # problem's full component list
                if basis_functions_components_name != basis_functions_problem.components:
                    basis_functions = basis_functions[
                        basis_functions_components_name]
                self._content["basis_functions"].append(basis_functions)
            # Recompute shape
            self._content["basis_functions_shape"] = DelayedTransposeShape(
                self._content["basis_functions"])
            # Reset precomputed slices
            self._precomputed_slices.clear()
            self._prepare_trivial_precomputed_slice()
        else:
            raise ValueError("Invalid type")
        return True

    def _prepare_trivial_precomputed_slice(self):
        """Cache the slice spanning the whole storage, keyed by empty slices."""
        empty_slice = slice(None)
        assert self._type in ("error_estimation_operators_11",
                              "error_estimation_operators_21",
                              "error_estimation_operators_22", "operators")
        if self._type == "error_estimation_operators_11":
            pass  # nothing to be done (scalar content)
        elif self._type == "error_estimation_operators_21":
            assert "delayed_functions" in self._content
            # Compare integers with ==, not the identity operator "is"
            assert len(self._content["delayed_functions"]) == 2
            assert "delayed_functions_shape" in self._content

            # Only the first dimension is sliceable for the _21 type
            slice_ = slice_to_array(
                self._content["delayed_functions_shape"], empty_slice,
                self._content["delayed_functions_shape"].
                _component_name_to_basis_component_length,
                self._content["delayed_functions_shape"].
                _component_name_to_basis_component_index)
            self._precomputed_slices[slice_] = self
        elif self._type == "error_estimation_operators_22":
            assert "delayed_functions" in self._content
            assert len(self._content["delayed_functions"]) == 2
            assert "delayed_functions_shape" in self._content

            # Both dimensions are sliceable for the _22 type
            slice_ = slice_to_array(
                self._content["delayed_functions_shape"],
                (empty_slice, empty_slice),
                self._content["delayed_functions_shape"].
                _component_name_to_basis_component_length,
                self._content["delayed_functions_shape"].
                _component_name_to_basis_component_index)
            self._precomputed_slices[slice_] = self
        elif self._type == "operators":
            assert len(self._content["basis_functions"]) in (0, 1, 2)
            assert "basis_functions_shape" in self._content

            if len(self._content["basis_functions"]) == 0:
                pass  # nothing to be done (scalar content)
            elif len(self._content["basis_functions"]) == 1:
                slice_ = slice_to_array(
                    self._content["basis_functions_shape"], empty_slice,
                    self._content["basis_functions_shape"].
                    _component_name_to_basis_component_length,
                    self._content["basis_functions_shape"].
                    _component_name_to_basis_component_index)
                self._precomputed_slices[slice_] = self
            elif len(self._content["basis_functions"]) == 2:
                slices = slice_to_array(
                    self._content["basis_functions_shape"],
                    (empty_slice, empty_slice),
                    self._content["basis_functions_shape"].
                    _component_name_to_basis_component_length,
                    self._content["basis_functions_shape"].
                    _component_name_to_basis_component_index)
                self._precomputed_slices[slices] = self
            else:
                raise ValueError("Invalid length")
        else:
            raise ValueError("Invalid type")

    @overload(
        slice, )
    def __getitem__(self, key):
        """Slice the first (and only sliceable) dimension of the storage.

        For "error_estimation_operators_21" the slice is applied to the first
        family of delayed functions only; for "operators" it is applied to the
        stored basis functions. The resulting sub-storage is cached in
        self._precomputed_slices so repeated requests are served immediately.
        """
        assert self._type in ("error_estimation_operators_21", "operators")
        if self._type == "error_estimation_operators_21":
            assert "delayed_functions" in self._content
            # fix: compare small-int lengths with ==, not identity (is)
            assert len(self._content["delayed_functions"]) == 2
            assert "delayed_functions_shape" in self._content

            slice_ = slice_to_array(
                self._content["delayed_functions_shape"], key,
                self._content["delayed_functions_shape"].
                _component_name_to_basis_component_length,
                self._content["delayed_functions_shape"].
                _component_name_to_basis_component_index)

            if slice_ in self._precomputed_slices:
                return self._precomputed_slices[slice_]
            else:
                output = NonAffineExpansionStorage.__new__(
                    type(self), *self._shape)
                output.__init__(*self._shape)
                output._type = self._type
                output._content["inner_product_matrix"] = self._content[
                    "inner_product_matrix"]
                output._content["delayed_functions"] = [
                    NonAffineExpansionStorageContent_Base(self._shape[0],
                                                          dtype=object),
                    NonAffineExpansionStorageContent_Base(self._shape[1],
                                                          dtype=object)
                ]
                for q in range(self._shape[0]):
                    output._content["delayed_functions"][0][q] = self._content[
                        "delayed_functions"][0][q][key]
                # NOTE: the second family is deliberately copied unsliced,
                # since the "21" variant only restricts the first dimension.
                for q in range(self._shape[1]):
                    output._content["delayed_functions"][1][q] = self._content[
                        "delayed_functions"][1][q]
                output._content[
                    "delayed_functions_shape"] = DelayedTransposeShape(
                        (output._content["delayed_functions"][0][0],
                         output._content["delayed_functions"][1][0]))
                self._precomputed_slices[slice_] = output
                return output
        elif self._type == "operators":
            assert "basis_functions" in self._content
            assert len(self._content["basis_functions"]) == 1
            assert "basis_functions_shape" in self._content

            slice_ = slice_to_array(
                self._content["basis_functions_shape"], key,
                self._content["basis_functions_shape"].
                _component_name_to_basis_component_length,
                self._content["basis_functions_shape"].
                _component_name_to_basis_component_index)

            if slice_ in self._precomputed_slices:
                return self._precomputed_slices[slice_]
            else:
                output = NonAffineExpansionStorage.__new__(
                    type(self), *self._shape)
                output.__init__(*self._shape)
                output._type = self._type
                output._content["truth_operators"] = self._content[
                    "truth_operators"]
                output._content[
                    "truth_operators_as_expansion_storage"] = self._content[
                        "truth_operators_as_expansion_storage"]
                output._content["basis_functions"] = list()
                output._content["basis_functions"].append(
                    self._content["basis_functions"][0][key])
                output._content[
                    "basis_functions_shape"] = DelayedTransposeShape(
                        output._content["basis_functions"])
                self._precomputed_slices[slice_] = output
                return output
        else:
            raise ValueError("Invalid type")

    @overload(
        tuple_of(slice), )
    def __getitem__(self, key):
        """Slice both dimensions of a two-dimensional storage.

        key[0] restricts the first family of delayed/basis functions and
        key[1] the second one. The resulting sub-storage is cached in
        self._precomputed_slices so repeated requests are served immediately.
        """
        assert self._type in ("error_estimation_operators_22", "operators")
        if self._type == "error_estimation_operators_22":
            # fix: compare small-int lengths with ==, not identity (is)
            assert len(key) == 2
            assert "delayed_functions" in self._content
            assert len(self._content["delayed_functions"]) == 2
            assert "delayed_functions_shape" in self._content

            slice_ = slice_to_array(
                self._content["delayed_functions_shape"], key,
                self._content["delayed_functions_shape"].
                _component_name_to_basis_component_length,
                self._content["delayed_functions_shape"].
                _component_name_to_basis_component_index)

            if slice_ in self._precomputed_slices:
                return self._precomputed_slices[slice_]
            else:
                output = NonAffineExpansionStorage.__new__(
                    type(self), *self._shape)
                output.__init__(*self._shape)
                output._type = self._type
                output._content["inner_product_matrix"] = self._content[
                    "inner_product_matrix"]
                output._content["delayed_functions"] = [
                    NonAffineExpansionStorageContent_Base(self._shape[0],
                                                          dtype=object),
                    NonAffineExpansionStorageContent_Base(self._shape[1],
                                                          dtype=object)
                ]
                for q in range(self._shape[0]):
                    output._content["delayed_functions"][0][q] = self._content[
                        "delayed_functions"][0][q][key[0]]
                for q in range(self._shape[1]):
                    output._content["delayed_functions"][1][q] = self._content[
                        "delayed_functions"][1][q][key[1]]
                output._content[
                    "delayed_functions_shape"] = DelayedTransposeShape(
                        (output._content["delayed_functions"][0][0],
                         output._content["delayed_functions"][1][0]))
                self._precomputed_slices[slice_] = output
                return output
        elif self._type == "operators":
            assert len(key) == 2
            assert "basis_functions" in self._content
            assert len(self._content["basis_functions"]) == 2
            assert "basis_functions_shape" in self._content

            slices = slice_to_array(
                self._content["basis_functions_shape"], key,
                self._content["basis_functions_shape"].
                _component_name_to_basis_component_length,
                self._content["basis_functions_shape"].
                _component_name_to_basis_component_index)

            if slices in self._precomputed_slices:
                return self._precomputed_slices[slices]
            else:
                output = NonAffineExpansionStorage.__new__(
                    type(self), *self._shape)
                output.__init__(*self._shape)
                output._type = self._type
                output._content["truth_operators"] = self._content[
                    "truth_operators"]
                output._content[
                    "truth_operators_as_expansion_storage"] = self._content[
                        "truth_operators_as_expansion_storage"]
                output._content["basis_functions"] = list()
                output._content["basis_functions"].append(
                    self._content["basis_functions"][0][key[0]])
                output._content["basis_functions"].append(
                    self._content["basis_functions"][1][key[1]])
                output._content[
                    "basis_functions_shape"] = DelayedTransposeShape(
                        output._content["basis_functions"])
                self._precomputed_slices[slices] = output
                return output
        else:
            raise ValueError("Invalid type")

    @overload(
        int, )
    def __getitem__(self, key):
        """Return the key-th stored item, or the key-th delayed operator product."""
        storage_type = self._type
        assert storage_type in ("basis_functions_matrix", "functions_list",
                                "operators")
        if storage_type == "operators":
            truth_operator = self._content["truth_operators"][key]
            return self._delay_transpose(self._content["basis_functions"],
                                         truth_operator)
        elif storage_type in ("basis_functions_matrix", "functions_list"):
            return self._content[storage_type][key]
        else:
            raise ValueError("Invalid type")

    @overload(
        tuple_of(int), )
    def __getitem__(self, key):
        """Return the delayed transposed product for entry (key[0], key[1])."""
        assert self._type in ("error_estimation_operators_11",
                              "error_estimation_operators_21",
                              "error_estimation_operators_22")
        delayed_functions = self._content["delayed_functions"]
        pre_post = (delayed_functions[0][key[0]], delayed_functions[1][key[1]])
        return self._delay_transpose(pre_post,
                                     self._content["inner_product_matrix"])

    def __iter__(self):
        """Iterate over stored items; operators are wrapped lazily on the fly."""
        storage_type = self._type
        assert storage_type in ("basis_functions_matrix", "functions_list",
                                "operators")
        if storage_type == "operators":
            basis_functions = self._content["basis_functions"]
            return (self._delay_transpose(basis_functions, op)
                    for op in self._content["truth_operators"])
        elif storage_type in ("basis_functions_matrix", "functions_list"):
            return iter(self._content[storage_type])
        else:
            raise ValueError("Invalid type")

    @overload((int, tuple_of(int)), AbstractBasisFunctionsMatrix)
    def __setitem__(self, key, item):
        """Store a delayed copy of a basis functions matrix at position key."""
        if self._type == "empty":
            # First insertion: fix the storage type and allocate the content.
            self._type = "basis_functions_matrix"
            self._content[self._type] = NonAffineExpansionStorageContent_Base(
                self._shape, dtype=object)
        else:
            assert self._type == "basis_functions_matrix"
        delayed_item = DelayedBasisFunctionsMatrix(item.space)
        delayed_item.init(item._components_name)
        self._content[self._type][key] = delayed_item

    @overload((int, tuple_of(int)), AbstractFunctionsList)
    def __setitem__(self, key, item):
        """Store a delayed copy of a functions list at position key."""
        if self._type == "empty":
            # First insertion: fix the storage type and allocate the content.
            self._type = "functions_list"
            self._content[self._type] = NonAffineExpansionStorageContent_Base(
                self._shape, dtype=object)
        else:
            assert self._type == "functions_list"
        self._content[self._type][key] = DelayedFunctionsList(item.space)

    @overload((int, tuple_of(int)), DelayedTranspose)
    def __setitem__(self, key, item):
        """Store a delayed transpose product at position key.

        Two families are supported:
        * transpose(basis) * op [* basis]: an "operators" storage;
        * transpose(f0) * X * f2, with f0, f2 delayed functions or delayed
          linear solvers: an error estimation operators storage, whose
          variant (11, 21, 22) is determined by the types of f0 and f2.
        """
        assert isinstance(item._args[0],
                          (AbstractBasisFunctionsMatrix,
                           DelayedBasisFunctionsMatrix, DelayedLinearSolver))
        if isinstance(item._args[0], AbstractBasisFunctionsMatrix):
            if self._type != "empty":
                assert self._type == "operators"
            else:
                self._type = "operators"
            # Reset attributes if size has changed
            if key == self._smallest_key:  # this assumes that __setitem__ is not random access but called for increasing key
                self._content.pop("truth_operators_as_expansion_storage", None)
                self._content[
                    "truth_operators"] = NonAffineExpansionStorageContent_Base(
                        self._shape, dtype=object)
                self._content["basis_functions"] = list()
                self._content.pop("basis_functions_shape", None)
            # Store
            assert len(item._args) in (2, 3)
            # fix: compare small-int lengths with ==, not identity (is)
            if len(self._content["basis_functions"]) == 0:
                assert isinstance(item._args[0], AbstractBasisFunctionsMatrix)
                self._content["basis_functions"].append(item._args[0])
            else:
                assert item._args[0] is self._content["basis_functions"][0]
            self._content["truth_operators"][key] = item._args[1]
            if len(item._args) > 2:
                if len(self._content["basis_functions"]) == 1:
                    assert isinstance(item._args[2],
                                      AbstractBasisFunctionsMatrix)
                    self._content["basis_functions"].append(item._args[2])
                else:
                    assert item._args[2] is self._content["basis_functions"][1]
            # Recompute shape
            if "basis_functions_shape" not in self._content:
                self._content["basis_functions_shape"] = DelayedTransposeShape(
                    self._content["basis_functions"])
            # Compute truth expansion storage and prepare precomputed slices
            if key == self._largest_key:  # this assumes that __setitem__ is not random access but called for increasing key
                self._prepare_truth_operators_as_expansion_storage()
                self._precomputed_slices.clear()
                self._prepare_trivial_precomputed_slice()
        elif isinstance(item._args[0],
                        (DelayedBasisFunctionsMatrix, DelayedLinearSolver)):
            assert len(item._args) == 3
            assert isinstance(
                item._args[2],
                (DelayedBasisFunctionsMatrix, DelayedLinearSolver))
            # Determine the error estimation variant from the operand types
            if isinstance(item._args[0], DelayedLinearSolver):
                assert isinstance(item._args[2], DelayedLinearSolver)
                if self._type != "empty":
                    assert self._type == "error_estimation_operators_11"
                else:
                    self._type = "error_estimation_operators_11"
            elif isinstance(item._args[0], DelayedBasisFunctionsMatrix):
                if isinstance(item._args[2], DelayedLinearSolver):
                    if self._type != "empty":
                        assert self._type == "error_estimation_operators_21"
                    else:
                        self._type = "error_estimation_operators_21"
                elif isinstance(item._args[2], DelayedBasisFunctionsMatrix):
                    if self._type != "empty":
                        assert self._type == "error_estimation_operators_22"
                    else:
                        self._type = "error_estimation_operators_22"
                else:
                    raise TypeError(
                        "Invalid arguments to NonAffineExpansionStorage")
            else:
                raise TypeError(
                    "Invalid arguments to NonAffineExpansionStorage")
            # Reset attributes if size has changed
            if key == self._smallest_key:  # this assumes that __setitem__ is not random access but called for increasing key
                self._content["delayed_functions"] = [
                    NonAffineExpansionStorageContent_Base(self._shape[0],
                                                          dtype=object),
                    NonAffineExpansionStorageContent_Base(self._shape[1],
                                                          dtype=object)
                ]
                self._content.pop("delayed_functions_shape", None)
                self._content.pop("inner_product_matrix", None)
            # Store
            if key[1] == self._smallest_key[
                    1]:  # this assumes that __setitem__ is not random access but called for increasing key
                self._content["delayed_functions"][0][key[0]] = item._args[0]
            else:
                assert item._args[0] is self._content["delayed_functions"][0][
                    key[0]]
            if "inner_product_matrix" not in self._content:
                self._content["inner_product_matrix"] = item._args[1]
            else:
                assert item._args[1] is self._content["inner_product_matrix"]
            if key[0] == self._smallest_key[
                    0]:  # this assumes that __setitem__ is not random access but called for increasing key
                self._content["delayed_functions"][1][key[1]] = item._args[2]
            else:
                assert item._args[2] is self._content["delayed_functions"][1][
                    key[1]]
            # Recompute shape
            if "delayed_functions_shape" not in self._content:
                self._content[
                    "delayed_functions_shape"] = DelayedTransposeShape(
                        (item._args[0], item._args[2]))
            else:
                assert DelayedTransposeShape((
                    item._args[0],
                    item._args[2])) == self._content["delayed_functions_shape"]
            # Prepare precomputed slices
            if key == self._largest_key:  # this assumes that __setitem__ is not random access but called for increasing key
                self._precomputed_slices.clear()
                self._prepare_trivial_precomputed_slice()
        else:
            raise TypeError("Invalid arguments to NonAffineExpansionStorage")

    @overload((int, tuple_of(int)),
              (AbstractParametrizedTensorFactory, Number))
    def __setitem__(self, key, item):
        """Store a scalar operator (a plain number or a scalar-valued
        parametrized tensor factory) at position key."""
        if self._type != "empty":
            assert self._type == "operators"
        else:
            self._type = "operators"
        # Reset attributes, similarly to what is done for Vector and Matrix operators
        if key == self._smallest_key:  # this assumes that __setitem__ is not random access but called for increasing key
            self._content.pop("truth_operators_as_expansion_storage", None)
            self._content[
                "truth_operators"] = NonAffineExpansionStorageContent_Base(
                    self._shape, dtype=object)
            self._content["basis_functions"] = list()  # will stay empty
            self._content.pop("basis_functions_shape", None)
        # Store
        if isinstance(item, Number):
            self._content["truth_operators"][key] = NumericForm(item)
        else:
            assert isinstance(item, AbstractParametrizedTensorFactory)
            # fix: compare small-int lengths with ==, not identity (is)
            assert len(item._spaces) == 0
            self._content["truth_operators"][key] = item
        # Recompute (trivial) shape
        if "basis_functions_shape" not in self._content:
            self._content["basis_functions_shape"] = DelayedTransposeShape(
                self._content["basis_functions"])
        # Compute truth expansion storage and prepare precomputed slices
        if key == self._largest_key:  # this assumes that __setitem__ is not random access but called for increasing key
            self._prepare_truth_operators_as_expansion_storage()

    def _prepare_truth_operators_as_expansion_storage(self):
        """Wrap the collected truth operators into a NonAffineExpansionStorage.

        When the operators are not plain numbers, additionally propagate the
        parametrized problem they originate from to the extracted operators.
        """
        from rbnics.backends import NonAffineExpansionStorage
        assert self._type == "operators"
        # fix: compare small ints with ==, not identity (is)
        assert self.order() == 1
        extracted_operators = tuple(op._form
                                    for op in self._content["truth_operators"])
        assert "truth_operators_as_expansion_storage" not in self._content
        self._content[
            "truth_operators_as_expansion_storage"] = NonAffineExpansionStorage(
                extracted_operators)
        if not all(isinstance(op, Number) for op in extracted_operators):
            problems = [
                get_problem_from_parametrized_operator(op)
                for op in self._content["truth_operators"]
            ]
            # All addends are expected to come from the same problem
            assert all([problem is problems[0] for problem in problems])
            for extracted_operator in self._content[
                    "truth_operators_as_expansion_storage"]:
                add_to_map_from_parametrized_operator_to_problem(
                    extracted_operator, problems[0])

    def __len__(self):
        """Return the number of addends; only valid for a 1D "operators" storage."""
        assert self._type == "operators"
        # fix: compare small ints with ==, not identity (is)
        assert self.order() == 1
        return self._shape[0]

    def order(self):
        """Return the tensor order (1 or 2) of the stored expansion."""
        valid_types = ("error_estimation_operators_11",
                       "error_estimation_operators_21",
                       "error_estimation_operators_22", "operators")
        assert self._type in valid_types
        return len(self._shape)

    def _delay_transpose(self, pre_post, op):
        """Wrap op in a delayed transposition by the (pre, post) operands.

        With no operands op is returned unchanged; with one operand the
        product transpose(pre) * op is delayed; with two operands the full
        product transpose(pre) * op * post is delayed.
        """
        assert len(pre_post) in (0, 1, 2)
        # fix: compare small-int lengths with ==, not identity (is)
        if len(pre_post) == 0:
            return op
        elif len(pre_post) == 1:
            return DelayedTranspose(pre_post[0]) * op
        else:
            return DelayedTranspose(pre_post[0]) * op * pre_post[1]
# ---- Beispiel #3 (snippet separator from the original scrape; score: 0) ----
    class _AffineExpansionStorage(AbstractAffineExpansionStorage):
        def __init__(self, arg1, arg2):
            """Allocate empty storage; the actual sizing is delegated to _init."""
            # Content array and cache of previously requested slices
            self._content = None
            self._precomputed_slices = Cache()  # from tuple to AffineExpansionStorage
            # Bookkeeping of the key insertion order
            self._smallest_key = None
            self._previous_key = None
            self._largest_key = None
            # Auxiliary storage for __getitem__ slicing, filled in __setitem__ if required
            self._component_name_to_basis_component_index = None
            self._component_name_to_basis_component_length = None
            # Dispatch to the _init overload matching the input arguments
            self._init(arg1, arg2)

        @overload(
            (tuple_of(backend.Matrix.Type()), tuple_of(backend.Vector.Type())),
            None)
        def _init(self, arg1, arg2):
            """Initialize from a tuple of already assembled tensors."""
            total = len(arg1)
            self._content = AffineExpansionStorageContent_Base((total, ),
                                                               dtype=object)
            self._smallest_key = 0
            self._largest_key = total - 1
            # Delegate the actual storage of each tensor to __setitem__
            for (q, tensor) in enumerate(arg1):
                self[q] = tensor

        @overload(int, None)
        def _init(self, arg1, arg2):
            """Initialize empty storage for a 1D expansion with arg1 terms."""
            self._smallest_key = 0
            self._largest_key = arg1 - 1
            self._content = AffineExpansionStorageContent_Base((arg1, ),
                                                               dtype=object)

        @overload(int, int)
        def _init(self, arg1, arg2):
            """Initialize empty storage for a 2D expansion of shape (arg1, arg2)."""
            self._smallest_key = (0, 0)
            self._largest_key = (arg1 - 1, arg2 - 1)
            self._content = AffineExpansionStorageContent_Base((arg1, arg2),
                                                               dtype=object)

        def save(self, directory, filename):
            """Save content metadata (type, shape), values and auxiliary dicts
            under directory/filename; nothing is written for an empty expansion."""
            # Get full directory name
            full_directory = Folders.Folder(
                os.path.join(str(directory), filename))
            full_directory.create()
            # Exit in the trivial case of empty affine expansion
            # fix: compare size with ==, not identity (is)
            if self._content.size == 0:
                return
            # Initialize iterator
            it = AffineExpansionStorageContent_Iterator(
                self._content,
                flags=["c_index", "multi_index", "refs_ok"],
                op_flags=["readonly"])
            # Save content item type and shape (dispatched on the first item)
            self._save_content_item_type_shape(self._content[it.multi_index],
                                               it, full_directory)
            # Save content
            self._save_content(self._content[it.multi_index], it,
                               full_directory)
            # Save dicts
            self._save_dicts(full_directory)

        @overload(backend.Matrix.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content_item_type_shape(self, item, it, full_directory):
            """Record that items are matrices with shape (M, N)."""
            ContentItemTypeIO.save_file(
                "matrix", full_directory, "content_item_type")
            ContentItemShapeIO.save_file(
                (item.M, item.N), full_directory, "content_item_shape")

        @overload(backend.Vector.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content_item_type_shape(self, item, it, full_directory):
            """Record that items are vectors of length N."""
            ContentItemTypeIO.save_file(
                "vector", full_directory, "content_item_type")
            ContentItemShapeIO.save_file(
                item.N, full_directory, "content_item_shape")

        @overload(backend.Function.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content_item_type_shape(self, item, it, full_directory):
            """Record that items are functions of size N."""
            ContentItemTypeIO.save_file(
                "function", full_directory, "content_item_type")
            ContentItemShapeIO.save_file(
                item.N, full_directory, "content_item_shape")

        @overload(Number, AffineExpansionStorageContent_Iterator,
                  Folders.Folder)
        def _save_content_item_type_shape(self, item, it, full_directory):
            """Record that items are scalars (which carry no shape)."""
            ContentItemTypeIO.save_file(
                "scalar", full_directory, "content_item_type")
            ContentItemShapeIO.save_file(
                None, full_directory, "content_item_shape")

        @overload(AbstractFunctionsList,
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content_item_type_shape(self, item, it, full_directory):
            """Record that items are functions lists (which carry no shape)."""
            ContentItemTypeIO.save_file(
                "functions_list", full_directory, "content_item_type")
            ContentItemShapeIO.save_file(
                None, full_directory, "content_item_shape")

        @overload(AbstractBasisFunctionsMatrix,
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content_item_type_shape(self, item, it, full_directory):
            """Record that items are basis functions matrices (no shape stored)."""
            ContentItemTypeIO.save_file(
                "basis_functions_matrix", full_directory, "content_item_type")
            ContentItemShapeIO.save_file(
                None, full_directory, "content_item_shape")

        @overload(None, AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content_item_type_shape(self, item, it, full_directory):
            """Record that the storage holds no items yet."""
            ContentItemTypeIO.save_file(
                "empty", full_directory, "content_item_type")
            ContentItemShapeIO.save_file(
                None, full_directory, "content_item_shape")

        @overload(backend.Matrix.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content(self, item, it, full_directory):
            """Save every stored matrix with the backend tensor writer."""
            while not it.finished:
                item_filename = "content_item_" + str(it.index)
                wrapping.tensor_save(self._content[it.multi_index],
                                     full_directory, item_filename)
                it.iternext()

        @overload(backend.Vector.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content(self, item, it, full_directory):
            """Save every stored vector with the backend tensor writer."""
            while not it.finished:
                item_filename = "content_item_" + str(it.index)
                wrapping.tensor_save(self._content[it.multi_index],
                                     full_directory, item_filename)
                it.iternext()

        @overload(backend.Function.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content(self, item, it, full_directory):
            """Save every stored function with the backend function writer."""
            while not it.finished:
                item_filename = "content_item_" + str(it.index)
                wrapping.function_save(self._content[it.multi_index],
                                       full_directory, item_filename)
                it.iternext()

        @overload(Number, AffineExpansionStorageContent_Iterator,
                  Folders.Folder)
        def _save_content(self, item, it, full_directory):
            """Save every stored scalar through the scalar IO helper."""
            while not it.finished:
                item_filename = "content_item_" + str(it.index)
                ScalarContentIO.save_file(self._content[it.multi_index],
                                          full_directory, item_filename)
                it.iternext()

        @overload(AbstractFunctionsList,
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content(self, item, it, full_directory):
            """Delegate saving of every stored functions list to its own save()."""
            while not it.finished:
                item_filename = "content_item_" + str(it.index)
                self._content[it.multi_index].save(full_directory,
                                                   item_filename)
                it.iternext()

        @overload(AbstractBasisFunctionsMatrix,
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content(self, item, it, full_directory):
            """Delegate saving of every stored basis matrix to its own save()."""
            while not it.finished:
                item_filename = "content_item_" + str(it.index)
                self._content[it.multi_index].save(full_directory,
                                                   item_filename)
                it.iternext()

        @overload(None, AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _save_content(self, item, it, full_directory):
            """Nothing to save when the storage holds no items."""

        def _save_dicts(self, full_directory):
            """Persist the auxiliary component-name dictionaries."""
            dicts_to_save = (
                (self._component_name_to_basis_component_index,
                 "component_name_to_basis_component_index"),
                (self._component_name_to_basis_component_length,
                 "component_name_to_basis_component_length"))
            for (dict_, dict_filename) in dicts_to_save:
                DictIO.save_file(dict_, full_directory, dict_filename)

        def load(self, directory, filename):
            """Load the expansion from directory/filename.

            Returns True on success (trivially so for an empty expansion) and
            False when the storage already contains data, in which case
            loading is skipped to avoid overwriting it.
            """
            if self._content is not None:  # avoid loading multiple times
                if self._content.size > 0:
                    it = AffineExpansionStorageContent_Iterator(
                        self._content,
                        flags=["multi_index", "refs_ok"],
                        op_flags=["readonly"])
                    while not it.finished:
                        if self._content[
                                it.
                                multi_index] is not None:  # ... but only if there is at least one element different from None
                            if isinstance(self._content[it.multi_index],
                                          AbstractFunctionsList):
                                if len(
                                        self._content[it.multi_index]
                                ) > 0:  # ... unless it is an empty FunctionsList
                                    return False
                            elif isinstance(self._content[it.multi_index],
                                            AbstractBasisFunctionsMatrix):
                                if sum(
                                        self._content[it.multi_index].
                                        _component_name_to_basis_component_length
                                        .values()
                                ) > 0:  # ... unless it is an empty BasisFunctionsMatrix
                                    return False
                            else:
                                return False
                        it.iternext()
            # Get full directory name
            full_directory = Folders.Folder(
                os.path.join(str(directory), filename))
            # Exit in the trivial case of empty affine expansion
            # fix: compare size with ==, not identity (is)
            if self._content.size == 0:
                return True
            # Load content item type and shape
            reference_item = self._load_content_item_type_shape(full_directory)
            # Initialize iterator
            it = AffineExpansionStorageContent_Iterator(
                self._content, flags=["c_index", "multi_index", "refs_ok"])
            # Load content
            self._load_content(reference_item, it, full_directory)
            # Load dicts
            self._load_dicts(full_directory)
            # Reset precomputed slices
            self._precomputed_slices.clear()
            self._prepare_trivial_precomputed_slice(reference_item)
            # Return
            return True

        def _load_content_item_type_shape(self, full_directory):
            """Read back the content metadata and build a reference item.

            The reference item is an empty tensor (or the already allocated
            content entry) matching the saved type and shape; _load_content
            uses it to copy-construct every entry.
            """
            assert ContentItemTypeIO.exists_file(full_directory,
                                                 "content_item_type")
            item_type = ContentItemTypeIO.load_file(full_directory,
                                                    "content_item_type")
            assert ContentItemShapeIO.exists_file(full_directory,
                                                  "content_item_shape")
            assert item_type in ("matrix", "vector", "function", "scalar",
                                 "functions_list", "basis_functions_matrix",
                                 "empty")
            load_globals = {"OnlineSizeDict": OnlineSizeDict}
            if item_type == "matrix":
                (M, N) = ContentItemShapeIO.load_file(full_directory,
                                                      "content_item_shape",
                                                      globals=load_globals)
                return backend.Matrix(M, N)
            elif item_type == "vector":
                N = ContentItemShapeIO.load_file(full_directory,
                                                 "content_item_shape",
                                                 globals=load_globals)
                return backend.Vector(N)
            elif item_type == "function":
                N = ContentItemShapeIO.load_file(full_directory,
                                                 "content_item_shape",
                                                 globals=load_globals)
                return backend.Function(N)
            elif item_type == "scalar":
                return 0.
            elif item_type == "functions_list":
                # self._content has already been populated with empty items
                assert isinstance(self._content[self._smallest_key],
                                  AbstractFunctionsList)
                return self._content[self._smallest_key]
            elif item_type == "basis_functions_matrix":
                # self._content has already been populated with empty items
                assert isinstance(self._content[self._smallest_key],
                                  AbstractBasisFunctionsMatrix)
                return self._content[self._smallest_key]
            elif item_type == "empty":
                return None
            else:  # unreachable thanks to the assert above
                raise ValueError("Invalid content item type.")

        @overload(backend.Matrix.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _load_content(self, item, it, full_directory):
            # Matrix content: allocate a copy of the (empty) template "item" for
            # every entry of the storage, then fill it from "content_item_<q>".
            while not it.finished:
                self._content[it.multi_index] = wrapping.tensor_copy(item)
                wrapping.tensor_load(self._content[it.multi_index],
                                     full_directory,
                                     "content_item_" + str(it.index))
                it.iternext()

        @overload(backend.Vector.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _load_content(self, item, it, full_directory):
            # Vector content: same pattern as the matrix overload above.
            while not it.finished:
                self._content[it.multi_index] = wrapping.tensor_copy(item)
                wrapping.tensor_load(self._content[it.multi_index],
                                     full_directory,
                                     "content_item_" + str(it.index))
                it.iternext()

        @overload(backend.Function.Type(),
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _load_content(self, item, it, full_directory):
            # Function content: copy/load via the function (not tensor) wrappers.
            while not it.finished:
                self._content[it.multi_index] = wrapping.function_copy(item)
                wrapping.function_load(self._content[it.multi_index],
                                       full_directory,
                                       "content_item_" + str(it.index))
                it.iternext()

        @overload(Number, AffineExpansionStorageContent_Iterator,
                  Folders.Folder)
        def _load_content(self, item, it, full_directory):
            # Scalar content: read each value directly from its own file.
            while not it.finished:
                self._content[it.multi_index] = ScalarContentIO.load_file(
                    full_directory, "content_item_" + str(it.index))
                it.iternext()

        @overload(AbstractFunctionsList,
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _load_content(self, item, it, full_directory):
            # FunctionsList content: entries already exist (see
            # _load_content_item_type_shape), so delegate to their own load().
            while not it.finished:
                self._content[it.multi_index].load(
                    full_directory, "content_item_" + str(it.index))
                it.iternext()

        @overload(AbstractBasisFunctionsMatrix,
                  AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _load_content(self, item, it, full_directory):
            # BasisFunctionsMatrix content: entries already exist, delegate to
            # their own load().
            while not it.finished:
                self._content[it.multi_index].load(
                    full_directory, "content_item_" + str(it.index))
                it.iternext()

        @overload(None, AffineExpansionStorageContent_Iterator, Folders.Folder)
        def _load_content(self, item, it, full_directory):
            # "empty" content: nothing was stored, nothing to load.
            pass

        def _load_dicts(self, full_directory):
            """Load the component-name dicts from file and propagate them onto every stored item."""
            assert DictIO.exists_file(
                full_directory, "component_name_to_basis_component_index")
            self._component_name_to_basis_component_index = DictIO.load_file(
                full_directory,
                "component_name_to_basis_component_index",
                globals={
                    "ComponentNameToBasisComponentIndexDict":
                    ComponentNameToBasisComponentIndexDict
                })
            assert DictIO.exists_file(
                full_directory, "component_name_to_basis_component_length")
            self._component_name_to_basis_component_length = DictIO.load_file(
                full_directory,
                "component_name_to_basis_component_length",
                globals={"OnlineSizeDict": OnlineSizeDict})
            # Attach the (shared) dicts to every entry of the storage
            index_dict = self._component_name_to_basis_component_index
            length_dict = self._component_name_to_basis_component_length
            iterator = AffineExpansionStorageContent_Iterator(
                self._content,
                flags=["multi_index", "refs_ok"],
                op_flags=["readonly"])
            while not iterator.finished:
                entry = self._content[iterator.multi_index]
                if index_dict is not None:
                    entry._component_name_to_basis_component_index = index_dict
                if length_dict is not None:
                    entry._component_name_to_basis_component_length = length_dict
                iterator.iternext()

        @overload(
            backend.Matrix.Type(), )
        def _prepare_trivial_precomputed_slice(self, item):
            # Register the whole matrix (i.e. the [:, :] slice) as an already
            # computed slice pointing back to this storage.
            full_rows_and_cols = (slice(None), slice(None))
            slices = slice_to_array(
                item, full_rows_and_cols,
                self._component_name_to_basis_component_length,
                self._component_name_to_basis_component_index)
            self._precomputed_slices[slices] = self

        @overload(
            backend.Vector.Type(), )
        def _prepare_trivial_precomputed_slice(self, item):
            # Register the whole vector (i.e. the [:] slice) as an already
            # computed slice pointing back to this storage.
            all_entries = slice(None)
            slices = slice_to_array(
                item, all_entries,
                self._component_name_to_basis_component_length,
                self._component_name_to_basis_component_index)
            self._precomputed_slices[slices] = self

        @overload(
            backend.Function.Type(), )
        def _prepare_trivial_precomputed_slice(self, item):
            # Register the whole function (i.e. the [:] slice of its vector) as
            # an already computed slice pointing back to this storage.
            empty_slice = slice(None)
            # BUGFIX: call item.vector() — the original passed the bound method
            # itself, handing slice_to_array a method object instead of the
            # underlying vector. The rest of this file consistently calls
            # item.vector() (see _do_slicing and __setitem__).
            slices = slice_to_array(
                item.vector(), empty_slice,
                self._component_name_to_basis_component_length,
                self._component_name_to_basis_component_index)
            self._precomputed_slices[slices] = self

        @overload(
            Number, )
        def _prepare_trivial_precomputed_slice(self, item):
            # Scalars cannot be sliced: nothing to precompute.
            pass

        @overload(
            AbstractFunctionsList, )
        def _prepare_trivial_precomputed_slice(self, item):
            # FunctionsList items maintain their own slicing cache; nothing to
            # precompute at the storage level.
            pass

        @overload(
            AbstractBasisFunctionsMatrix, )
        def _prepare_trivial_precomputed_slice(self, item):
            # BasisFunctionsMatrix items maintain their own slicing cache;
            # nothing to precompute at the storage level.
            pass

        @overload(
            None, )
        def _prepare_trivial_precomputed_slice(self, item):
            # Empty content: nothing to precompute.
            pass

        @overload(
            (slice, tuple_of(slice)), )
        def __getitem__(self, key):
            """
            Return the subtensors of size "key" for every element in content
            (e.g. submatrices [1:5, 1:5] of the affine expansion of A).
            """
            it = AffineExpansionStorageContent_Iterator(
                self._content,
                flags=["multi_index", "refs_ok"],
                op_flags=["readonly"])
            # Normalize the requested slice using the first entry as a
            # representative of the storage contents
            slices = slice_to_array(
                self._content[it.multi_index], key,
                self._component_name_to_basis_component_length,
                self._component_name_to_basis_component_index)

            if slices not in self._precomputed_slices:
                # Build a fresh storage of the same shape holding sliced items
                sliced_storage = _AffineExpansionStorage.__new__(
                    type(self), *self._content.shape)
                sliced_storage.__init__(*self._content.shape)
                while not it.finished:
                    sliced_storage[it.multi_index] = self._do_slicing(
                        self._content[it.multi_index], key)
                    it.iternext()
                self._precomputed_slices[slices] = sliced_storage
            return self._precomputed_slices[slices]

        @overload(
            (int, tuple_of(int)), )
        def __getitem__(self, key):
            """
            Return the element at position "key" in the storage (e.g. q-th matrix in the affine expansion of A, q = 1 ... Qa).
            """
            return self._content[key]

        @overload(backend.Matrix.Type(), (slice, tuple_of(slice)))
        def _do_slicing(self, item, key):
            # Matrices support slicing directly.
            return item[key]

        @overload(backend.Vector.Type(), (slice, tuple_of(slice)))
        def _do_slicing(self, item, key):
            # Vectors support slicing directly.
            return item[key]

        @overload(backend.Function.Type(), (slice, tuple_of(slice)))
        def _do_slicing(self, item, key):
            # Slice the underlying vector and wrap the result back into a Function.
            return backend.Function(item.vector()[key])

        def __setitem__(self, key, item):
            """Store item at position key, updating the metadata required for later slicing."""
            assert not isinstance(
                key, slice
            )  # only able to set the element at position "key" in the storage
            # Check that __setitem__ is not random access but called for increasing key, and store current key
            self._assert_setitem_order(key)
            self._update_previous_key(key)
            # Store item
            self._content[key] = item
            # Reset attributes related to basis functions matrix if the size has changed
            if key == self._smallest_key:  # this assumes that __setitem__ is not random access but called for increasing key
                self._component_name_to_basis_component_index = None
                self._component_name_to_basis_component_length = None
            # Also store attributes related to basis functions matrix for __getitem__ slicing
            assert isinstance(
                item,
                (
                    backend.Matrix.Type(),  # output e.g. of Z^T*A*Z
                    backend.Vector.Type(),  # output e.g. of Z^T*F
                    backend.Function.Type(
                    ),  # for initial conditions of unsteady problems
                    Number,  # output of Riesz_F^T*X*Riesz_F
                    AbstractFunctionsList,  # auxiliary storage of Riesz representors
                    AbstractBasisFunctionsMatrix  # auxiliary storage of Riesz representors
                ))
            # Work on the underlying vector of a Function from here on
            if isinstance(item, backend.Function.Type()):
                item = item.vector()
            if isinstance(item, (backend.Matrix.Type(), backend.Vector.Type(),
                                 AbstractBasisFunctionsMatrix)):
                # The two dicts must be set (or unset) together
                assert (
                    self._component_name_to_basis_component_index is None) == (
                        self._component_name_to_basis_component_length is None)
                if self._component_name_to_basis_component_index is None:
                    # First item: adopt its component dicts
                    self._component_name_to_basis_component_index = item._component_name_to_basis_component_index
                    self._component_name_to_basis_component_length = item._component_name_to_basis_component_length
                else:
                    # Subsequent items: dicts must agree with the stored ones
                    assert self._component_name_to_basis_component_index == item._component_name_to_basis_component_index
                    assert self._component_name_to_basis_component_length == item._component_name_to_basis_component_length
            else:
                assert self._component_name_to_basis_component_index is None
                assert self._component_name_to_basis_component_length is None
            # Reset and prepare precomputed slices
            if key == self._largest_key:  # this assumes that __setitem__ is not random access but called for increasing key
                self._precomputed_slices.clear()
                self._prepare_trivial_precomputed_slice(item)

        @overload(int)
        def _assert_setitem_order(self, current_key):
            # 1D storage: keys must be assigned sequentially, wrapping back to
            # 0 after the largest key.
            if self._previous_key is None:
                assert current_key == 0
            else:
                assert current_key == (self._previous_key +
                                       1) % (self._largest_key + 1)

        @overload(int, int)
        def _assert_setitem_order(self, current_key_0, current_key_1):
            # 2D storage: keys must be assigned in row-major order, with the
            # second index wrapping around at the largest key.
            if self._previous_key is None:
                assert current_key_0 == 0
                assert current_key_1 == 0
            else:
                expected_key_1 = (self._previous_key[1] +
                                  1) % (self._largest_key[1] + 1)
                # BUGFIX: compare with == rather than "is": identity comparison
                # with an int relies on CPython small-int interning and emits a
                # SyntaxWarning on Python >= 3.8.
                if expected_key_1 == 0:
                    # Second index wrapped around: advance the first index
                    expected_key_0 = (self._previous_key[0] +
                                      1) % (self._largest_key[0] + 1)
                else:
                    expected_key_0 = self._previous_key[0]
                assert current_key_0 == expected_key_0
                assert current_key_1 == expected_key_1

        @overload(tuple_of(int))
        def _assert_setitem_order(self, current_key):
            # Tuple key: unpack and delegate to the (int, int) overload.
            self._assert_setitem_order(*current_key)

        @overload(int)
        def _update_previous_key(self, current_key):
            # Remember the last assigned 1D key.
            self._previous_key = current_key

        @overload(int, int)
        def _update_previous_key(self, current_key_0, current_key_1):
            # Remember the last assigned 2D key as a tuple.
            self._previous_key = (current_key_0, current_key_1)

        @overload(tuple_of(int))
        def _update_previous_key(self, current_key):
            # Tuple key: unpack and delegate to the (int, int) overload.
            self._update_previous_key(*current_key)

        def __iter__(self):
            # Read-only iteration over the stored items.
            return AffineExpansionStorageContent_Iterator(
                self._content, flags=["refs_ok"], op_flags=["readonly"])

        def __len__(self):
            # Length is only well defined for one-dimensional storage.
            assert self.order() == 1
            return self._content.size

        def order(self):
            """Return the number of dimensions of the underlying content array."""
            assert self._content is not None
            return len(self._content.shape)
# Beispiel #4 / 0  (non-code separator left over from the example scraper)
class DelayedFunctionsList(object):
    """Storage of delayed linear solvers which mimics the FunctionsList interface."""

    def __init__(self, space):
        self.space = space
        self._enrich_memory = list()
        # Cache mapping (start, stop) tuples to previously computed slices
        self._precomputed_slices = Cache()

    def enrich(self, function, component=None, weight=None, copy=True):
        """Append a delayed solver (or merge another DelayedFunctionsList) into storage."""
        assert component is None
        assert weight is None
        assert copy is True
        # Delegate the actual append to the dispatched helper
        self._enrich(function)
        # Invalidate stale slices, then register the full range as a trivial one
        self._precomputed_slices.clear()
        self._precomputed_slices[0, len(self._enrich_memory)] = self

    @overload(DelayedLinearSolver)
    def _enrich(self, function):
        # A single delayed solver is simply appended
        self._enrich_memory.append(function)

    @overload(lambda cls: cls)
    def _enrich(self, other):
        # Merging another DelayedFunctionsList requires a matching space
        assert self.space is other.space
        self._enrich_memory.extend(other._enrich_memory)

    @overload(int)
    def __getitem__(self, key):
        return self._enrich_memory[key]

    @overload(slice)  # e.g. key = :N, return the first N functions
    def __getitem__(self, key):
        start = key.start if key.start is not None else 0
        assert key.step is None
        stop = key.stop if key.stop is not None else len(self._enrich_memory)

        assert start <= stop
        if start < stop:
            # Non-trivial slice: bounds must lie inside the stored range
            assert 0 <= start < len(self._enrich_memory)
            assert 0 < stop <= len(self._enrich_memory)
        # start == stop is legal and produces an empty DelayedFunctionsList

        if (start, stop) not in self._precomputed_slices:
            sliced = DelayedFunctionsList(self.space)
            if start < stop:
                sliced._enrich_memory = self._enrich_memory[key]
            self._precomputed_slices[start, stop] = sliced
        return self._precomputed_slices[start, stop]

    def __len__(self):
        return len(self._enrich_memory)

    def save(self, directory, filename):
        """Write the number of stored solvers, then one file per solver."""
        LengthIO.save_file(len(self._enrich_memory), directory,
                           filename + "_length")
        for (index, solver) in enumerate(self._enrich_memory):
            solver.save(directory, filename + "_" + str(index))

    def load(self, directory, filename):
        """Read solvers back from file; return False if storage was already populated."""
        if len(self._enrich_memory) > 0:  # avoid loading multiple times
            return False
        assert LengthIO.exists_file(directory, filename + "_length")
        stored_length = LengthIO.load_file(directory, filename + "_length")
        for index in range(stored_length):
            solver = DelayedLinearSolver()
            solver.load(directory, filename + "_" + str(index))
            self.enrich(solver)
        return True

    def get_problem_name(self):
        """Return the problem name shared by all stored solvers."""
        problem_name = None
        for solver in self._enrich_memory:
            if problem_name is None:
                problem_name = solver.get_problem_name()
            else:
                assert solver.get_problem_name() == problem_name
        return problem_name
# Beispiel #5 / 0  (non-code separator left over from the example scraper)
    class _TensorsList(AbstractTensorsList):
        """Storage for a list of tensors (matrices or vectors) with a cache of computed slices."""

        def __init__(self, space, empty_tensor):
            self.space = space
            # Template used to allocate new tensors when loading from file
            self.empty_tensor = empty_tensor
            self.mpi_comm = wrapping.get_mpi_comm(space)
            self._list = list()  # of tensors
            self._precomputed_slices = Cache()  # from tuple to TensorsList

        def enrich(self, tensors):
            """Append a tensor (or another TensorsList) to the storage."""
            # Append to storage
            self._enrich(tensors)
            # Reset precomputed slices
            self._precomputed_slices.clear()
            # Prepare trivial precomputed slice (the full range is a slice of itself)
            self._precomputed_slices[0, len(self._list)] = self

        @overload(
            (backend.Matrix.Type(), backend.Vector.Type()), )
        def _enrich(self, tensors):
            # Single tensor: store a copy
            self._list.append(wrapping.tensor_copy(tensors))

        @overload(
            lambda cls: cls, )
        def _enrich(self, tensors):
            # Another TensorsList: copy each of its tensors
            for tensor in tensors:
                self._list.append(wrapping.tensor_copy(tensor))

        def clear(self):
            """Drop all stored tensors and invalidate the slice cache."""
            self._list = list()
            # Reset precomputed slices
            self._precomputed_slices.clear()

        def save(self, directory, filename):
            """Write the list length, then one file per tensor."""
            self._save_Nmax(directory, filename)
            for (index, tensor) in enumerate(self._list):
                wrapping.tensor_save(tensor, directory,
                                     filename + "_" + str(index))

        def _save_Nmax(self, directory, filename):
            # Write the length file through parallel_io (collective-safe write)
            def save_Nmax_task():
                with open(os.path.join(str(directory), filename + ".length"),
                          "w") as length:
                    length.write(str(len(self._list)))

            parallel_io(save_Nmax_task, self.mpi_comm)

        def load(self, directory, filename):
            """Read tensors back from file; return False if storage was already populated."""
            if len(self._list) > 0:  # avoid loading multiple times
                return False
            Nmax = self._load_Nmax(directory, filename)
            for index in range(Nmax):
                # Allocate from the template, then fill from file
                tensor = wrapping.tensor_copy(self.empty_tensor)
                wrapping.tensor_load(tensor, directory,
                                     filename + "_" + str(index))
                self.enrich(tensor)
            return True

        def _load_Nmax(self, directory, filename):
            # Read the length file through parallel_io (collective-safe read)
            def load_Nmax_task():
                with open(os.path.join(str(directory), filename + ".length"),
                          "r") as length:
                    return int(length.readline())

            return parallel_io(load_Nmax_task, self.mpi_comm)

        @overload(
            online_backend.OnlineFunction.Type(), )
        def __mul__(self, other):
            # TensorsList * online function, delegated to the backend wrapper
            return wrapping.tensors_list_mul_online_function(self, other)

        def __len__(self):
            return len(self._list)

        @overload(int)
        def __getitem__(self, key):
            return self._list[key]

        @overload(slice)  # e.g. key = :N, return the first N tensors
        def __getitem__(self, key):
            # NOTE(review): unlike _FunctionsList.__getitem__, an explicit empty
            # slice (start == stop) is not special-cased here and would fail the
            # "stop > 0" assertion below — confirm whether that is intended.
            if key.start is not None:
                start = key.start
                assert start >= 0
                assert start < len(self._list)
            else:
                start = 0
            assert key.step is None
            if key.stop is not None:
                stop = key.stop
                assert stop > 0
                assert stop <= len(self._list)
            else:
                stop = len(self._list)

            if (start, stop) not in self._precomputed_slices:
                output = _TensorsList.__new__(type(self), self.space,
                                              self.empty_tensor)
                output.__init__(self.space, self.empty_tensor)
                output._list = self._list[key]
                self._precomputed_slices[start, stop] = output
            return self._precomputed_slices[start, stop]

        def __iter__(self):
            return self._list.__iter__()
# Beispiel #6 / 0  (non-code separator left over from the example scraper)
    class _FunctionsList(AbstractFunctionsList):
        """Storage for a list of functions on a common (sub)space with a cache of computed slices."""

        def __init__(self, space, component=None):
            # BUGFIX: "component" now defaults to None. The slice overload of
            # __getitem__ below re-initializes the output with only one
            # argument, which raised a TypeError while "component" was a
            # required parameter; the body already handles component is None.
            if component is None:
                self.space = space
            else:
                self.space = wrapping.get_function_subspace(space, component)
            self.mpi_comm = wrapping.get_mpi_comm(space)
            self._list = list()  # of functions
            self._precomputed_slices = Cache()  # from tuple to FunctionsList

        def enrich(self, functions, component=None, weights=None, copy=True):
            """Append one or more functions (optionally weighted) to the storage."""
            # Append to storage
            self._enrich(functions, component, weights, copy)
            # Reset precomputed slices; consistently with the other containers
            # in this file, clear the existing Cache instead of allocating a
            # new one.
            self._precomputed_slices.clear()
            # Prepare trivial precomputed slice
            self._precomputed_slices[0, len(self._list)] = self

        @overload(backend.Function.Type(), (None, str, dict_of(str, str)),
                  (None, Number), bool)
        def _enrich(self, function, component, weight, copy):
            # Single function
            self._add_to_list(function, component, weight, copy)

        @overload((lambda cls: cls, list_of(
            backend.Function.Type()), tuple_of(backend.Function.Type())),
                  (None, str, dict_of(str, str)), (None, list_of(Number)),
                  bool)
        def _enrich(self, functions, component, weights, copy):
            # Several functions, with optional per-function weights
            if weights is not None:
                assert len(weights) == len(functions)
                for (index, function) in enumerate(functions):
                    self._add_to_list(function, component, weights[index],
                                      copy)
            else:
                for function in functions:
                    self._add_to_list(function, component, None, copy)

        @overload(TimeSeries, (None, str, dict_of(str, str)),
                  (None, list_of(Number)), bool)
        def _enrich(self, functions, component, weights, copy):
            # Time series: enrich with its underlying list of functions
            self._enrich(functions._list, component, weights, copy)

        @overload(object, (None, str, dict_of(str, str)),
                  (None, Number, list_of(Number)), bool)
        def _enrich(self, function, component, weight, copy):
            # Fallback: convert backend-specific additional function types
            if AdditionalIsFunction(function):
                function = ConvertAdditionalFunctionTypes(function)
                assert weight is None or isinstance(weight, Number)
                self._add_to_list(function, component, weight, copy)
            elif isinstance(function, list):
                converted_function = list()
                for function_i in function:
                    if AdditionalIsFunction(function_i):
                        converted_function.append(
                            ConvertAdditionalFunctionTypes(function_i))
                    else:
                        raise RuntimeError(
                            "Invalid function provided to FunctionsList.enrich()"
                        )
                assert weight is None or isinstance(weight, list)
                self._enrich(converted_function, component, weight, copy)
            else:
                raise RuntimeError(
                    "Invalid function provided to FunctionsList.enrich()")

        @overload(backend.Function.Type(), (None, str), (None, Number), bool)
        def _add_to_list(self, function, component, weight, copy):
            # Same component name for source and destination
            self._list.append(
                wrapping.function_extend_or_restrict(function, component,
                                                     self.space, component,
                                                     weight, copy))

        @overload(backend.Function.Type(), dict_of(str, str), (None, Number),
                  bool)
        def _add_to_list(self, function, component, weight, copy):
            # {source: destination} component mapping with exactly one entry
            assert len(component) == 1
            for (component_from, component_to) in component.items():
                break
            self._list.append(
                wrapping.function_extend_or_restrict(function, component_from,
                                                     self.space, component_to,
                                                     weight, copy))

        def clear(self):
            """Drop all stored functions and invalidate the slice cache."""
            self._list = list()
            # Reset precomputed slices
            self._precomputed_slices.clear()

        def save(self, directory, filename):
            """Write the list length, then one file per function."""
            self._save_Nmax(directory, filename)
            for (index, function) in enumerate(self._list):
                wrapping.function_save(function, directory,
                                       filename + "_" + str(index))

        def _save_Nmax(self, directory, filename):
            # Write the length file through parallel_io (collective-safe write)
            def save_Nmax_task():
                with open(os.path.join(str(directory), filename + ".length"),
                          "w") as length:
                    length.write(str(len(self._list)))

            parallel_io(save_Nmax_task, self.mpi_comm)

        def load(self, directory, filename):
            """Read functions back from file; return False if storage was already populated."""
            if len(self._list) > 0:  # avoid loading multiple times
                return False
            Nmax = self._load_Nmax(directory, filename)
            for index in range(Nmax):
                function = backend.Function(self.space)
                wrapping.function_load(function, directory,
                                       filename + "_" + str(index))
                self.enrich(function)
            return True

        def _load_Nmax(self, directory, filename):
            # Read the length file through parallel_io (collective-safe read)
            def load_Nmax_task():
                with open(os.path.join(str(directory), filename + ".length"),
                          "r") as length:
                    return int(length.readline())

            return parallel_io(load_Nmax_task, self.mpi_comm)

        @overload(
            online_backend.OnlineMatrix.Type(), )
        def __mul__(self, other):
            # FunctionsList * online matrix, delegated to the backend wrapper
            return wrapping.functions_list_mul_online_matrix(
                self, other, type(self))

        @overload(
            (online_backend.OnlineVector.Type(), ThetaType), )
        def __mul__(self, other):
            # FunctionsList * online vector (or theta tuple)
            return wrapping.functions_list_mul_online_vector(self, other)

        @overload(
            online_backend.OnlineFunction.Type(), )
        def __mul__(self, other):
            # FunctionsList * online function: multiply by its underlying vector
            return wrapping.functions_list_mul_online_vector(
                self, online_wrapping.function_to_vector(other))

        def __len__(self):
            return len(self._list)

        @overload(int)
        def __getitem__(self, key):
            return self._list[key]

        @overload(slice)  # e.g. key = :N, return the first N functions
        def __getitem__(self, key):
            if key.start is not None:
                start = key.start
            else:
                start = 0
            assert key.step is None
            if key.stop is not None:
                stop = key.stop
            else:
                stop = len(self._list)

            assert start <= stop
            if start < stop:
                assert start >= 0
                assert start < len(self._list)
                assert stop > 0
                assert stop <= len(self._list)
            # elif start == stop
            #    trivial case which will result in an empty FunctionsList

            if (start, stop) not in self._precomputed_slices:
                output = _FunctionsList.__new__(type(self), self.space)
                output.__init__(self.space)
                if start < stop:
                    output._list = self._list[key]
                self._precomputed_slices[start, stop] = output
            return self._precomputed_slices[start, stop]

        @overload(int, backend.Function.Type())
        def __setitem__(self, key, item):
            self._list[key] = item

        @overload(int, object)
        def __setitem__(self, key, item):
            # Fallback: convert backend-specific additional function types
            if AdditionalIsFunction(item):
                item = ConvertAdditionalFunctionTypes(item)
                self._list[key] = item
            else:
                raise RuntimeError(
                    "Invalid function provided to FunctionsList.__setitem__()")

        def __iter__(self):
            return self._list.__iter__()
    class _BasisFunctionsMatrix(AbstractBasisFunctionsMatrix):
        def __init__(self, space, component=None):
            # Restrict to the requested component subspace, if any
            if component is not None:
                self.space = wrapping.get_function_subspace(space, component)
            else:
                self.space = space
            self.mpi_comm = wrapping.get_mpi_comm(space)
            self._components = dict()  # of FunctionsList
            self._precomputed_sub_components = Cache(
            )  # from tuple to FunctionsList
            self._precomputed_slices = Cache()  # from tuple to FunctionsList
            self._components_name = list()  # filled in by init
            self._component_name_to_basis_component_index = ComponentNameToBasisComponentIndexDict(
            )  # filled in by init
            self._component_name_to_basis_component_length = OnlineSizeDict()

        def init(self, components_name):
            """
            (Re)initialize per-component storage and bookkeeping dicts.
            Does nothing if already initialized with the same components_name.
            """
            if self._components_name != components_name:  # Do nothing if it was already initialized with the same dicts
                # Store components name
                self._components_name = components_name
                # Initialize components FunctionsList
                self._components.clear()
                for component_name in components_name:
                    self._components[component_name] = backend.FunctionsList(
                        self.space)
                # Prepare len components
                self._component_name_to_basis_component_length.clear()
                for component_name in components_name:
                    self._component_name_to_basis_component_length[
                        component_name] = 0
                # Initialize the component_name_to_basis_component_index dict
                self._component_name_to_basis_component_index.clear()
                for (basis_component_index,
                     component_name) in enumerate(components_name):
                    self._component_name_to_basis_component_index[
                        component_name] = basis_component_index
                # Reset precomputed sub components
                self._precomputed_sub_components.clear()
                # Reset precomputed slices
                self._precomputed_slices.clear()

                # Patch FunctionsList.enrich() to update internal attributes
                def patch_functions_list_enrich(component_name,
                                                functions_list):
                    # Keep a reference to the unpatched method; the patched
                    # version calls it and then refreshes the bookkeeping.
                    original_functions_list_enrich = functions_list.enrich

                    def patched_functions_list_enrich(self_,
                                                      functions,
                                                      component=None,
                                                      weights=None,
                                                      copy=True):
                        # Append to storage
                        original_functions_list_enrich(functions, component,
                                                       weights, copy)
                        # Update component name to basis component length;
                        # if a component was given it must match the list's own.
                        if component is not None:
                            if isinstance(component, dict):
                                assert len(component) == 1
                                for (_, component_to) in component.items():
                                    break
                                assert component_name == component_to
                            else:
                                assert component_name == component
                        self._update_component_name_to_basis_component_length(
                            component_name)
                        # Reset precomputed sub components
                        self._precomputed_sub_components.clear()
                        # Prepare trivial precomputed sub components
                        self._prepare_trivial_precomputed_sub_components()
                        # Reset precomputed slices
                        self._precomputed_slices.clear()
                        # Prepare trivial precomputed slice
                        self._prepare_trivial_precomputed_slice()

                    functions_list.enrich_patch = PatchInstanceMethod(
                        functions_list, "enrich",
                        patched_functions_list_enrich)
                    functions_list.enrich_patch.patch()

                for component_name in components_name:
                    patch_functions_list_enrich(
                        component_name, self._components[component_name])

        def enrich(self, functions, component=None, weights=None, copy=True):
            """Append function(s) to the basis, dispatching on the component type."""
            # This backend only supports enrichment by copy
            assert copy is True
            # Delegate to the overloaded implementation selected by component's type
            self._enrich(functions, component, weights, copy)

        @overload(
            object, None, (None, list_of(Number)), bool
        )  # the first argument is object in order to handle FunctionsList's AdditionalFunctionType
        def _enrich(self, functions, component, weights, copy):
            # No component name supplied: only meaningful when the basis has
            # exactly one component, which then receives the new functions.
            assert len(self._components) == 1
            assert len(self._components_name) == 1
            (only_component,) = self._components_name
            self._components[only_component].enrich(functions, None, weights, copy)

        @overload(
            object, str, (None, list_of(Number)), bool
        )  # the first argument is object in order to handle FunctionsList's AdditionalFunctionType
        def _enrich(self, functions, component, weights, copy):
            # Component given by name: enrich that component's storage.
            assert component in self._components
            target = self._components[component]
            target.enrich(functions, component, weights, copy)

        @overload(
            object, dict_of(str, str), (None, list_of(Number)), bool
        )  # the first argument is object in order to handle FunctionsList's AdditionalFunctionType
        def _enrich(self, functions, component, weights, copy):
            # Component given as a {from: to} mapping: the new functions are
            # stored in the "to" component.
            assert len(component) == 1
            (component_to,) = component.values()
            assert component_to in self._components
            # BUGFIX: forward the copy flag to the underlying FunctionsList.
            # It was previously omitted, silently falling back to the callee's
            # default and ignoring the caller's choice — inconsistent with the
            # None- and str-component overloads above.
            self._components[component_to].enrich(functions, component,
                                                  weights, copy)

        @overload(None)
        def _update_component_name_to_basis_component_length(self, component):
            # No component supplied: only valid for a single-component basis,
            # whose stored length is refreshed here.
            assert len(self._components) == 1
            assert len(self._components_name) == 1
            (only_component,) = self._components_name
            current_length = len(self._components[only_component])
            self._component_name_to_basis_component_length[only_component] = current_length

        @overload(str)
        def _update_component_name_to_basis_component_length(self, component):
            # Refresh the stored length of the named component.
            current_length = len(self._components[component])
            self._component_name_to_basis_component_length[component] = current_length

        @overload(dict_of(str, str))
        def _update_component_name_to_basis_component_length(self, component):
            # A {from: to} mapping: the length to refresh is the "to" component's.
            assert len(component) == 1
            (component_to,) = component.values()
            assert component_to in self._components
            current_length = len(self._components[component_to])
            self._component_name_to_basis_component_length[component_to] = current_length

        def _prepare_trivial_precomputed_sub_components(self):
            """Cache self as the extraction of all of its own components."""
            all_components_key = tuple(self._components_name)
            self._precomputed_sub_components[all_components_key] = self

        def _prepare_trivial_precomputed_slice(self):
            """Cache self as the [0:N] slice spanning every stored basis function."""
            lengths = self._component_name_to_basis_component_length
            if len(self._components) == 1:
                # Single component: the cache key is a pair of plain ints
                assert len(self._components_name) == 1
                (only_component,) = self._components_name
                key_start = 0
                key_stop = lengths[only_component]
            else:
                # Multiple components: the cache key is a pair of tuples, one
                # entry per component in declaration order
                key_start = tuple(0 for _ in self._components_name)
                key_stop = tuple(lengths[name] for name in self._components_name)
            self._precomputed_slices[key_start, key_stop] = self

        def clear(self):
            """Empty the basis while preserving its component names."""
            saved_components_name = self._components_name
            # Setting _components_name to None makes init() believe this is a
            # fresh object and re-initialize all internal storage
            self._components_name = None
            self.init(saved_components_name)

        def save(self, directory, filename):
            """Save each component's functions list to directory under filename."""
            multiple_components = len(self._components) > 1
            for (component_name, functions_list) in self._components.items():
                # Suffix the filename with the component name only when there
                # is more than one component to disambiguate
                if multiple_components:
                    component_filename = filename + "_" + component_name
                else:
                    component_filename = filename
                functions_list.save(directory, component_filename)

        def load(self, directory, filename):
            """Load each component's functions list; return True only if all loads succeed."""
            assert len(self._components) > 0
            multiple_components = len(self._components) > 1
            success = True
            for (component_name, functions_list) in self._components.items():
                # Same naming convention as save(): suffix only when needed
                if multiple_components:
                    component_filename = filename + "_" + component_name
                else:
                    component_filename = filename
                # Temporarily disable the patched enrich(): internal attributes
                # are refreshed explicitly, once per component here and once for
                # the caches at the end, instead of on every appended function
                assert hasattr(functions_list, "enrich_patch")
                functions_list.enrich_patch.unpatch()
                loaded = functions_list.load(directory, component_filename)
                success = success and loaded
                # Populate component length
                self._update_component_name_to_basis_component_length(
                    component_name)
                # Restore patched enrich method
                functions_list.enrich_patch.patch()
            # Invalidate both caches and re-seed their trivial entries
            self._precomputed_sub_components.clear()
            self._prepare_trivial_precomputed_sub_components()
            self._precomputed_slices.clear()
            self._prepare_trivial_precomputed_slice()
            return success

        @overload(
            online_backend.OnlineMatrix.Type(), )
        def __mul__(self, other):
            """Right-multiply by an online matrix, yielding a new basis functions matrix."""
            if isinstance(other.M, dict):
                # A per-component sized matrix must cover exactly our components
                assert set(other.M.keys()) == set(self._components_name)

            def _new_initialized_basis(space):
                # Factory passed to the backend: a freshly constructed and
                # initialized instance of the same concrete type as self
                fresh = _BasisFunctionsMatrix.__new__(type(self), space)
                fresh.__init__(space)
                fresh.init(self._components_name)
                return fresh

            return wrapping.basis_functions_matrix_mul_online_matrix(
                self, other, _new_initialized_basis)

        @overload(
            online_backend.OnlineFunction.Type(), )
        def __mul__(self, other):
            # An online function is handled by converting it to its underlying
            # vector and re-dispatching
            as_vector = online_wrapping.function_to_vector(other)
            return self.__mul__(as_vector)

        @overload(
            online_backend.OnlineVector.Type(), )
        def __mul__(self, other):
            # A per-component sized vector must cover exactly our components
            if isinstance(other.N, dict):
                assert set(other.N.keys()) == set(self._components_name)
            return wrapping.basis_functions_matrix_mul_online_vector(self, other)

        @overload(
            ThetaType, )
        def __mul__(self, other):
            # A tuple of theta coefficients is treated like an online vector
            return wrapping.basis_functions_matrix_mul_online_vector(self, other)

        def __len__(self):
            # A scalar length is only well-defined for a single-component basis
            assert len(self._components_name) == 1
            assert len(self._component_name_to_basis_component_length) == 1
            (only_component,) = self._components_name
            return self._component_name_to_basis_component_length[only_component]

        @overload(int)
        def __getitem__(self, key):
            # Single-component convenience: index straight into the only
            # component and return basis function number key
            assert len(self._components) == 1
            assert len(self._components_name) == 1
            (only_component,) = self._components_name
            return self._components[only_component][key]

        @overload(str)
        def __getitem__(self, key):
            # A component name yields that component's whole FunctionsList; a
            # single basis function can then be extracted from it by index
            selected_component = self._components[key]
            return selected_component

        @overload(list_of(str))
        def __getitem__(self, key):
            # A list of component names yields the (cached) restriction of the
            # basis to those components
            return self._precompute_sub_components(key)

        @overload(slice)  # e.g. key = :N, return the first N functions
        def __getitem__(self, key):
            # Only contiguous slices are supported
            assert key.step is None
            (start, stop) = (key.start, key.stop)
            return self._precompute_slice(start, stop)

        @overload(
            int, object
        )  # the second argument is object in order to handle FunctionsList's AdditionalFunctionType
        def __setitem__(self, key, item):
            # Replacement of a single basis function; forbidden when more than
            # one component is present
            assert len(
                self._components
            ) == 1, "Cannot set components, only single functions. Did you mean to call __getitem__ to extract a component and __setitem__ of a single function on that component?"
            assert len(self._components_name) == 1
            (only_component,) = self._components_name
            self._components[only_component][key] = item

        @overload(None, int)
        def _precompute_slice(self, _, N_stop):
            # A missing start means: start from the very first basis function
            return self._precompute_slice(0, N_stop)

        @overload(int, None)
        def _precompute_slice(self, N_start, _):
            # A missing stop means: go up to the current basis length
            return self._precompute_slice(N_start, len(self))

        @overload(int, int)
        def _precompute_slice(self, N_start, N_stop):
            """Extract (and cache) the sub-basis between two integer bounds."""
            cache_key = (N_start, N_stop)
            if cache_key not in self._precomputed_slices:
                # Plain integer bounds only make sense for a single component
                assert len(self._components) == 1
                sliced = _BasisFunctionsMatrix.__new__(type(self), self.space)
                sliced.__init__(self.space)
                sliced.init(self._components_name)
                for name in self._components_name:
                    sliced._components[name].enrich(
                        self._components[name][N_start:N_stop], copy=False)
                self._precomputed_slices[cache_key] = sliced
            return self._precomputed_slices[cache_key]

        @overload(None, OnlineSizeDict)
        def _precompute_slice(self, _, N_stop):
            # A missing start means zero in every component
            zero_start = OnlineSizeDict()
            for name in self._components_name:
                zero_start[name] = 0
            return self._precompute_slice(zero_start, N_stop)

        @overload(OnlineSizeDict, None)
        def _precompute_slice(self, N_start, _):
            """Slice from per-component N_start up to the full basis length."""
            # A missing stop means the current length of every component
            N_stop = OnlineSizeDict()
            for component_name in self._components_name:
                N_stop[
                    component_name] = self._component_name_to_basis_component_length[
                        component_name]
            # BUGFIX: forward the just-built N_stop dict. The previous code
            # passed len(self) instead, which (a) discarded N_stop entirely,
            # (b) mixed a dict start with an int stop so no (dict, dict)
            # overload could match, and (c) asserted a single component,
            # breaking the multi-component path.
            return self._precompute_slice(N_start, N_stop)

        @overload(OnlineSizeDict, OnlineSizeDict)
        def _precompute_slice(self, N_start, N_stop):
            """Extract (and cache) the sub-basis between per-component bounds."""
            assert set(N_start.keys()) == set(self._components_name)
            assert set(N_stop.keys()) == set(self._components_name)
            # Dicts are not hashable: the cache key is a pair of tuples holding
            # the bounds in component declaration order
            key_start = tuple(N_start[name] for name in self._components_name)
            key_stop = tuple(N_stop[name] for name in self._components_name)
            if (key_start, key_stop) not in self._precomputed_slices:
                sliced = _BasisFunctionsMatrix.__new__(type(self), self.space)
                sliced.__init__(self.space)
                sliced.init(self._components_name)
                for name in self._components_name:
                    sliced._components[name].enrich(
                        self._components[name][N_start[name]:N_stop[name]],
                        copy=False)
                self._precomputed_slices[key_start, key_stop] = sliced
            return self._precomputed_slices[key_start, key_stop]

        def _precompute_sub_components(self, sub_components):
            """Extract (and cache) the basis restricted to the given components."""
            cache_key = tuple(sub_components)
            if cache_key not in self._precomputed_sub_components:
                assert set(sub_components).issubset(self._components_name)
                restricted = _BasisFunctionsMatrix.__new__(
                    type(self), self.space, sub_components)
                restricted.__init__(self.space, sub_components)
                restricted.init(sub_components)
                # Copy every requested component into the restricted basis
                for name in sub_components:
                    restricted._components[name].enrich(
                        self._components[name], component=name, copy=True)
                self._precomputed_sub_components[cache_key] = restricted
            return self._precomputed_sub_components[cache_key]

        def __iter__(self):
            # Iteration delegates to the unique component's functions list
            assert len(self._components) == 1
            assert len(self._components_name) == 1
            (only_component,) = self._components_name
            return iter(self._components[only_component])