def python_string_to_sympy(string_expression: tuple_of(tuple_of(str)),
                           x_symb: (Matrix, MatrixSymbol, None),
                           mu_symb: (Matrix, MatrixSymbol, None)):
    assert all([len(si) == len(string_expression[0]) for si in string_expression[1:]])
    sympy_expression = zeros(len(string_expression), len(string_expression[0]))
    for (i, si) in enumerate(string_expression):
        for (j, sij) in enumerate(si):
            sympy_expression[i, j] = sympify(sij, locals={"x": x_symb, "mu": mu_symb})
    return ImmutableMatrix(sympy_expression)
def python_string_to_sympy(string_expression: tuple_of(tuple_of(str)), problem: ParametrizedProblem):
    """Convert a matrix of strings (with math python syntax, e.g. **2 instead of pow(., 2)) to sympy."""
    x_symb = sympy_symbolic_coordinates(problem.V.mesh().geometry().dim(), MatrixListSymbol)
    mu_symb = MatrixListSymbol("mu", len(problem.mu), 1)
    return python_string_to_sympy(string_expression, x_symb, mu_symb)
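# A minimal standalone sketch of the conversion above, assuming plain sympy
# MatrixSymbol in place of RBniCS's MatrixListSymbol: the names "x" and "mu" in the
# strings are bound to 2x1 matrix symbols, so "x[0]" and "mu[1]" index into them.
from sympy import ImmutableMatrix, MatrixSymbol, sympify, zeros

x_demo = MatrixSymbol("x", 2, 1)
mu_demo = MatrixSymbol("mu", 2, 1)
strings = (("mu[0] * x[0]**2", "x[1]"), ("0", "mu[1]"))
converted = zeros(len(strings), len(strings[0]))
for (i, si) in enumerate(strings):
    for (j, sij) in enumerate(si):
        converted[i, j] = sympify(sij, locals={"x": x_demo, "mu": mu_demo})
print(ImmutableMatrix(converted))  # symbolic 2x2 matrix in the entries of x and mu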
def _AffineExpansionStorage(args: (tuple_of(Form), tuple_of(Matrix.Type()), tuple_of(Vector.Type()),
                                   tuple_of((Form, Matrix.Type())), tuple_of((Form, Vector.Type())))):
    return AffineExpansionStorage_Form(args)
def _product(thetas: ThetaType, operators: tuple_of(Matrix.Type())):
    output = tensor_copy(operators[0])
    output.zero()
    for (theta, operator) in zip(thetas, operators):
        theta = float(theta)
        output += theta * operator
    return ProductOutput(output)
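# A dense-algebra sketch of the affine sum computed by the Matrix overload above,
# assuming numpy arrays in place of the backend Matrix type: it forms
# output = sum_q theta_q * A_q after zeroing a copy of the first operator.
import numpy as np

thetas_demo = (1.0, 0.5)
operators_demo = (np.eye(2), np.ones((2, 2)))
output_demo = np.zeros_like(operators_demo[0])  # plays the role of tensor_copy + zero
for (theta, operator) in zip(thetas_demo, operators_demo):
    output_demo += float(theta) * operator
# output_demo is now [[1.5, 0.5], [0.5, 1.5]]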
def _product(thetas: ThetaType, operators: tuple_of(ParametrizedTensorFactory)):
    operators_as_forms = tuple(operator._form for operator in operators)
    try:
        output = _product_parametrized_tensor_factories_output_cache[operators_as_forms]
    except KeyError:
        # Keep the operators as ParametrizedTensorFactories and delay assembly as long as possible
        output = _product(thetas, operators_as_forms)
        output = ParametrizedTensorFactory(output.sum_product_return_value)
        problems = [get_problem_from_parametrized_operator(operator) for operator in operators]
        assert all([problem is problems[0] for problem in problems])
        add_to_map_from_parametrized_operator_to_problem(output, problems[0])
        output = ProductOutput(output)
        _product_parametrized_tensor_factories_output_cache[operators_as_forms] = output
        _product_parametrized_tensor_factories_constants_cache[operators_as_forms] = (
            _product_forms_constants_cache[operators_as_forms])
        return output
    else:
        constants = _product_parametrized_tensor_factories_constants_cache[operators_as_forms]
        for (theta, constant) in zip(thetas, constants):
            theta = float(theta)
            constant.assign(theta)
        return output
def python_string_to_sympy(string_expression: tuple_of(str),
                           x_symb: (Matrix, MatrixSymbol, None),
                           mu_symb: (Matrix, MatrixSymbol, None)):
    sympy_expression = zeros(len(string_expression), 1)
    for (i, si) in enumerate(string_expression):
        sympy_expression[i] = sympify(si, locals={"x": x_symb, "mu": mu_symb})
    return ImmutableMatrix(sympy_expression)
def _product(thetas: ThetaType, operators: tuple_of(Vector.Type())):
    output = tensor_copy(operators[0])
    output.zero()
    for (theta, operator) in zip(thetas, operators):
        theta = float(theta)
        output.add_local(theta * operator.get_local())
    output.apply("add")
    return ProductOutput(output)
class _Evaluate(object):
    @overload(backend.Matrix.Type(), None)
    def __call__(self, matrix, at):
        return matrix

    @overload(backend.Matrix.Type(), tuple_of(int))
    def __call__(self, matrix, at):
        assert len(at) == 2
        return matrix[at]

    @overload(backend.Vector.Type(), None)
    def __call__(self, vector, at):
        return vector

    @overload(backend.Vector.Type(), tuple_of(int))
    def __call__(self, vector, at):
        assert len(at) == 1
        return vector[at]
def _function_from_ufl_component_tensor(expression: Product, indices: tuple_of(IndexBase)):
    factor_1 = expression.ufl_operands[0]
    factor_2 = expression.ufl_operands[1]
    assert isinstance(factor_1, (Number, ScalarValue)) or isinstance(factor_2, (Number, ScalarValue))
    if isinstance(factor_1, (Number, ScalarValue)):
        factor_2 = as_tensor(factor_2, indices)
    else:  # isinstance(factor_2, (Number, ScalarValue))
        factor_1 = as_tensor(factor_1, indices)
    return _function_from_ufl_product(factor_1, factor_2)
def _diff_content(reference_items: (list_of(object), tuple_of(object)),
                  current_items: (list_of(object), tuple_of(object)), tab: str):
    if len(reference_items) != len(current_items):
        return [tab + "@@ different lengths @@" + "\n"
                + tab + "- " + str(len(reference_items)) + "\n"
                + tab + "+ " + str(len(current_items)) + "\n"]
    else:
        diff_items = list()
        for (item_number, (reference_item, current_item)) in enumerate(zip(reference_items, current_items)):
            diff_item = _diff_content(reference_item, current_item, tab + "\t")
            if len(diff_item) > 0:
                for d in diff_item:
                    diff_items.append(tab + "@@ " + str(item_number) + " @@" + "\n" + d)
        return diff_items
def test_inheritance_for_dict_of_keys_tuple_of():
    class A(object):
        pass

    class B(object):
        pass

    class C(A):
        pass

    @dispatch(dict_of(tuple_of(A), int))
    def f(x):
        return 'a'

    @dispatch(dict_of(tuple_of(B), int))
    def f(x):
        return 'b'

    assert f({(A(), A()): 1}) == 'a'
    assert f({(B(), B()): 2}) == 'b'
    assert f({(C(), C()): 3}) == 'a'
    assert f({(C(), A()): 4}) == 'a'
    assert raises(UnavailableSignatureError, lambda: f({(B(), B()): 5.}))
def test_register_stacking__list_of__tuple_of():
    f = Dispatcher('f')

    @f.register(list_of(int))
    @f.register(tuple_of(int))
    def rev(x):
        return x[::-1]

    assert f((1, 2, 3)) == (3, 2, 1)
    assert f([1, 2, 3]) == [3, 2, 1]
    assert raises(UnavailableSignatureError, lambda: f('hello'))
    assert rev('hello') == 'olleh'
def _product(thetas: ThetaType, operators: tuple_of(Form)):
    try:
        output = _product_forms_output_cache[operators]
    except KeyError:
        # Keep the operators as Forms and delay assembly as long as possible
        output = 0
        constants = list()
        for (theta, operator) in zip(thetas, operators):
            theta = float(theta)
            constant = Constant(theta)
            output += constant * operator
            constants.append(constant)
        output = ProductOutput(output)
        _product_forms_output_cache[operators] = output
        _product_forms_constants_cache[operators] = constants
        return output
    else:
        constants = _product_forms_constants_cache[operators]
        for (theta, constant) in zip(thetas, constants):
            theta = float(theta)
            constant.assign(theta)
        return output
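# A toy sketch of the caching pattern used by the Form overload above, assuming a
# hypothetical Placeholder class in place of dolfin.Constant: the weighted expression
# is built only on the first call, and later calls merely reassign the placeholders.
class Placeholder(object):
    def __init__(self, value):
        self.value = value

    def assign(self, value):
        self.value = value


_output_cache_demo = dict()
_constants_cache_demo = dict()


def product_demo(thetas, operators):
    try:
        output = _output_cache_demo[operators]
    except KeyError:
        constants = [Placeholder(float(theta)) for theta in thetas]
        output = tuple(zip(constants, operators))  # stands in for sum_q constant_q * operator_q
        _output_cache_demo[operators] = output
        _constants_cache_demo[operators] = constants
        return output
    else:
        for (theta, constant) in zip(thetas, _constants_cache_demo[operators]):
            constant.assign(float(theta))
        return output


first = product_demo((1.0, 2.0), ("a_0", "a_1"))
second = product_demo((3.0, 4.0), ("a_0", "a_1"))
assert second is first  # same cached expression, only the placeholder values changed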
def _function_from_ufl_component_tensor(expression: Sum, indices: tuple_of(IndexBase)):
    addend_1 = as_tensor(expression.ufl_operands[0], indices)
    addend_2 = as_tensor(expression.ufl_operands[1], indices)
    return _function_from_ufl_sum(addend_1, addend_2)
def _function_from_ufl_component_tensor(expression: Division, indices: tuple_of(IndexBase)):
    numerator_function = as_tensor(expression.ufl_operands[0], indices)
    denominator = expression.ufl_operands[1]
    return _function_from_ufl_division(numerator_function, denominator)
# Copyright (C) 2015-2021 by the RBniCS authors
#
# This file is part of RBniCS.
#
# SPDX-License-Identifier: LGPL-3.0-or-later

from numbers import Number
from numpy import arange, isclose
from rbnics.backends.abstract import TimeSeries as AbstractTimeSeries
from rbnics.utils.decorators import BackendFor, overload, tuple_of


@BackendFor("common", inputs=((tuple_of(Number), AbstractTimeSeries), (Number, None)))
class TimeSeries(AbstractTimeSeries):
    def __init__(self, *args):
        assert len(args) in (1, 2)
        if len(args) == 1:
            other_time_series, = args
            assert isinstance(other_time_series, TimeSeries)
            self._time_interval = other_time_series._time_interval
            self._time_step_size = other_time_series._time_step_size
        else:
            time_interval, time_step_size = args
            self._time_interval = time_interval
            self._time_step_size = time_step_size
        self._times = arange(self._time_interval[0], self._time_interval[1] + self._time_step_size / 2.,
                             self._time_step_size).tolist()
        self._list = list()

    def stored_times(self):
        return self._times[:len(self._list)]
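# A hedged usage sketch for the TimeSeries above, assuming a time interval (0., 1.)
# with step 0.25, and peeking at the private _times attribute for illustration:
# the half-step offset passed to arange makes the final time inclusive.
time_series = TimeSeries((0., 1.), 0.25)
assert time_series._times == [0.0, 0.25, 0.5, 0.75, 1.0]
assert time_series.stored_times() == []  # no solutions appended yet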
class ExactParametrizedFunctionsDecoratedProblem_Class(ParametrizedDifferentialProblem_DerivedClass):

    # Default initialization of members
    def __init__(self, V, **kwargs):
        # Call the parent initialization
        ParametrizedDifferentialProblem_DerivedClass.__init__(self, V, **kwargs)
        # Storage for symbolic parameters
        self.mu_symbolic = None
        # Store values passed to decorator
        self._store_exact_evaluation_stages(stages)
        # Generate offline online backend for current problem
        self.offline_online_backend = OfflineOnlineBackend(self.name())

    @overload(str)
    def _store_exact_evaluation_stages(self, stages):
        assert stages != "online", ("This choice does not make any sense because it requires an EIM/DEIM"
                                    + " offline stage which then is not used online")
        assert stages == "offline"
        self._apply_exact_evaluation_at_stages = (stages, )

    @overload(tuple_of(str))
    def _store_exact_evaluation_stages(self, stages):
        assert len(stages) in (1, 2)
        assert stages[0] in ("offline", "online")
        if len(stages) > 1:
            assert stages[1] in ("offline", "online")
            assert stages[0] != stages[1]
        self._apply_exact_evaluation_at_stages = stages

    def init(self):
        # may be shared between EIM/DEIM and exact evaluation
        has_disable_init_operators = hasattr(self, "disable_init_operators")
        # Call parent's method (enforcing an empty parent call to _init_operators)
        if not has_disable_init_operators:
            self.disable_init_operators = PatchInstanceMethod(self, "_init_operators", lambda self_: None)
        self.disable_init_operators.patch()
        ParametrizedDifferentialProblem_DerivedClass.init(self)
        self.disable_init_operators.unpatch()
        if not has_disable_init_operators:
            del self.disable_init_operators
        # Then, initialize exact operators
        self._init_operators_exact()

    def _init_operators_exact(self):
        # Initialize symbolic parameters only once
        if self.mu_symbolic is None:
            self.mu_symbolic = SymbolicParameters(self, self.V, self.mu)
        # Initialize offline/online switch storage only once
        # (may be shared between EIM/DEIM and exact evaluation)
        OfflineOnlineClassMethod = self.offline_online_backend.OfflineOnlineClassMethod
        OfflineOnlineExpansionStorage = self.offline_online_backend.OfflineOnlineExpansionStorage
        OfflineOnlineExpansionStorageSize = self.offline_online_backend.OfflineOnlineExpansionStorageSize
        OfflineOnlineSwitch = self.offline_online_backend.OfflineOnlineSwitch
        if not isinstance(self.Q, OfflineOnlineSwitch):
            assert isinstance(self.Q, dict)
            assert len(self.Q) == 0
            self.Q = OfflineOnlineExpansionStorageSize()
        if not isinstance(self.operator, OfflineOnlineSwitch):
            assert isinstance(self.operator, dict)
            assert len(self.operator) == 0
            self.operator = OfflineOnlineExpansionStorage(self, "OperatorExpansionStorage")
        if not isinstance(self.assemble_operator, OfflineOnlineSwitch):
            assert inspect.ismethod(self.assemble_operator)
            self._assemble_operator_exact = self.assemble_operator
            self.assemble_operator = OfflineOnlineClassMethod(self, "assemble_operator")
        if not isinstance(self.compute_theta, OfflineOnlineSwitch):
            assert inspect.ismethod(self.compute_theta)
            self._compute_theta_exact = self.compute_theta
            self.compute_theta = OfflineOnlineClassMethod(self, "compute_theta")
        # Temporarily replace float parameters with symbols, so that the forms do not hardcode
        # the current value of the parameter while assembling.
        mu_float = self.mu
        self.mu = self.mu_symbolic
        # Setup offline/online switches
        former_stage = OfflineOnlineSwitch.get_current_stage()
        for stage_exact in self._apply_exact_evaluation_at_stages:
            OfflineOnlineSwitch.set_current_stage(stage_exact)
            # Enforce exact evaluation of assemble_operator and compute_theta
            self.assemble_operator.attach(self._assemble_operator_exact, lambda term: True)
            self.compute_theta.attach(self._compute_theta_exact, lambda term: True)
            # Setup offline/online operators storage with exact operators
            self.operator.set_is_affine(False)
            self._init_operators()
            self.operator.unset_is_affine()
        # Restore former stage in offline/online switch storage
        OfflineOnlineSwitch.set_current_stage(former_stage)
        # Restore float parameters
        self.mu = mu_float

    def solve(self, **kwargs):
        # Exact operators should be used regardless of the current stage
        OfflineOnlineSwitch = self.offline_online_backend.OfflineOnlineSwitch
        former_stage = OfflineOnlineSwitch.get_current_stage()
        OfflineOnlineSwitch.set_current_stage("offline")
        # Call Parent method
        solution = ParametrizedDifferentialProblem_DerivedClass.solve(self, **kwargs)
        # Restore former stage in offline/online switch storage
        OfflineOnlineSwitch.set_current_stage(former_stage)
        # Return
        return solution

    def compute_output(self):
        # Exact operators should be used regardless of the current stage
        OfflineOnlineSwitch = self.offline_online_backend.OfflineOnlineSwitch
        former_stage = OfflineOnlineSwitch.get_current_stage()
        OfflineOnlineSwitch.set_current_stage("offline")
        # Call Parent method
        output = ParametrizedDifferentialProblem_DerivedClass.compute_output(self)
        # Restore former stage in offline/online switch storage
        OfflineOnlineSwitch.set_current_stage(former_stage)
        # Return
        return output

    def _cache_key_from_kwargs(self, **kwargs):
        cache_key = ParametrizedDifferentialProblem_DerivedClass._cache_key_from_kwargs(self, **kwargs)
        # Change cache key depending on current stage
        OfflineOnlineSwitch = self.offline_online_backend.OfflineOnlineSwitch
        if OfflineOnlineSwitch.get_current_stage() in self._apply_exact_evaluation_at_stages:
            # Append current stage to cache key
            cache_key = cache_key + ("exact_evaluation", )
        # Return
        return cache_key
class OnlineNonHierarchicalAffineExpansionStorage(object):
    def __init__(self, arg1):
        self._content = dict()
        self._len = arg1

    @overload(slice)
    def __getitem__(self, key):
        N = self._convert_key(key)
        assert N in self._content
        return self._content[N]

    @overload(tuple_of(slice))
    def __getitem__(self, key):
        assert len(key) == 2
        assert key[0] == key[1]
        return self.__getitem__(key[0])

    @overload(slice, OnlineAffineExpansionStorage)
    def __setitem__(self, key, item):
        N = self._convert_key(key)
        assert len(item) == self._len
        self._content[N] = item

    @overload(tuple_of(slice), OnlineAffineExpansionStorage)
    def __setitem__(self, key, item):
        assert len(key) == 2
        assert key[0] == key[1]
        return self.__setitem__(key[0], item)

    def _convert_key(self, key):
        assert key.start is None
        assert key.step is None
        assert isinstance(key.stop, (dict, int))
        if isinstance(key.stop, dict):
            assert len(key.stop) == 1
            assert "u" in key.stop
            N = key.stop["u"]
        else:
            N = key.stop
        return N

    def save(self, directory, filename):
        # Get full directory name
        full_directory = Folders.Folder(os.path.join(str(directory), filename))
        full_directory.create()
        # Save Nmax
        self._save_Nmax(full_directory)
        # Save non hierarchical content
        for (N, affine_expansion_N) in self._content.items():
            self._save_content(N, affine_expansion_N, directory, filename)

    def _save_Nmax(self, full_directory):
        if len(self._content) > 0:
            assert min(self._content.keys()) == 1
            assert max(self._content.keys()) == len(self._content)
        NmaxIO.save_file(len(self._content), full_directory, "Nmax")

    def _save_content(self, N, affine_expansion_N, directory, filename):
        affine_expansion_N.save(directory, filename + "_N=" + str(N))

    def load(self, directory, filename):
        if len(self._content) > 0:  # avoid loading multiple times
            return False
        # Get full directory name
        full_directory = Folders.Folder(os.path.join(str(directory), filename))
        # Load Nmax
        Nmax = self._load_Nmax(full_directory)
        # Load non hierarchical content
        for N in range(1, Nmax + 1):
            self._content[N] = self._load_content(N, directory, filename)
        # Return
        return True

    def _load_Nmax(self, full_directory):
        assert NmaxIO.exists_file(full_directory, "Nmax")
        return NmaxIO.load_file(full_directory, "Nmax")

    def _load_content(self, N, directory, filename):
        affine_expansion_N = OnlineAffineExpansionStorage(self._len)
        loaded = affine_expansion_N.load(directory, filename + "_N=" + str(N))
        assert loaded is True
        return affine_expansion_N

    def __len__(self):
        return self._len
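# A hedged usage sketch of the slice-keyed storage above, with hypothetical names:
# keys of the form [:N] (or the componentwise [:{"u": N}]) address the affine
# expansion truncated at N basis functions, and [:N, :N] is accepted as a synonym.
#
#     storage = OnlineNonHierarchicalAffineExpansionStorage(Q)  # Q affine terms
#     storage[:N] = affine_expansion_N  # an OnlineAffineExpansionStorage of length Q
#     assert storage[:N, :N] is storage[:N]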
class NonAffineExpansionStorage(AbstractNonAffineExpansionStorage):
    def __init__(self, *shape):
        self._shape = shape
        self._type = "empty"
        self._content = dict()
        self._precomputed_slices = Cache()  # from tuple to NonAffineExpansionStorage
        assert len(shape) in (1, 2)
        if len(shape) == 1:
            self._smallest_key = 0
            self._largest_key = shape[0] - 1
        else:
            self._smallest_key = (0, 0)
            self._largest_key = (shape[0] - 1, shape[1] - 1)

    def save(self, directory, filename):
        # Get full directory name
        full_directory = Folders.Folder(os.path.join(str(directory), filename))
        full_directory.create()
        # Export depending on type
        TypeIO.save_file(self._type, full_directory, "type")
        assert self._type in ("basis_functions_matrix", "empty", "error_estimation_operators_11",
                              "error_estimation_operators_21", "error_estimation_operators_22",
                              "functions_list", "operators")
        if self._type in ("basis_functions_matrix", "functions_list"):
            # Save delayed functions
            delayed_functions = self._content[self._type]
            it = NonAffineExpansionStorageContent_Iterator(
                delayed_functions, flags=["c_index", "multi_index", "refs_ok"], op_flags=["readonly"])
            while not it.finished:
                delayed_function = delayed_functions[it.multi_index]
                delayed_function.save(full_directory, "delayed_functions_" + str(it.index))
                it.iternext()
        elif self._type == "empty":
            pass
        elif self._type in ("error_estimation_operators_11", "error_estimation_operators_21",
                            "error_estimation_operators_22"):
            # Save delayed functions
            delayed_function_type = {
                DelayedBasisFunctionsMatrix: "DelayedBasisFunctionsMatrix",
                DelayedLinearSolver: "DelayedLinearSolver"
            }
            assert len(self._content["delayed_functions"]) == 2
            for (index, delayed_functions) in enumerate(self._content["delayed_functions"]):
                it = NonAffineExpansionStorageContent_Iterator(
                    delayed_functions, flags=["c_index", "refs_ok"], op_flags=["readonly"])
                while not it.finished:
                    delayed_function = delayed_functions[it.index]
                    DelayedFunctionsTypeIO.save_file(
                        delayed_function_type[type(delayed_function)], full_directory,
                        "delayed_functions_" + str(index) + "_" + str(it.index) + "_type")
                    DelayedFunctionsProblemNameIO.save_file(
                        delayed_function.get_problem_name(), full_directory,
                        "delayed_functions_" + str(index) + "_" + str(it.index) + "_problem_name")
                    delayed_function.save(
                        full_directory, "delayed_functions_" + str(index) + "_" + str(it.index) + "_content")
                    it.iternext()
            ErrorEstimationInnerProductIO.save_file(
                get_reduced_problem_from_error_estimation_inner_product(
                    self._content["inner_product_matrix"]).truth_problem.name(),
                full_directory, "inner_product_matrix_problem_name")
        elif self._type == "operators":
            # Save truth content
            it = NonAffineExpansionStorageContent_Iterator(
                self._content["truth_operators"], flags=["c_index", "multi_index", "refs_ok"],
                op_flags=["readonly"])
            while not it.finished:
                operator = self._content["truth_operators"][it.multi_index]
                assert isinstance(operator, (AbstractParametrizedTensorFactory, NumericForm))
                if isinstance(operator, AbstractParametrizedTensorFactory):
                    problem_name = get_problem_from_parametrized_operator(operator).name()
                    (term, index) = get_term_and_index_from_parametrized_operator(operator)
                    TruthContentItemIO.save_file(
                        "ParametrizedTensorFactory", full_directory,
                        "truth_operator_" + str(it.index) + "_type")
                    TruthContentItemIO.save_file(
                        (problem_name, term, index), full_directory, "truth_operator_" + str(it.index))
                elif isinstance(operator, NumericForm):
                    TruthContentItemIO.save_file(
                        "NumericForm", full_directory, "truth_operator_" + str(it.index) + "_type")
                    TruthContentItemIO.save_file(
                        operator, full_directory, "truth_operator_" + str(it.index))
                else:
                    raise TypeError("Invalid operator type")
                it.iternext()
            assert "truth_operators_as_expansion_storage" in self._content
            # Save basis functions content
            assert len(self._content["basis_functions"]) in (0, 1, 2)
            BasisFunctionsContentLengthIO.save_file(
                len(self._content["basis_functions"]), full_directory, "basis_functions_length")
            for (index, basis_functions) in enumerate(self._content["basis_functions"]):
                BasisFunctionsProblemNameIO.save_file(
                    get_reduced_problem_from_basis_functions(basis_functions).truth_problem.name(),
                    full_directory, "basis_functions_" + str(index) + "_problem_name")
                BasisFunctionsProblemNameIO.save_file(
                    basis_functions._components_name, full_directory,
                    "basis_functions_" + str(index) + "_components_name")
        else:
            raise ValueError("Invalid type")

    def load(self, directory, filename):
        if self._type != "empty":  # avoid loading multiple times
            if self._type in ("basis_functions_matrix", "functions_list"):
                delayed_functions = self._content[self._type]
                it = NonAffineExpansionStorageContent_Iterator(
                    delayed_functions, flags=["c_index", "multi_index", "refs_ok"], op_flags=["readonly"])
                while not it.finished:
                    if isinstance(delayed_functions[it.multi_index], DelayedFunctionsList):
                        assert self._type == "functions_list"
                        if len(delayed_functions[it.multi_index]) > 0:
                            # ... unless it is an empty FunctionsList
                            return False
                    elif isinstance(delayed_functions[it.multi_index], DelayedBasisFunctionsMatrix):
                        assert self._type == "basis_functions_matrix"
                        if sum(delayed_functions[it.multi_index]
                               ._component_name_to_basis_component_length.values()) > 0:
                            # ... unless it is an empty BasisFunctionsMatrix
                            return False
                    else:
                        raise TypeError("Invalid delayed functions")
                    it.iternext()
            else:
                return False
        # Get full directory name
        full_directory = Folders.Folder(os.path.join(str(directory), filename))
        # Detect trivial case
        assert TypeIO.exists_file(full_directory, "type")
        imported_type = TypeIO.load_file(full_directory, "type")
        self._type = imported_type
        assert self._type in ("basis_functions_matrix", "empty", "error_estimation_operators_11",
                              "error_estimation_operators_21", "error_estimation_operators_22",
                              "functions_list", "operators")
        if self._type in ("basis_functions_matrix", "functions_list"):
            # Load delayed functions
            assert self._type in self._content
            delayed_functions = self._content[self._type]
            it = NonAffineExpansionStorageContent_Iterator(
                delayed_functions, flags=["c_index", "multi_index", "refs_ok"])
            while not it.finished:
                delayed_function = delayed_functions[it.multi_index]
                delayed_function.load(full_directory, "delayed_functions_" + str(it.index))
                it.iternext()
        elif self._type == "empty":
            pass
        elif self._type in ("error_estimation_operators_11", "error_estimation_operators_21",
                            "error_estimation_operators_22"):
            # Load delayed functions
            assert "delayed_functions" not in self._content
            self._content["delayed_functions"] = [
                NonAffineExpansionStorageContent_Base(self._shape[0], dtype=object),
                NonAffineExpansionStorageContent_Base(self._shape[1], dtype=object)
            ]
            for (index, delayed_functions) in enumerate(self._content["delayed_functions"]):
                it = NonAffineExpansionStorageContent_Iterator(delayed_functions, flags=["c_index", "refs_ok"])
                while not it.finished:
                    assert DelayedFunctionsTypeIO.exists_file(
                        full_directory, "delayed_functions_" + str(index) + "_" + str(it.index) + "_type")
                    delayed_function_type = DelayedFunctionsTypeIO.load_file(
                        full_directory, "delayed_functions_" + str(index) + "_" + str(it.index) + "_type")
                    assert DelayedFunctionsProblemNameIO.exists_file(
                        full_directory,
                        "delayed_functions_" + str(index) + "_" + str(it.index) + "_problem_name")
                    delayed_function_problem_name = DelayedFunctionsProblemNameIO.load_file(
                        full_directory,
                        "delayed_functions_" + str(index) + "_" + str(it.index) + "_problem_name")
                    delayed_function_problem = get_problem_from_problem_name(delayed_function_problem_name)
                    assert delayed_function_type in ("DelayedBasisFunctionsMatrix", "DelayedLinearSolver")
                    if delayed_function_type == "DelayedBasisFunctionsMatrix":
                        delayed_function = DelayedBasisFunctionsMatrix(delayed_function_problem.V)
                        delayed_function.init(delayed_function_problem.components)
                    elif delayed_function_type == "DelayedLinearSolver":
                        delayed_function = DelayedLinearSolver()
                    else:
                        raise ValueError("Invalid delayed function")
                    delayed_function.load(
                        full_directory, "delayed_functions_" + str(index) + "_" + str(it.index) + "_content")
                    delayed_functions[it.index] = delayed_function
                    it.iternext()
            # Load inner product
            assert ErrorEstimationInnerProductIO.exists_file(
                full_directory, "inner_product_matrix_problem_name")
            inner_product_matrix_problem_name = ErrorEstimationInnerProductIO.load_file(
                full_directory, "inner_product_matrix_problem_name")
            inner_product_matrix_problem = get_problem_from_problem_name(inner_product_matrix_problem_name)
            inner_product_matrix_reduced_problem = get_reduced_problem_from_problem(
                inner_product_matrix_problem)
            self._content["inner_product_matrix"] = (
                inner_product_matrix_reduced_problem._error_estimation_inner_product)
            # Recompute shape
            assert "delayed_functions_shape" not in self._content
            self._content["delayed_functions_shape"] = DelayedTransposeShape(
                (self._content["delayed_functions"][0][0], self._content["delayed_functions"][1][0]))
            # Prepare precomputed slices
            self._precomputed_slices.clear()
            self._prepare_trivial_precomputed_slice()
        elif self._type == "operators":
            # Load truth content
            assert "truth_operators" not in self._content
            self._content["truth_operators"] = NonAffineExpansionStorageContent_Base(
                self._shape, dtype=object)
            it = NonAffineExpansionStorageContent_Iterator(
                self._content["truth_operators"], flags=["c_index", "multi_index", "refs_ok"])
            while not it.finished:
                assert TruthContentItemIO.exists_file(
                    full_directory, "truth_operator_" + str(it.index) + "_type")
                operator_type = TruthContentItemIO.load_file(
                    full_directory, "truth_operator_" + str(it.index) + "_type")
                assert operator_type in ("NumericForm", "ParametrizedTensorFactory")
                if operator_type == "NumericForm":
                    assert TruthContentItemIO.exists_file(full_directory, "truth_operator_" + str(it.index))
                    value = TruthContentItemIO.load_file(full_directory, "truth_operator_" + str(it.index))
                    self._content["truth_operators"][it.multi_index] = NumericForm(value)
                elif operator_type == "ParametrizedTensorFactory":
                    assert TruthContentItemIO.exists_file(full_directory, "truth_operator_" + str(it.index))
                    (problem_name, term, index) = TruthContentItemIO.load_file(
                        full_directory, "truth_operator_" + str(it.index))
                    truth_problem = get_problem_from_problem_name(problem_name)
                    self._content["truth_operators"][it.multi_index] = truth_problem.operator[term][index]
                else:
                    raise ValueError("Invalid operator type")
                it.iternext()
            assert "truth_operators_as_expansion_storage" not in self._content
            self._prepare_truth_operators_as_expansion_storage()
            # Load basis functions content
            assert BasisFunctionsContentLengthIO.exists_file(full_directory, "basis_functions_length")
            basis_functions_length = BasisFunctionsContentLengthIO.load_file(
                full_directory, "basis_functions_length")
            assert basis_functions_length in (0, 1, 2)
            assert "basis_functions" not in self._content
            self._content["basis_functions"] = list()
            for index in range(basis_functions_length):
                assert BasisFunctionsProblemNameIO.exists_file(
                    full_directory, "basis_functions_" + str(index) + "_problem_name")
                basis_functions_problem_name = BasisFunctionsProblemNameIO.load_file(
                    full_directory, "basis_functions_" + str(index) + "_problem_name")
                assert BasisFunctionsProblemNameIO.exists_file(
                    full_directory, "basis_functions_" + str(index) + "_components_name")
                basis_functions_components_name = BasisFunctionsProblemNameIO.load_file(
                    full_directory, "basis_functions_" + str(index) + "_components_name")
                basis_functions_problem = get_problem_from_problem_name(basis_functions_problem_name)
                basis_functions_reduced_problem = get_reduced_problem_from_problem(basis_functions_problem)
                basis_functions = basis_functions_reduced_problem.basis_functions
                if basis_functions_components_name != basis_functions_problem.components:
                    basis_functions = basis_functions[basis_functions_components_name]
                self._content["basis_functions"].append(basis_functions)
            # Recompute shape
            self._content["basis_functions_shape"] = DelayedTransposeShape(self._content["basis_functions"])
            # Reset precomputed slices
            self._precomputed_slices.clear()
            self._prepare_trivial_precomputed_slice()
        else:
            raise ValueError("Invalid type")
        return True

    def _prepare_trivial_precomputed_slice(self):
        empty_slice = slice(None)
        assert self._type in ("error_estimation_operators_11", "error_estimation_operators_21",
                              "error_estimation_operators_22", "operators")
        if self._type == "error_estimation_operators_11":
            pass  # nothing to be done (scalar content)
        elif self._type == "error_estimation_operators_21":
            assert "delayed_functions" in self._content
            assert len(self._content["delayed_functions"]) == 2
            assert "delayed_functions_shape" in self._content
            slice_ = slice_to_array(
                self._content["delayed_functions_shape"], empty_slice,
                self._content["delayed_functions_shape"]._component_name_to_basis_component_length,
                self._content["delayed_functions_shape"]._component_name_to_basis_component_index)
            self._precomputed_slices[slice_] = self
        elif self._type == "error_estimation_operators_22":
            assert "delayed_functions" in self._content
            assert len(self._content["delayed_functions"]) == 2
            assert "delayed_functions_shape" in self._content
            slice_ = slice_to_array(
                self._content["delayed_functions_shape"], (empty_slice, empty_slice),
                self._content["delayed_functions_shape"]._component_name_to_basis_component_length,
                self._content["delayed_functions_shape"]._component_name_to_basis_component_index)
            self._precomputed_slices[slice_] = self
        elif self._type == "operators":
            assert len(self._content["basis_functions"]) in (0, 1, 2)
            assert "basis_functions_shape" in self._content
            if len(self._content["basis_functions"]) == 0:
                pass  # nothing to be done (scalar content)
            elif len(self._content["basis_functions"]) == 1:
                slice_ = slice_to_array(
                    self._content["basis_functions_shape"], empty_slice,
                    self._content["basis_functions_shape"]._component_name_to_basis_component_length,
                    self._content["basis_functions_shape"]._component_name_to_basis_component_index)
                self._precomputed_slices[slice_] = self
            elif len(self._content["basis_functions"]) == 2:
                slices = slice_to_array(
                    self._content["basis_functions_shape"], (empty_slice, empty_slice),
                    self._content["basis_functions_shape"]._component_name_to_basis_component_length,
                    self._content["basis_functions_shape"]._component_name_to_basis_component_index)
                self._precomputed_slices[slices] = self
            else:
                raise ValueError("Invalid length")
        else:
            raise ValueError("Invalid type")

    @overload(slice, )
    def __getitem__(self, key):
        assert self._type in ("error_estimation_operators_21", "operators")
        if self._type == "error_estimation_operators_21":
            assert "delayed_functions" in self._content
            assert len(self._content["delayed_functions"]) == 2
            assert "delayed_functions_shape" in self._content
            slice_ = slice_to_array(
                self._content["delayed_functions_shape"], key,
                self._content["delayed_functions_shape"]._component_name_to_basis_component_length,
                self._content["delayed_functions_shape"]._component_name_to_basis_component_index)
            if slice_ in self._precomputed_slices:
                return self._precomputed_slices[slice_]
            else:
                output = NonAffineExpansionStorage.__new__(type(self), *self._shape)
                output.__init__(*self._shape)
                output._type = self._type
                output._content["inner_product_matrix"] = self._content["inner_product_matrix"]
                output._content["delayed_functions"] = [
                    NonAffineExpansionStorageContent_Base(self._shape[0], dtype=object),
                    NonAffineExpansionStorageContent_Base(self._shape[1], dtype=object)
                ]
                for q in range(self._shape[0]):
                    output._content["delayed_functions"][0][q] = self._content["delayed_functions"][0][q][key]
                for q in range(self._shape[1]):
                    output._content["delayed_functions"][1][q] = self._content["delayed_functions"][1][q]
                output._content["delayed_functions_shape"] = DelayedTransposeShape(
                    (output._content["delayed_functions"][0][0], output._content["delayed_functions"][1][0]))
                self._precomputed_slices[slice_] = output
                return output
        elif self._type == "operators":
            assert "basis_functions" in self._content
            assert len(self._content["basis_functions"]) == 1
            assert "basis_functions_shape" in self._content
            slice_ = slice_to_array(
                self._content["basis_functions_shape"], key,
                self._content["basis_functions_shape"]._component_name_to_basis_component_length,
                self._content["basis_functions_shape"]._component_name_to_basis_component_index)
            if slice_ in self._precomputed_slices:
                return self._precomputed_slices[slice_]
            else:
                output = NonAffineExpansionStorage.__new__(type(self), *self._shape)
                output.__init__(*self._shape)
                output._type = self._type
                output._content["truth_operators"] = self._content["truth_operators"]
                output._content["truth_operators_as_expansion_storage"] = self._content[
                    "truth_operators_as_expansion_storage"]
                output._content["basis_functions"] = list()
                output._content["basis_functions"].append(self._content["basis_functions"][0][key])
                output._content["basis_functions_shape"] = DelayedTransposeShape(
                    output._content["basis_functions"])
                self._precomputed_slices[slice_] = output
                return output
        else:
            raise ValueError("Invalid type")

    @overload(tuple_of(slice), )
    def __getitem__(self, key):
        assert self._type in ("error_estimation_operators_22", "operators")
        if self._type == "error_estimation_operators_22":
            assert len(key) == 2
            assert "delayed_functions" in self._content
            assert len(self._content["delayed_functions"]) == 2
            assert "delayed_functions_shape" in self._content
            slice_ = slice_to_array(
                self._content["delayed_functions_shape"], key,
                self._content["delayed_functions_shape"]._component_name_to_basis_component_length,
                self._content["delayed_functions_shape"]._component_name_to_basis_component_index)
            if slice_ in self._precomputed_slices:
                return self._precomputed_slices[slice_]
            else:
                output = NonAffineExpansionStorage.__new__(type(self), *self._shape)
                output.__init__(*self._shape)
                output._type = self._type
                output._content["inner_product_matrix"] = self._content["inner_product_matrix"]
                output._content["delayed_functions"] = [
                    NonAffineExpansionStorageContent_Base(self._shape[0], dtype=object),
                    NonAffineExpansionStorageContent_Base(self._shape[1], dtype=object)
                ]
                for q in range(self._shape[0]):
                    output._content["delayed_functions"][0][q] = self._content[
                        "delayed_functions"][0][q][key[0]]
                for q in range(self._shape[1]):
                    output._content["delayed_functions"][1][q] = self._content[
                        "delayed_functions"][1][q][key[1]]
                output._content["delayed_functions_shape"] = DelayedTransposeShape(
                    (output._content["delayed_functions"][0][0], output._content["delayed_functions"][1][0]))
                self._precomputed_slices[slice_] = output
                return output
        elif self._type == "operators":
            assert len(key) == 2
            assert "basis_functions" in self._content
            assert len(self._content["basis_functions"]) == 2
            assert "basis_functions_shape" in self._content
            slices = slice_to_array(
                self._content["basis_functions_shape"], key,
                self._content["basis_functions_shape"]._component_name_to_basis_component_length,
                self._content["basis_functions_shape"]._component_name_to_basis_component_index)
            if slices in self._precomputed_slices:
                return self._precomputed_slices[slices]
            else:
                output = NonAffineExpansionStorage.__new__(type(self), *self._shape)
                output.__init__(*self._shape)
                output._type = self._type
                output._content["truth_operators"] = self._content["truth_operators"]
                output._content["truth_operators_as_expansion_storage"] = self._content[
                    "truth_operators_as_expansion_storage"]
                output._content["basis_functions"] = list()
                output._content["basis_functions"].append(self._content["basis_functions"][0][key[0]])
                output._content["basis_functions"].append(self._content["basis_functions"][1][key[1]])
                output._content["basis_functions_shape"] = DelayedTransposeShape(
                    output._content["basis_functions"])
                self._precomputed_slices[slices] = output
                return output
        else:
            raise ValueError("Invalid type")

    @overload(int, )
    def __getitem__(self, key):
        assert self._type in ("basis_functions_matrix", "functions_list", "operators")
        if self._type in ("basis_functions_matrix", "functions_list"):
            return self._content[self._type][key]
        elif self._type == "operators":
            return self._delay_transpose(self._content["basis_functions"],
                                         self._content["truth_operators"][key])
        else:
            raise ValueError("Invalid type")

    @overload(tuple_of(int), )
    def __getitem__(self, key):
        assert self._type in ("error_estimation_operators_11", "error_estimation_operators_21",
                              "error_estimation_operators_22")
        return self._delay_transpose(
            (self._content["delayed_functions"][0][key[0]], self._content["delayed_functions"][1][key[1]]),
            self._content["inner_product_matrix"])

    def __iter__(self):
        assert self._type in ("basis_functions_matrix", "functions_list", "operators")
        if self._type in ("basis_functions_matrix", "functions_list"):
            return self._content[self._type].__iter__()
        elif self._type == "operators":
            return (self._delay_transpose(self._content["basis_functions"], op)
                    for op in self._content["truth_operators"].__iter__())
        else:
            raise ValueError("Invalid type")

    @overload((int, tuple_of(int)), AbstractBasisFunctionsMatrix)
    def __setitem__(self, key, item):
        if self._type != "empty":
            assert self._type == "basis_functions_matrix"
        else:
            self._type = "basis_functions_matrix"
            self._content[self._type] = NonAffineExpansionStorageContent_Base(self._shape, dtype=object)
        self._content[self._type][key] = DelayedBasisFunctionsMatrix(item.space)
        self._content[self._type][key].init(item._components_name)

    @overload((int, tuple_of(int)), AbstractFunctionsList)
    def __setitem__(self, key, item):
        if self._type != "empty":
            assert self._type == "functions_list"
        else:
            self._type = "functions_list"
            self._content[self._type] = NonAffineExpansionStorageContent_Base(self._shape, dtype=object)
        self._content[self._type][key] = DelayedFunctionsList(item.space)

    @overload((int, tuple_of(int)), DelayedTranspose)
    def __setitem__(self, key, item):
        assert isinstance(item._args[0], (AbstractBasisFunctionsMatrix, DelayedBasisFunctionsMatrix,
                                          DelayedLinearSolver))
        if isinstance(item._args[0], AbstractBasisFunctionsMatrix):
            if self._type != "empty":
                assert self._type == "operators"
            else:
                self._type = "operators"
            # Reset attributes if size has changed
            if key == self._smallest_key:
                # this assumes that __setitem__ is not random access but called for increasing key
                self._content.pop("truth_operators_as_expansion_storage", None)
                self._content["truth_operators"] = NonAffineExpansionStorageContent_Base(
                    self._shape, dtype=object)
                self._content["basis_functions"] = list()
                self._content.pop("basis_functions_shape", None)
            # Store
            assert len(item._args) in (2, 3)
            if len(self._content["basis_functions"]) == 0:
                assert isinstance(item._args[0], AbstractBasisFunctionsMatrix)
                self._content["basis_functions"].append(item._args[0])
            else:
                assert item._args[0] is self._content["basis_functions"][0]
            self._content["truth_operators"][key] = item._args[1]
            if len(item._args) > 2:
                if len(self._content["basis_functions"]) == 1:
                    assert isinstance(item._args[2], AbstractBasisFunctionsMatrix)
                    self._content["basis_functions"].append(item._args[2])
                else:
                    assert item._args[2] is self._content["basis_functions"][1]
            # Recompute shape
            if "basis_functions_shape" not in self._content:
                self._content["basis_functions_shape"] = DelayedTransposeShape(
                    self._content["basis_functions"])
            # Compute truth expansion storage and prepare precomputed slices
            if key == self._largest_key:
                # this assumes that __setitem__ is not random access but called for increasing key
                self._prepare_truth_operators_as_expansion_storage()
                self._precomputed_slices.clear()
                self._prepare_trivial_precomputed_slice()
        elif isinstance(item._args[0], (DelayedBasisFunctionsMatrix, DelayedLinearSolver)):
            assert len(item._args) == 3
            assert isinstance(item._args[2], (DelayedBasisFunctionsMatrix, DelayedLinearSolver))
            if isinstance(item._args[0], DelayedLinearSolver):
                assert isinstance(item._args[2], DelayedLinearSolver)
                if self._type != "empty":
                    assert self._type == "error_estimation_operators_11"
                else:
                    self._type = "error_estimation_operators_11"
            elif isinstance(item._args[0], DelayedBasisFunctionsMatrix):
                if isinstance(item._args[2], DelayedLinearSolver):
                    if self._type != "empty":
                        assert self._type == "error_estimation_operators_21"
                    else:
                        self._type = "error_estimation_operators_21"
                elif isinstance(item._args[2], DelayedBasisFunctionsMatrix):
                    if self._type != "empty":
                        assert self._type == "error_estimation_operators_22"
                    else:
                        self._type = "error_estimation_operators_22"
                else:
                    raise TypeError("Invalid arguments to NonAffineExpansionStorage")
            else:
                raise TypeError("Invalid arguments to NonAffineExpansionStorage")
            # Reset attributes if size has changed
            if key == self._smallest_key:
                # this assumes that __setitem__ is not random access but called for increasing key
                self._content["delayed_functions"] = [
                    NonAffineExpansionStorageContent_Base(self._shape[0], dtype=object),
                    NonAffineExpansionStorageContent_Base(self._shape[1], dtype=object)
                ]
                self._content.pop("delayed_functions_shape", None)
                self._content.pop("inner_product_matrix", None)
            # Store
            if key[1] == self._smallest_key[1]:
                # this assumes that __setitem__ is not random access but called for increasing key
                self._content["delayed_functions"][0][key[0]] = item._args[0]
            else:
                assert item._args[0] is self._content["delayed_functions"][0][key[0]]
            if "inner_product_matrix" not in self._content:
                self._content["inner_product_matrix"] = item._args[1]
            else:
                assert item._args[1] is self._content["inner_product_matrix"]
            if key[0] == self._smallest_key[0]:
                # this assumes that __setitem__ is not random access but called for increasing key
                self._content["delayed_functions"][1][key[1]] = item._args[2]
            else:
                assert item._args[2] is self._content["delayed_functions"][1][key[1]]
            # Recompute shape
            if "delayed_functions_shape" not in self._content:
                self._content["delayed_functions_shape"] = DelayedTransposeShape(
                    (item._args[0], item._args[2]))
            else:
                assert DelayedTransposeShape((item._args[0], item._args[2])) == self._content[
                    "delayed_functions_shape"]
            # Prepare precomputed slices
            if key == self._largest_key:
                # this assumes that __setitem__ is not random access but called for increasing key
                self._precomputed_slices.clear()
                self._prepare_trivial_precomputed_slice()
        else:
            raise TypeError("Invalid arguments to NonAffineExpansionStorage")

    @overload((int, tuple_of(int)), (AbstractParametrizedTensorFactory, Number))
    def __setitem__(self, key, item):
        if self._type != "empty":
            assert self._type == "operators"
        else:
            self._type = "operators"
        # Reset attributes, similarly to what is done for Vector and Matrix operators
        if key == self._smallest_key:
            # this assumes that __setitem__ is not random access but called for increasing key
            self._content.pop("truth_operators_as_expansion_storage", None)
            self._content["truth_operators"] = NonAffineExpansionStorageContent_Base(
                self._shape, dtype=object)
            self._content["basis_functions"] = list()  # will stay empty
            self._content.pop("basis_functions_shape", None)
        # Store
        if isinstance(item, Number):
            self._content["truth_operators"][key] = NumericForm(item)
        else:
            assert isinstance(item, AbstractParametrizedTensorFactory)
            assert len(item._spaces) == 0
            self._content["truth_operators"][key] = item
        # Recompute (trivial) shape
        if "basis_functions_shape" not in self._content:
            self._content["basis_functions_shape"] = DelayedTransposeShape(self._content["basis_functions"])
        # Compute truth expansion storage and prepare precomputed slices
        if key == self._largest_key:
            # this assumes that __setitem__ is not random access but called for increasing key
            self._prepare_truth_operators_as_expansion_storage()

    def _prepare_truth_operators_as_expansion_storage(self):
        from rbnics.backends import NonAffineExpansionStorage
        assert self._type == "operators"
        assert self.order() == 1
        extracted_operators = tuple(op._form for op in self._content["truth_operators"])
        assert "truth_operators_as_expansion_storage" not in self._content
        self._content["truth_operators_as_expansion_storage"] = NonAffineExpansionStorage(
            extracted_operators)
        if not all(isinstance(op, Number) for op in extracted_operators):
            problems = [get_problem_from_parametrized_operator(op)
                        for op in self._content["truth_operators"]]
            assert all([problem is problems[0] for problem in problems])
            for extracted_operator in self._content["truth_operators_as_expansion_storage"]:
                add_to_map_from_parametrized_operator_to_problem(extracted_operator, problems[0])

    def __len__(self):
        assert self._type == "operators"
        assert self.order() == 1
        return self._shape[0]

    def order(self):
        assert self._type in ("error_estimation_operators_11", "error_estimation_operators_21",
                              "error_estimation_operators_22", "operators")
        return len(self._shape)

    def _delay_transpose(self, pre_post, op):
        assert len(pre_post) in (0, 1, 2)
        if len(pre_post) == 0:
            return op
        elif len(pre_post) == 1:
            return DelayedTranspose(pre_post[0]) * op
        else:
            return DelayedTranspose(pre_post[0]) * op * pre_post[1]
def get_function_subspace(function: Function, component: (int, list_of(str), str, tuple_of(int))):
    return get_function_subspace(function.function_space(), component)
# This file is part of RBniCS.
#
# RBniCS is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RBniCS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with RBniCS. If not, see <http://www.gnu.org/licenses/>.

from ufl import Form
from rbnics.backends.basic import NonAffineExpansionStorage as BasicNonAffineExpansionStorage
from rbnics.backends.dolfin.parametrized_tensor_factory import ParametrizedTensorFactory
from rbnics.utils.decorators import BackendFor, ModuleWrapper, tuple_of

backend = ModuleWrapper(ParametrizedTensorFactory)
wrapping = ModuleWrapper()
NonAffineExpansionStorage_Base = BasicNonAffineExpansionStorage(backend, wrapping)


@BackendFor("dolfin", inputs=(tuple_of(Form), ))
class NonAffineExpansionStorage(NonAffineExpansionStorage_Base):
    pass
def get_mpi_comm(V: tuple_of(FunctionSpace)):
    assert len(V) in (1, 2)
    return get_mpi_comm(V[0])
# Copyright (C) 2015-2021 by the RBniCS authors
#
# This file is part of RBniCS.
#
# SPDX-License-Identifier: LGPL-3.0-or-later

from numbers import Number
from rbnics.backends.common.product import ProductOutput
from rbnics.utils.decorators import backend_for, list_of, overload, tuple_of

python_sum = sum


# Sum function to assemble truth/reduced affine expansions. To be used in combination with product,
# even though product actually carries out both the sum and the product!
@backend_for("common", inputs=((list_of(Number), ProductOutput, tuple_of(Number)), ))
def sum(args):
    return _sum(args)


@overload
def _sum(args: ProductOutput):
    return args.sum_product_return_value


@overload
def _sum(args: (list_of(Number), tuple_of(Number))):
    return python_sum(args)
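# A hedged usage sketch of the backend sum above: plain number collections fall back
# to Python's built-in sum, while a ProductOutput (as returned by product) is simply
# unwrapped, since product already carried out the sum.
assert sum((1.0, 2.0, 3.0)) == 6.0
assert sum(ProductOutput(42.0)) == 42.0  # assuming ProductOutput wraps sum_product_return_value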
class _FunctionsList(AbstractFunctionsList):
    def __init__(self, space, component):
        if component is None:
            self.space = space
        else:
            self.space = wrapping.get_function_subspace(space, component)
        self.mpi_comm = wrapping.get_mpi_comm(space)
        self._list = list()  # of functions
        self._precomputed_slices = Cache()  # from tuple to FunctionsList

    def enrich(self, functions, component=None, weights=None, copy=True):
        # Append to storage
        self._enrich(functions, component, weights, copy)
        # Reset precomputed slices
        self._precomputed_slices = Cache()
        # Prepare trivial precomputed slice
        self._precomputed_slices[0, len(self._list)] = self

    @overload(backend.Function.Type(), (None, str, dict_of(str, str)), (None, Number), bool)
    def _enrich(self, function, component, weight, copy):
        self._add_to_list(function, component, weight, copy)

    @overload((lambda cls: cls, list_of(backend.Function.Type()), tuple_of(backend.Function.Type())),
              (None, str, dict_of(str, str)), (None, list_of(Number)), bool)
    def _enrich(self, functions, component, weights, copy):
        if weights is not None:
            assert len(weights) == len(functions)
            for (index, function) in enumerate(functions):
                self._add_to_list(function, component, weights[index], copy)
        else:
            for function in functions:
                self._add_to_list(function, component, None, copy)

    @overload(TimeSeries, (None, str, dict_of(str, str)), (None, list_of(Number)), bool)
    def _enrich(self, functions, component, weights, copy):
        self._enrich(functions._list, component, weights, copy)

    @overload(object, (None, str, dict_of(str, str)), (None, Number, list_of(Number)), bool)
    def _enrich(self, function, component, weight, copy):
        if AdditionalIsFunction(function):
            function = ConvertAdditionalFunctionTypes(function)
            assert weight is None or isinstance(weight, Number)
            self._add_to_list(function, component, weight, copy)
        elif isinstance(function, list):
            converted_function = list()
            for function_i in function:
                if AdditionalIsFunction(function_i):
                    converted_function.append(ConvertAdditionalFunctionTypes(function_i))
                else:
                    raise RuntimeError("Invalid function provided to FunctionsList.enrich()")
            assert weight is None or isinstance(weight, list)
            self._enrich(converted_function, component, weight, copy)
        else:
            raise RuntimeError("Invalid function provided to FunctionsList.enrich()")

    @overload(backend.Function.Type(), (None, str), (None, Number), bool)
    def _add_to_list(self, function, component, weight, copy):
        self._list.append(wrapping.function_extend_or_restrict(
            function, component, self.space, component, weight, copy))

    @overload(backend.Function.Type(), dict_of(str, str), (None, Number), bool)
    def _add_to_list(self, function, component, weight, copy):
        assert len(component) == 1
        for (component_from, component_to) in component.items():
            break
        self._list.append(wrapping.function_extend_or_restrict(
            function, component_from, self.space, component_to, weight, copy))

    def clear(self):
        self._list = list()
        # Reset precomputed slices
        self._precomputed_slices.clear()

    def save(self, directory, filename):
        self._save_Nmax(directory, filename)
        for (index, function) in enumerate(self._list):
            wrapping.function_save(function, directory, filename + "_" + str(index))

    def _save_Nmax(self, directory, filename):
        def save_Nmax_task():
            with open(os.path.join(str(directory), filename + ".length"), "w") as length:
                length.write(str(len(self._list)))
        parallel_io(save_Nmax_task, self.mpi_comm)

    def load(self, directory, filename):
        if len(self._list) > 0:  # avoid loading multiple times
            return False
        Nmax = self._load_Nmax(directory, filename)
        for index in range(Nmax):
            function = backend.Function(self.space)
            wrapping.function_load(function, directory, filename + "_" + str(index))
            self.enrich(function)
        return True

    def _load_Nmax(self, directory, filename):
        def load_Nmax_task():
            with open(os.path.join(str(directory), filename + ".length"), "r") as length:
                return int(length.readline())
        return parallel_io(load_Nmax_task, self.mpi_comm)

    @overload(online_backend.OnlineMatrix.Type(), )
    def __mul__(self, other):
        return wrapping.functions_list_mul_online_matrix(self, other, type(self))

    @overload((online_backend.OnlineVector.Type(), ThetaType), )
    def __mul__(self, other):
        return wrapping.functions_list_mul_online_vector(self, other)

    @overload(online_backend.OnlineFunction.Type(), )
    def __mul__(self, other):
        return wrapping.functions_list_mul_online_vector(self, online_wrapping.function_to_vector(other))

    def __len__(self):
        return len(self._list)

    @overload(int)
    def __getitem__(self, key):
        return self._list[key]

    @overload(slice)  # e.g. key = :N, return the first N functions
    def __getitem__(self, key):
        if key.start is not None:
            start = key.start
        else:
            start = 0
        assert key.step is None
        if key.stop is not None:
            stop = key.stop
        else:
            stop = len(self._list)
        assert start <= stop
        if start < stop:
            assert start >= 0
            assert start < len(self._list)
            assert stop > 0
            assert stop <= len(self._list)
        # elif start == stop: trivial case which will result in an empty FunctionsList
        if (start, stop) not in self._precomputed_slices:
            output = _FunctionsList.__new__(type(self), self.space)
            output.__init__(self.space)
            if start < stop:
                output._list = self._list[key]
            self._precomputed_slices[start, stop] = output
        return self._precomputed_slices[start, stop]

    @overload(int, backend.Function.Type())
    def __setitem__(self, key, item):
        self._list[key] = item

    @overload(int, object)
    def __setitem__(self, key, item):
        if AdditionalIsFunction(item):
            item = ConvertAdditionalFunctionTypes(item)
            self._list[key] = item
        else:
            raise RuntimeError("Invalid function provided to FunctionsList.__setitem__()")

    def __iter__(self):
        return self._list.__iter__()
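# A hedged usage sketch of the slice overload of __getitem__ above, with a
# hypothetical functions_list instance: functions_list[:N] returns the sub-list of
# the first N stored functions, and the result is cached in _precomputed_slices so
# that repeated truncations to the same N reuse the same object.
#
#     truncated = functions_list[:5]
#     assert len(truncated) == 5
#     assert functions_list[:5] is truncated  # served from the cache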
class _AffineExpansionStorage(AbstractAffineExpansionStorage):
    def __init__(self, arg1, arg2):
        self._content = None
        self._precomputed_slices = Cache()  # from tuple to AffineExpansionStorage
        self._smallest_key = None
        self._previous_key = None
        self._largest_key = None
        # Auxiliary storage for __getitem__ slicing
        self._component_name_to_basis_component_index = None  # will be filled in in __setitem__, if required
        self._component_name_to_basis_component_length = None  # will be filled in in __setitem__, if required
        # Initialize arguments from inputs
        self._init(arg1, arg2)

    @overload((tuple_of(backend.Matrix.Type()), tuple_of(backend.Vector.Type())), None)
    def _init(self, arg1, arg2):
        self._content = AffineExpansionStorageContent_Base((len(arg1), ), dtype=object)
        self._smallest_key = 0
        self._largest_key = len(arg1) - 1
        for (i, arg1i) in enumerate(arg1):
            self[i] = arg1i

    @overload(int, None)
    def _init(self, arg1, arg2):
        self._content = AffineExpansionStorageContent_Base((arg1, ), dtype=object)
        self._smallest_key = 0
        self._largest_key = arg1 - 1

    @overload(int, int)
    def _init(self, arg1, arg2):
        self._content = AffineExpansionStorageContent_Base((arg1, arg2), dtype=object)
        self._smallest_key = (0, 0)
        self._largest_key = (arg1 - 1, arg2 - 1)

    def save(self, directory, filename):
        # Get full directory name
        full_directory = Folders.Folder(os.path.join(str(directory), filename))
        full_directory.create()
        # Exit in the trivial case of empty affine expansion
        if self._content.size == 0:
            return
        # Initialize iterator
        it = AffineExpansionStorageContent_Iterator(
            self._content, flags=["c_index", "multi_index", "refs_ok"], op_flags=["readonly"])
        # Save content item type and shape
        self._save_content_item_type_shape(self._content[it.multi_index], it, full_directory)
        # Save content
        self._save_content(self._content[it.multi_index], it, full_directory)
        # Save dicts
        self._save_dicts(full_directory)

    @overload(backend.Matrix.Type(), AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _save_content_item_type_shape(self, item, it, full_directory):
        ContentItemTypeIO.save_file("matrix", full_directory, "content_item_type")
        ContentItemShapeIO.save_file((item.M, item.N), full_directory, "content_item_shape")

    @overload(backend.Vector.Type(), AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _save_content_item_type_shape(self, item, it, full_directory):
        ContentItemTypeIO.save_file("vector", full_directory, "content_item_type")
        ContentItemShapeIO.save_file(item.N, full_directory, "content_item_shape")

    @overload(backend.Function.Type(), AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _save_content_item_type_shape(self, item, it, full_directory):
        ContentItemTypeIO.save_file("function", full_directory, "content_item_type")
        ContentItemShapeIO.save_file(item.N, full_directory, "content_item_shape")

    @overload(Number, AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _save_content_item_type_shape(self, item, it, full_directory):
        ContentItemTypeIO.save_file("scalar", full_directory, "content_item_type")
        ContentItemShapeIO.save_file(None, full_directory, "content_item_shape")

    @overload(AbstractFunctionsList, AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _save_content_item_type_shape(self, item, it, full_directory):
        ContentItemTypeIO.save_file("functions_list", full_directory, "content_item_type")
        ContentItemShapeIO.save_file(None, full_directory, "content_item_shape")

    @overload(AbstractBasisFunctionsMatrix, AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _save_content_item_type_shape(self, item, it, full_directory):
        ContentItemTypeIO.save_file("basis_functions_matrix", full_directory, "content_item_type")
        ContentItemShapeIO.save_file(None, full_directory, "content_item_shape")

    @overload(None, AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _save_content_item_type_shape(self, item, it, full_directory):
        ContentItemTypeIO.save_file("empty", full_directory, "content_item_type")
        ContentItemShapeIO.save_file(None, full_directory, "content_item_shape")

    @overload(backend.Matrix.Type(), AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _save_content(self, item, it, full_directory):
        while not it.finished:
            wrapping.tensor_save(self._content[it.multi_index], full_directory,
                                 "content_item_" + str(it.index))
            it.iternext()

    @overload(backend.Vector.Type(), AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _save_content(self, item, it, full_directory):
        while not it.finished:
            wrapping.tensor_save(self._content[it.multi_index], full_directory,
                                 "content_item_" + str(it.index))
            it.iternext()

    @overload(backend.Function.Type(), AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _save_content(self, item, it, full_directory):
        while not it.finished:
            wrapping.function_save(self._content[it.multi_index], full_directory,
                                   "content_item_" + str(it.index))
            it.iternext()

    @overload(Number, AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _save_content(self, item, it, full_directory):
        while not it.finished:
            ScalarContentIO.save_file(self._content[it.multi_index], full_directory,
                                      "content_item_" + str(it.index))
            it.iternext()

    @overload(AbstractFunctionsList, AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _save_content(self, item, it, full_directory):
        while not it.finished:
            self._content[it.multi_index].save(full_directory, "content_item_" + str(it.index))
            it.iternext()

    @overload(AbstractBasisFunctionsMatrix, AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _save_content(self, item, it, full_directory):
        while not it.finished:
            self._content[it.multi_index].save(full_directory, "content_item_" + str(it.index))
            it.iternext()

    @overload(None, AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _save_content(self, item, it, full_directory):
        pass

    def _save_dicts(self, full_directory):
        DictIO.save_file(self._component_name_to_basis_component_index, full_directory,
                         "component_name_to_basis_component_index")
        DictIO.save_file(self._component_name_to_basis_component_length, full_directory,
                         "component_name_to_basis_component_length")

    def load(self, directory, filename):
        if self._content is not None:  # avoid loading multiple times
            if self._content.size > 0:
                it = AffineExpansionStorageContent_Iterator(
                    self._content, flags=["multi_index", "refs_ok"], op_flags=["readonly"])
                while not it.finished:
                    if self._content[it.multi_index] is not None:
                        # ... but only if there is at least one element different from None
                        if isinstance(self._content[it.multi_index], AbstractFunctionsList):
                            if len(self._content[it.multi_index]) > 0:
                                # ... unless it is an empty FunctionsList
                                return False
                        elif isinstance(self._content[it.multi_index], AbstractBasisFunctionsMatrix):
                            if sum(self._content[it.multi_index]
                                   ._component_name_to_basis_component_length.values()) > 0:
                                # ... unless it is an empty BasisFunctionsMatrix
                                return False
                        else:
                            return False
                    it.iternext()
        # Get full directory name
        full_directory = Folders.Folder(os.path.join(str(directory), filename))
        # Exit in the trivial case of empty affine expansion
        if self._content.size == 0:
            return True
        # Load content item type and shape
        reference_item = self._load_content_item_type_shape(full_directory)
        # Initialize iterator
        it = AffineExpansionStorageContent_Iterator(
            self._content, flags=["c_index", "multi_index", "refs_ok"])
        # Load content
        self._load_content(reference_item, it, full_directory)
        # Load dicts
        self._load_dicts(full_directory)
        # Reset precomputed slices
        self._precomputed_slices.clear()
        self._prepare_trivial_precomputed_slice(reference_item)
        # Return
        return True

    def _load_content_item_type_shape(self, full_directory):
        assert ContentItemTypeIO.exists_file(full_directory, "content_item_type")
        content_item_type = ContentItemTypeIO.load_file(full_directory, "content_item_type")
        assert ContentItemShapeIO.exists_file(full_directory, "content_item_shape")
        assert content_item_type in ("matrix", "vector", "function", "scalar", "functions_list",
                                     "basis_functions_matrix", "empty")
        if content_item_type == "matrix":
            (M, N) = ContentItemShapeIO.load_file(
                full_directory, "content_item_shape", globals={"OnlineSizeDict": OnlineSizeDict})
            return backend.Matrix(M, N)
        elif content_item_type == "vector":
            N = ContentItemShapeIO.load_file(
                full_directory, "content_item_shape", globals={"OnlineSizeDict": OnlineSizeDict})
            return backend.Vector(N)
        elif content_item_type == "function":
            N = ContentItemShapeIO.load_file(
                full_directory, "content_item_shape", globals={"OnlineSizeDict": OnlineSizeDict})
            return backend.Function(N)
        elif content_item_type == "scalar":
            return 0.
        elif content_item_type == "functions_list":
            # self._content has already been populated with empty items
            assert isinstance(self._content[self._smallest_key], AbstractFunctionsList)
            return self._content[self._smallest_key]
        elif content_item_type == "basis_functions_matrix":
            # self._content has already been populated with empty items
            assert isinstance(self._content[self._smallest_key], AbstractBasisFunctionsMatrix)
            return self._content[self._smallest_key]
        elif content_item_type == "empty":
            return None
        else:  # impossible to arrive here anyway thanks to the assert
            raise ValueError("Invalid content item type.")

    @overload(backend.Matrix.Type(), AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _load_content(self, item, it, full_directory):
        while not it.finished:
            self._content[it.multi_index] = wrapping.tensor_copy(item)
            wrapping.tensor_load(self._content[it.multi_index], full_directory,
                                 "content_item_" + str(it.index))
            it.iternext()

    @overload(backend.Vector.Type(), AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _load_content(self, item, it, full_directory):
        while not it.finished:
            self._content[it.multi_index] = wrapping.tensor_copy(item)
            wrapping.tensor_load(self._content[it.multi_index], full_directory,
                                 "content_item_" + str(it.index))
            it.iternext()

    @overload(backend.Function.Type(), AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _load_content(self, item, it, full_directory):
        while not it.finished:
            self._content[it.multi_index] = wrapping.function_copy(item)
            wrapping.function_load(self._content[it.multi_index], full_directory,
                                   "content_item_" + str(it.index))
            it.iternext()

    @overload(Number, AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _load_content(self, item, it, full_directory):
        while not it.finished:
            self._content[it.multi_index] = ScalarContentIO.load_file(
                full_directory, "content_item_" + str(it.index))
            it.iternext()

    @overload(AbstractFunctionsList, AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _load_content(self, item, it, full_directory):
        while not it.finished:
            self._content[it.multi_index].load(full_directory, "content_item_" + str(it.index))
            it.iternext()

    @overload(AbstractBasisFunctionsMatrix, AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _load_content(self, item, it, full_directory):
        while not it.finished:
            self._content[it.multi_index].load(full_directory, "content_item_" + str(it.index))
            it.iternext()

    @overload(None, AffineExpansionStorageContent_Iterator, Folders.Folder)
    def _load_content(self, item, it, full_directory):
        pass

    def _load_dicts(self, full_directory):
        assert DictIO.exists_file(full_directory, "component_name_to_basis_component_index")
        self._component_name_to_basis_component_index = DictIO.load_file(
            full_directory, "component_name_to_basis_component_index",
            globals={"ComponentNameToBasisComponentIndexDict": ComponentNameToBasisComponentIndexDict})
        assert DictIO.exists_file(full_directory, "component_name_to_basis_component_length")
        self._component_name_to_basis_component_length = DictIO.load_file(
            full_directory, "component_name_to_basis_component_length",
            globals={"OnlineSizeDict": OnlineSizeDict})
        it = AffineExpansionStorageContent_Iterator(
            self._content, flags=["multi_index", "refs_ok"], op_flags=["readonly"])
        while not it.finished:
            if self._component_name_to_basis_component_index is not None:
                self._content[it.multi_index]._component_name_to_basis_component_index = (
                    self._component_name_to_basis_component_index)
            if self._component_name_to_basis_component_length is not None:
                self._content[it.multi_index]._component_name_to_basis_component_length = (
                    self._component_name_to_basis_component_length)
            it.iternext()

    @overload(backend.Matrix.Type(), )
    def _prepare_trivial_precomputed_slice(self, item):
        empty_slice = slice(None)
        slices = slice_to_array(
            item, (empty_slice, empty_slice), self._component_name_to_basis_component_length,
            self._component_name_to_basis_component_index)
        self._precomputed_slices[slices] = self

    @overload(backend.Vector.Type(), )
    def _prepare_trivial_precomputed_slice(self, item):
        empty_slice = slice(None)
        slices = slice_to_array(
            item, empty_slice, self._component_name_to_basis_component_length,
            self._component_name_to_basis_component_index)
        self._precomputed_slices[slices] = self

    @overload(backend.Function.Type(), )
    def _prepare_trivial_precomputed_slice(self, item):
        empty_slice = slice(None)
        slices = slice_to_array(
            item.vector, empty_slice, self._component_name_to_basis_component_length,
            self._component_name_to_basis_component_index)
        self._precomputed_slices[slices] = self

    @overload(Number, )
    def _prepare_trivial_precomputed_slice(self, item):
        pass

    @overload(AbstractFunctionsList, )
    def _prepare_trivial_precomputed_slice(self, item):
        pass

    @overload(AbstractBasisFunctionsMatrix, )
    def _prepare_trivial_precomputed_slice(self, item):
        pass

    @overload(None, )
    def _prepare_trivial_precomputed_slice(self, item):
        pass

    @overload((slice, tuple_of(slice)), )
    def __getitem__(self, key):
        """
        return the subtensors of size "key" for every element in content. (e.g.
submatrices [1:5,1:5] of the affine expansion of A) """ it = AffineExpansionStorageContent_Iterator( self._content, flags=["multi_index", "refs_ok"], op_flags=["readonly"]) slices = slice_to_array( self._content[it.multi_index], key, self._component_name_to_basis_component_length, self._component_name_to_basis_component_index) if slices in self._precomputed_slices: return self._precomputed_slices[slices] else: output = _AffineExpansionStorage.__new__( type(self), *self._content.shape) output.__init__(*self._content.shape) while not it.finished: # Slice content and assign output[it.multi_index] = self._do_slicing( self._content[it.multi_index], key) # Increment it.iternext() self._precomputed_slices[slices] = output return output @overload( (int, tuple_of(int)), ) def __getitem__(self, key): """ return the element at position "key" in the storage (e.g. q-th matrix in the affine expansion of A, q = 1 ... Qa) """ return self._content[key] @overload(backend.Matrix.Type(), (slice, tuple_of(slice))) def _do_slicing(self, item, key): return item[key] @overload(backend.Vector.Type(), (slice, tuple_of(slice))) def _do_slicing(self, item, key): return item[key] @overload(backend.Function.Type(), (slice, tuple_of(slice))) def _do_slicing(self, item, key): return backend.Function(item.vector()[key]) def __setitem__(self, key, item): assert not isinstance( key, slice ) # only able to set the element at position "key" in the storage # Check that __getitem__ is not random acces but called for increasing key and store current key self._assert_setitem_order(key) self._update_previous_key(key) # Store item self._content[key] = item # Reset attributes related to basis functions matrix if the size has changed if key == self._smallest_key: # this assumes that __getitem__ is not random acces but called for increasing key self._component_name_to_basis_component_index = None self._component_name_to_basis_component_length = None # Also store attributes related to basis functions matrix for __getitem__ slicing assert isinstance( item, ( backend.Matrix.Type(), # output e.g. of Z^T*A*Z backend.Vector.Type(), # output e.g. 
of Z^T*F backend.Function.Type( ), # for initial conditions of unsteady problems Number, # output of Riesz_F^T*X*Riesz_F AbstractFunctionsList, # auxiliary storage of Riesz representors AbstractBasisFunctionsMatrix # auxiliary storage of Riesz representors )) if isinstance(item, backend.Function.Type()): item = item.vector() if isinstance(item, (backend.Matrix.Type(), backend.Vector.Type(), AbstractBasisFunctionsMatrix)): assert ( self._component_name_to_basis_component_index is None) == ( self._component_name_to_basis_component_length is None) if self._component_name_to_basis_component_index is None: self._component_name_to_basis_component_index = item._component_name_to_basis_component_index self._component_name_to_basis_component_length = item._component_name_to_basis_component_length else: assert self._component_name_to_basis_component_index == item._component_name_to_basis_component_index assert self._component_name_to_basis_component_length == item._component_name_to_basis_component_length else: assert self._component_name_to_basis_component_index is None assert self._component_name_to_basis_component_length is None # Reset and prepare precomputed slices if key == self._largest_key: # this assumes that __getitem__ is not random acces but called for increasing key self._precomputed_slices.clear() self._prepare_trivial_precomputed_slice(item) @overload(int) def _assert_setitem_order(self, current_key): if self._previous_key is None: assert current_key == 0 else: assert current_key == (self._previous_key + 1) % (self._largest_key + 1) @overload(int, int) def _assert_setitem_order(self, current_key_0, current_key_1): if self._previous_key is None: assert current_key_0 == 0 assert current_key_1 == 0 else: expected_key_1 = (self._previous_key[1] + 1) % (self._largest_key[1] + 1) if expected_key_1 is 0: expected_key_0 = (self._previous_key[0] + 1) % (self._largest_key[0] + 1) else: expected_key_0 = self._previous_key[0] assert current_key_0 == expected_key_0 assert current_key_1 == expected_key_1 @overload(tuple_of(int)) def _assert_setitem_order(self, current_key): self._assert_setitem_order(*current_key) @overload(int) def _update_previous_key(self, current_key): self._previous_key = current_key @overload(int, int) def _update_previous_key(self, current_key_0, current_key_1): self._previous_key = (current_key_0, current_key_1) @overload(tuple_of(int)) def _update_previous_key(self, current_key): self._update_previous_key(*current_key) def __iter__(self): return AffineExpansionStorageContent_Iterator( self._content, flags=["refs_ok"], op_flags=["readonly"]) def __len__(self): assert self.order() == 1 return self._content.size def order(self): assert self._content is not None return len(self._content.shape)
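# Hedged usage sketch (not part of the library): it illustrates the two protocols
# implemented above, namely the increasing-key fill order enforced by
# _assert_setitem_order and the slice caching performed by __getitem__ together with
# _prepare_trivial_precomputed_slice. It assumes an RBniCS installation whose numpy
# online backend re-exports AffineExpansionStorage and Matrix (see the module below);
# "Qa" and "storage" are hypothetical names, and the 4x4 size is arbitrary.
from rbnics.backends.online.numpy import AffineExpansionStorage, Matrix

Qa = 3
storage = AffineExpansionStorage(Qa)
for q in range(Qa):
    storage[q] = Matrix(4, 4)   # keys must arrive in order 0, 1, ..., Qa - 1
A0 = storage[0]                 # integer key: the q-th term of the expansion
A_sub = storage[0:2, 0:2]       # tuple of slices: the [0:2, 0:2] subtensor of every
                                # term, built once and then cached in _precomputed_slices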
# SPDX-License-Identifier: LGPL-3.0-or-later

from rbnics.backends.online.basic import AffineExpansionStorage as BasicAffineExpansionStorage
from rbnics.backends.online.numpy.copy import function_copy, tensor_copy
from rbnics.backends.online.numpy.function import Function
from rbnics.backends.online.numpy.matrix import Matrix
from rbnics.backends.online.numpy.vector import Vector
from rbnics.backends.online.numpy.wrapping import function_load, function_save, tensor_load, tensor_save
from rbnics.utils.decorators import BackendFor, ModuleWrapper, tuple_of

backend = ModuleWrapper(Function, Matrix, Vector)
wrapping = ModuleWrapper(function_load, function_save, tensor_load, tensor_save,
                         function_copy=function_copy, tensor_copy=tensor_copy)
AffineExpansionStorage_Base = BasicAffineExpansionStorage(backend, wrapping)


@BackendFor("numpy", inputs=((int, tuple_of(Matrix.Type()), tuple_of(Vector.Type())), (int, None)))
class AffineExpansionStorage(AffineExpansionStorage_Base):
    def __init__(self, arg1, arg2=None):
        AffineExpansionStorage_Base.__init__(self, arg1, arg2)
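# Hedged sketch (assumes an RBniCS installation; the directory and file names below are
# hypothetical): the numpy storage inherits save/load from the basic backend above, so a
# filled expansion can be round-tripped through disk. load returns True on success and
# False when the target storage already holds non-empty content.
def _save_load_round_trip_sketch():
    storage = AffineExpansionStorage(2)
    storage[0] = Matrix(4, 4)
    storage[1] = Matrix(4, 4)
    storage.save("offline_data", "operator_a")
    reloaded = AffineExpansionStorage(2)    # content is still empty, so load proceeds
    assert reloaded.load("offline_data", "operator_a")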
def get_function_subspace(function_space: FunctionSpace, component: tuple_of(int)):
    return function_space.extract_sub_space(component).collapse()
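# Hedged sketch (assumes FEniCS/dolfin is installed; the mesh and elements are
# hypothetical): extracting the collapsed velocity subspace of a Taylor-Hood space
# through the helper above. dolfin's extract_sub_space/collapse are expected to accept
# these arguments, but exact signatures may vary across dolfin versions.
from dolfin import FiniteElement, MixedElement, UnitSquareMesh, VectorElement

mesh = UnitSquareMesh(8, 8)
taylor_hood = MixedElement(VectorElement("Lagrange", mesh.ufl_cell(), 2),
                           FiniteElement("Lagrange", mesh.ufl_cell(), 1))
W = FunctionSpace(mesh, taylor_hood)
V = get_function_subspace(W, (0, ))  # collapsed subspace of the first (velocity) block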
# SPDX-License-Identifier: LGPL-3.0-or-later

# from rbnics.backends.online.basic import evaluate as basic_evaluate
# from rbnics.backends.online.numpy.function import Function
from rbnics.backends.online.numpy.matrix import Matrix
# from rbnics.backends.online.numpy.parametrized_expression_factory import ParametrizedExpressionFactory
# from rbnics.backends.online.numpy.parametrized_tensor_factory import ParametrizedTensorFactory
# from rbnics.backends.online.numpy.reduced_mesh import ReducedMesh
# from rbnics.backends.online.numpy.reduced_vertices import ReducedVertices
# from rbnics.backends.online.numpy.tensors_list import TensorsList
from rbnics.backends.online.numpy.vector import Vector
from rbnics.utils.decorators import backend_for, tuple_of

# backend = ModuleWrapper(Function, FunctionsList, Matrix, ParametrizedExpressionFactory, ParametrizedTensorFactory,
#                         ReducedMesh, ReducedVertices, TensorsList, Vector)
# wrapping = ModuleWrapper(evaluate_and_vectorize_sparse_matrix_at_dofs, evaluate_sparse_function_at_dofs,
#                          evaluate_sparse_vector_at_dofs, expression_on_reduced_mesh, expression_on_truth_mesh,
#                          form_on_reduced_function_space, form_on_truth_function_space)
# online_backend = ModuleWrapper(OnlineFunction=Function, OnlineMatrix=Matrix, OnlineVector=Vector)
# online_wrapping = ModuleWrapper()
# evaluate_base = basic_evaluate(backend, wrapping, online_backend, online_wrapping)
evaluate_base = None  # TODO


# Evaluate a parametrized expression, possibly at a specific location
@backend_for("numpy", inputs=((Matrix.Type(), Vector.Type()), (tuple_of(int), tuple_of(tuple_of(int)), None)))
def evaluate(expression, at=None):
    return evaluate_base(expression, at)
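# Hedged note (not part of the module): once evaluate_base is implemented, the intended
# dispatch would mirror the basic backend's evaluate, e.g. for a hypothetical online
# matrix A:
#
#     a_01 = evaluate(A, at=(0, 1))   # a single entry, with "at" a (row, column) pair
#     A_all = evaluate(A)             # at=None returns the whole tensor unchanged
#
# Until then, calling evaluate raises TypeError because evaluate_base is still None.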
# Copyright (C) 2015-2020 by the RBniCS authors
#
# This file is part of RBniCS.
#
# SPDX-License-Identifier: LGPL-3.0-or-later

from rbnics.backends.online.basic import NonAffineExpansionStorage as BasicNonAffineExpansionStorage
from rbnics.backends.online.numpy.matrix import Matrix
from rbnics.backends.online.numpy.vector import Vector
from rbnics.utils.decorators import BackendFor, tuple_of

NonAffineExpansionStorage_Base = BasicNonAffineExpansionStorage


@BackendFor("numpy", inputs=((int, tuple_of(Matrix.Type()), tuple_of(Vector.Type())), (int, None)))
class NonAffineExpansionStorage(NonAffineExpansionStorage_Base):
    pass
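# Hedged sketch (assumes an RBniCS installation): construction mirrors the affine
# storage, since the @BackendFor inputs above are identical; all behavior is inherited
# unchanged from BasicNonAffineExpansionStorage. The shape below is hypothetical.
storage = NonAffineExpansionStorage(2)  # one-dimensional expansion with two terms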