    def localize_to_subdomain_with_global_support(self, U, ss):
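        # Extend the single vector in U, which belongs to subdomain ss, by zero onto all
        # other subdomains and wrap the result as a discrete function on the global block space.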
        assert len(U) == 1

        return make_discrete_function(
            self.block_space,
            self.block_space.project_onto_neighborhood(
                [U._list[0].impl if nn == ss else Vector(self.block_space.local_space(nn).size(), 0.)
                 for nn in range(self.grid.num_subdomains)],
                [nn for nn in range(self.grid.num_subdomains)]
            )
        )

    def _apply(self, U, mu=None):
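        # Compute U minus its Oswald interpolation, block by block, on the neighborhood
        # of self.subdomain.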
        from dune.gdt import apply_oswald_interpolation_operator

        assert len(U) == 1
        assert U in self.source

        result = self.range.zeros()
        result._blocks[self.neighborhood.index(self.subdomain)].axpy(1, U)

        for i_ii, ii in enumerate(self.neighborhood):
            ii_neighborhood = self.grid.neighborhood_of(ii)
            ii_neighborhood_space = self.block_space.restricted_to_neighborhood(ii_neighborhood)

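            # Extend U, which lives on self.subdomain, by zero onto the neighborhood of ii.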
            subdomain_uh_with_neighborhood_support = make_discrete_function(
                ii_neighborhood_space,
                ii_neighborhood_space.project_onto_neighborhood(
                    [U._list[0].impl if nn == self.subdomain else Vector(self.block_space.local_space(nn).size(), 0.)
                     for nn in ii_neighborhood],
                    ii_neighborhood
                )
            )

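            # Zero-initialized target function on the neighborhood of ii that will receive
            # the Oswald interpolant.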
            interpolated_u_vector = ii_neighborhood_space.project_onto_neighborhood(
                [Vector(self.block_space.local_space(nn).size(), 0.) for nn in ii_neighborhood], ii_neighborhood)
            interpolated_u = make_discrete_function(ii_neighborhood_space, interpolated_u_vector)

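            # Interpolate on the neighborhood of ii, with all-Dirichlet boundary treatment.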
            apply_oswald_interpolation_operator(
                self.grid, ii,
                make_subdomain_boundary_info(self.grid, {'type': 'xt.grid.boundaryinfo.alldirichlet'}),
                subdomain_uh_with_neighborhood_support,
                interpolated_u
            )

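            # Subtract the part of the interpolant belonging to subdomain ii from the
            # corresponding block of the result; the offsets locate ii's DoFs within the
            # flat vector of the neighborhood space.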
            local_sizes = np.array([ii_neighborhood_space.local_space(nn).size() for nn in ii_neighborhood])
            offsets = np.hstack(([0], np.cumsum(local_sizes)))
            ind = ii_neighborhood.index(ii)
            result._blocks[i_ii]._list[0].data[:] -= np.frombuffer(interpolated_u_vector)[offsets[ind]:offsets[ind+1]]

        return result

    def shape_functions(self, subdomain, order=0):
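        # Order 0 yields a single vector of ones; order 1 additionally appends the
        # projections of x, y and x*y onto the local space.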
        assert 0 <= order <= 1
        local_space = self.solution_space.subspaces[subdomain]
        U = local_space.make_array([Vector(local_space.dim, 1.)])

        if order == 1:
            from dune.gdt import make_discrete_function, project
            dune_local_space = self.visualizer.space.local_space(subdomain)
            tmp_discrete_function = make_discrete_function(dune_local_space)
            for expression in ('x[0]', 'x[1]', 'x[0]*x[1]'):
                # NOTE: `grid` is assumed to be available in the surrounding scope here.
                func = make_expression_function_1x1(grid, 'x', expression, order=2)
                project(func, tmp_discrete_function)
                U.append(local_space.make_array([tmp_discrete_function.vector_copy()]))

        return U

    def _apply(self, U, mu=None):
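        # Reconstruct the diffusive flux of U, extended by zero outside subdomain self.kk,
        # into the global Raviart-Thomas space (the reconstruction acts on the neighborhood
        # of self.kk).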
        from dune.gdt import (
            RS2017_apply_diffusive_flux_reconstruction_in_neighborhood as apply_diffusive_flux_reconstruction_in_neighborhood
        )

        assert len(U) == 1
        assert U in self.source

        subdomain_uhs_with_global_support = self.localize_to_subdomain_with_global_support(U, self.kk)

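        # The reconstruction is written into a function on the global RT space.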
        reconstructed_uh_kk_with_global_support = make_discrete_function(self.global_rt_space)
        apply_diffusive_flux_reconstruction_in_neighborhood(
            self.grid, self.kk,
            self.lambda_xi_prime, self.kappa,
            subdomain_uhs_with_global_support,
            reconstructed_uh_kk_with_global_support)
        return self.range.make_array([reconstructed_uh_kk_with_global_support.vector_copy()])
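
    # Solve the local corrector problem on the neighborhood of the given subdomain, using the
    # current solution as Dirichlet data, and return the correction restricted to that subdomain.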
    def solve_for_local_correction(self, subdomain, Us, mu=None):
        grid, local_boundary_info, affine_lambda, kappa, f, block_space = self.enrichment_data
        neighborhood = self.neighborhoods[subdomain]
        neighborhood_space = block_space.restricted_to_neighborhood(neighborhood)
        # Compute the current solution restricted to the neighborhood to be usable as Dirichlet values for the
        # correction problem.
        current_solution = [U._list for U in Us]
        assert all(len(v) == 1 for v in current_solution)
        current_solution = [v[0].impl for v in current_solution]
        current_solution = neighborhood_space.project_onto_neighborhood(current_solution, neighborhood)
        current_solution = make_discrete_function(neighborhood_space, current_solution)
        # Solve the local corrector problem.
        #   LHS
        ops = []
        for lambda_ in affine_lambda['functions']:
            ops.append(make_elliptic_swipdg_matrix_operator_on_neighborhood(
                grid, subdomain, local_boundary_info,
                neighborhood_space,
                lambda_, kappa,
                over_integrate=0))
        ops_coeffs = affine_lambda['coefficients'].copy()
        #   RHS
        funcs = []
        for lambda_ in affine_lambda['functions']:
            funcs.append(make_elliptic_swipdg_vector_functional_on_neighborhood(
                grid, subdomain, local_boundary_info,
                neighborhood_space,
                current_solution, lambda_, kappa,
                over_integrate=0))
        funcs_coeffs = affine_lambda['coefficients'].copy()
        funcs.append(make_l2_vector_functional_on_neighborhood(
            grid, subdomain,
            neighborhood_space,
            f,
            over_integrate=0))
        funcs_coeffs.append(1.)
        #   assemble in one grid walk
        neighborhood_assembler = make_neighborhood_system_assembler(grid, subdomain, neighborhood_space)
        for op in ops:
            neighborhood_assembler.append(op)
        for func in funcs:
            neighborhood_assembler.append(func)
        neighborhood_assembler.assemble()
        # solve
        local_space_id = self.solution_space.subspaces[subdomain].id
        lhs = LincombOperator([DuneXTMatrixOperator(o.matrix(), source_id=local_space_id, range_id=local_space_id)
                               for o in ops], ops_coeffs)
        rhs = LincombOperator([VectorFunctional(lhs.range.make_array([v.vector()])) for v in funcs], funcs_coeffs)
        correction = lhs.apply_inverse(rhs.as_source_array(mu), mu=mu)
        assert len(correction) == 1
        # restrict to subdomain
        local_sizes = [block_space.local_space(nn).size() for nn in neighborhood]
        local_starts = [int(np.sum(local_sizes[:nn])) for nn in range(len(local_sizes))]
        local_starts.append(neighborhood_space.mapper.size)
        localized_corrections_as_np = np.array(correction._list[0].impl, copy=False)
        localized_corrections_as_np = [localized_corrections_as_np[local_starts[nn]:local_starts[nn + 1]]
                                       for nn in range(len(local_sizes))]
        subdomain_index_in_neighborhood = np.where(np.array(list(neighborhood)) == subdomain)[0]
        assert len(subdomain_index_in_neighborhood) == 1
        subdomain_index_in_neighborhood = subdomain_index_in_neighborhood[0]
        subdomain_correction = Vector(local_sizes[subdomain_index_in_neighborhood], 0.)
        subdomain_correction_as_np = np.array(subdomain_correction, copy=False)
        subdomain_correction_as_np[:] = localized_corrections_as_np[subdomain_index_in_neighborhood][:]
        return self.solution_space.subspaces[subdomain].make_array([subdomain_correction])