    def apply(self, U, mu=None):
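        # For each vector of U (a solution on self.subdomain), reconstruct a
        # diffusive flux in the neighborhood of self.subdomain and return it as
        # a block vector over the RT spaces of the neighboring subdomains.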
        assert U in self.source
        result = self.range.empty(reserve=len(U))
        local_subdomains, num_local_subdomains, num_global_subdomains = _get_subdomains(
            self.grid)
        for u_i in range(len(U)):
            subdomain_uhs_with_global_support = \
                make_discrete_function(
                    self.block_space,
                    self.block_space.project_onto_neighborhood(
                        [U._list[u_i].impl if nn == self.subdomain else
                         Vector(self.block_space.local_space(nn).size(), 0.)
                         for nn in range(num_global_subdomains)],
                        [nn for nn in range(num_global_subdomains)]
                    )
                )

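            # Reconstruct the diffusive flux on the neighborhood of
            # self.subdomain into an RT discrete function with global support.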
            reconstructed_uh_kk_with_global_support = make_discrete_function(
                self.global_rt_space)
            apply_diffusive_flux_reconstruction_in_neighborhood(
                self.grid, self.subdomain, self.lambda_xi, self.kappa,
                subdomain_uhs_with_global_support,
                reconstructed_uh_kk_with_global_support)

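            # Restrict the reconstructed flux to the RT space of each subdomain
            # in the neighborhood and collect the pieces in one block vector.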
            blocks = [
                s.make_array([
                    self.subdomain_rt_spaces[ii].restrict(
                        reconstructed_uh_kk_with_global_support.vector_copy())
                ])  # NOQA
                for s, ii in zip(self.range.subspaces,
                                 self.grid.neighborhood_of(self.subdomain))
            ]
            result.append(self.range.make_array(blocks))

        return result

    def apply(self, U, mu=None):
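        # For each vector of U (a solution on self.subdomain, extended by zero
        # to the other subdomains), subtract its Oswald interpolation block by
        # block over the neighborhood of self.subdomain.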
        assert U in self.source
        results = self.range.empty(reserve=len(U))
        for u_i in range(len(U)):
            result = self.range.zeros()
            result._blocks[self.neighborhood.index(self.subdomain)].axpy(
                1, U[u_i])

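            # For every subdomain ii of the neighborhood, compute the Oswald
            # interpolant on the neighborhood of ii and subtract the DoFs
            # belonging to ii from the corresponding block of the result.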
            for i_ii, ii in enumerate(self.neighborhood):
                ii_neighborhood = self.grid.neighborhood_of(ii)
                ii_neighborhood_space = self.block_space.restricted_to_neighborhood(
                    ii_neighborhood)

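                # Represent U on the neighborhood of ii (zero outside
                # self.subdomain) so the Oswald operator can act locally.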
                subdomain_uh_with_neighborhood_support = make_discrete_function(
                    ii_neighborhood_space,
                    ii_neighborhood_space.project_onto_neighborhood([
                        U._list[u_i].impl if nn == self.subdomain else Vector(
                            self.block_space.local_space(nn).size(), 0.)
                        for nn in ii_neighborhood
                    ], ii_neighborhood))

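                # Allocate a zero discrete function on the neighborhood of ii
                # that receives the Oswald interpolant.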
                interpolated_u_vector = ii_neighborhood_space.project_onto_neighborhood(
                    [
                        Vector(self.block_space.local_space(nn).size(), 0.)
                        for nn in ii_neighborhood
                    ], ii_neighborhood)
                interpolated_u = make_discrete_function(
                    ii_neighborhood_space, interpolated_u_vector)

                apply_oswald_interpolation_operator(
                    self.grid, ii,
                    make_subdomain_boundary_info(
                        self.grid,
                        {'type': 'xt.grid.boundaryinfo.alldirichlet'}),
                    subdomain_uh_with_neighborhood_support, interpolated_u)

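                # Locate the DoFs of subdomain ii inside the flat neighborhood
                # vector and subtract them from the corresponding result block.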
                local_sizes = np.array([
                    ii_neighborhood_space.local_space(nn).size()
                    for nn in ii_neighborhood
                ])
                offsets = np.hstack(([0], np.cumsum(local_sizes)))
                ind = ii_neighborhood.index(ii)
                result._blocks[i_ii]._list[0].data[:] -= \
                    np.frombuffer(interpolated_u_vector)[offsets[ind]:offsets[ind+1]]
            results.append(result)

        return results

    def shape_functions(self, subdomain, order=0):
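        # Return shape functions on the given subdomain: the constant function
        # for order 0, additionally x, y and x*y for order 1.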
        assert 0 <= order <= 1
        local_space = self.solution_space.subspaces[subdomain]
        U = local_space.make_array([Vector(local_space.dim, 1.)])

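        # Project each monomial onto the local DUNE space and append the
        # resulting DoF vector.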
        if order == 1:
            dune_local_space = self.visualizer.space.local_space(subdomain)
            tmp_discrete_function = make_discrete_function(dune_local_space)
            for expression in ('x[0]', 'x[1]', 'x[0]*x[1]'):
                func = make_expression_function_1x1(self.grid,
                                                    'x',
                                                    expression,
                                                    order=2)
                dune_project(func, tmp_discrete_function)
                U.append(
                    local_space.make_array(
                        [tmp_discrete_function.vector_copy()]))

        return U

    def solve_for_local_correction(self,
                                   subdomain,
                                   Us,
                                   mu=None,
                                   inverse_options=None):
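        # Assemble and solve the corrector problem on the neighborhood of the
        # given subdomain: one SWIPDG matrix operator per affine component of
        # lambda on the left-hand side, an L2 functional for the source f on
        # the right-hand side; the solution is restricted to the subdomain.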
        grid, local_boundary_info, affine_lambda, kappa, f, block_space = self.enrichment_data
        neighborhood = self.neighborhoods[subdomain]
        neighborhood_space = block_space.restricted_to_neighborhood(
            neighborhood)
        # Compute current solution restricted to the neighborhood to be usable as Dirichlet values for the correction
        # problem.
        current_solution = [U._list for U in Us]
        assert all(len(v) == 1 for v in current_solution)
        current_solution = [v[0].impl for v in current_solution]
        current_solution = neighborhood_space.project_onto_neighborhood(
            current_solution, neighborhood)
        current_solution = make_discrete_function(neighborhood_space,
                                                  current_solution)
        # Solve the local corrector problem.
        #   LHS
        ops = []
        for lambda_ in affine_lambda['functions']:
            ops.append(
                make_elliptic_swipdg_matrix_operator_on_neighborhood(
                    grid,
                    subdomain,
                    local_boundary_info,
                    neighborhood_space,
                    lambda_,
                    kappa,
                    over_integrate=0))
        ops_coeffs = affine_lambda['coefficients'].copy()
        #   RHS
        funcs = []

        # We don't have any boundary treatment right now. Things will probably
        # break in multiple ways in case of non-trivial boundary conditions,
        # so we can comment this out for now ..

        # for lambda_ in affine_lambda['functions']:
        #     funcs.append(make_elliptic_swipdg_vector_functional_on_neighborhood(
        #         grid, subdomain, local_boundary_info,
        #         neighborhood_space,
        #         current_solution, lambda_, kappa,
        #         over_integrate=0))
        # funcs_coeffs = affine_lambda['coefficients'].copy()
        funcs.append(
            make_l2_vector_functional_on_neighborhood(grid,
                                                      subdomain,
                                                      neighborhood_space,
                                                      f,
                                                      over_integrate=2))
        # funcs_coeffs.append(1.)
        funcs_coeffs = [1]
        #   assemble in one grid walk
        neighborhood_assembler = make_neighborhood_system_assembler(
            grid, subdomain, neighborhood_space)
        for op in ops:
            neighborhood_assembler.append(op)
        for func in funcs:
            neighborhood_assembler.append(func)
        neighborhood_assembler.assemble()
        # solve
        local_space_id = self.solution_space.subspaces[subdomain].id
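        # Wrap the assembled DUNE matrices and vectors as pyMOR Operators so
        # that the parametric system can be solved via apply_inverse.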
        lhs = LincombOperator([
            DuneXTMatrixOperator(
                o.matrix(), source_id=local_space_id, range_id=local_space_id)
            for o in ops
        ], ops_coeffs)
        rhs = LincombOperator([
            VectorFunctional(lhs.range.make_array([v.vector()])) for v in funcs
        ], funcs_coeffs)
        correction = lhs.apply_inverse(rhs.as_source_array(mu),
                                       mu=mu,
                                       inverse_options=inverse_options)
        assert len(correction) == 1
        # restrict to subdomain
        local_sizes = [
            block_space.local_space(nn).size() for nn in neighborhood
        ]
        local_starts = [
            int(np.sum(local_sizes[:nn])) for nn in range(len(local_sizes))
        ]
        local_starts.append(neighborhood_space.mapper.size)
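        # Split the flat DoF vector of the neighborhood correction into
        # per-subdomain blocks and extract the block of the given subdomain.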
        localized_corrections_as_np = np.array(correction._list[0].impl,
                                               copy=False)
        localized_corrections_as_np = [
            localized_corrections_as_np[local_starts[nn]:local_starts[nn + 1]]
            for nn in range(len(local_sizes))
        ]
        subdomain_index_in_neighborhood = np.where(
            np.array(list(neighborhood)) == subdomain)[0]
        assert len(subdomain_index_in_neighborhood) == 1
        subdomain_index_in_neighborhood = subdomain_index_in_neighborhood[0]
        subdomain_correction = Vector(
            local_sizes[subdomain_index_in_neighborhood], 0.)
        subdomain_correction_as_np = np.array(subdomain_correction, copy=False)
        subdomain_correction_as_np[:] = localized_corrections_as_np[
            subdomain_index_in_neighborhood][:]
        return self.solution_space.subspaces[subdomain].make_array(
            [subdomain_correction])