Example #1
def compute_tlm(parameter, forget=False):

    if isinstance(parameter, (list, tuple)):
        parameter = ListControl(parameter)

    for i in range(adjglobals.adjointer.equation_count):
        (tlm_var, output) = adjglobals.adjointer.get_tlm_solution(i, parameter)
        if output.data:
            output.data.rename(str(tlm_var), "a Function from dolfin-adjoint")

        storage = libadjoint.MemoryStorage(output)
        storage.set_overwrite(True)
        adjglobals.adjointer.record_variable(tlm_var, storage)

        yield (output.data, tlm_var)

        # forget is None: forget *nothing*.
        # forget is True: forget everything we can, forward and adjoint
        # forget is False: forget only unnecessary tlm values
        if forget is None:
            pass
        elif forget:
            adjglobals.adjointer.forget_tlm_equation(i)
        else:
            adjglobals.adjointer.forget_tlm_values(i)
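
A minimal usage sketch for this generator, assuming the usual from dolfin import * / from dolfin_adjoint import * setup, an already-annotated forward run, and a placeholder control name:

m = FunctionControl("InitialConcentration")  # placeholder name of an annotated variable
for (tlm_output, tlm_var) in compute_tlm(m, forget=False):
    if tlm_output is not None:
        # e.g. monitor each tangent linear solution as it is produced
        print("%s: TLM norm %g" % (tlm_var, tlm_output.vector().norm("l2")))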
Example #2
    def solve(self, annotate=None):
        '''To disable the annotation, just pass :py:data:`annotate=False` to this routine, and it acts exactly like the
        Dolfin solve call. This is useful in cases where the solve is known to be irrelevant or diagnostic
        for the purposes of the adjoint computation (such as projecting fields to other function spaces
        for the purposes of visualisation).'''

        annotate = utils.to_annotate(annotate)

        if annotate:
            problem = self.problem
            solving.annotate(problem.F == 0,
                             problem.u,
                             problem.bcs,
                             J=problem.J,
                             solver_parameters=compatibility.to_dict(
                                 self.parameters))

        out = backend.NonlinearVariationalSolver.solve(self)

        if annotate and backend.parameters["adjoint"]["record_all"]:
            adjglobals.adjointer.record_variable(
                adjglobals.adj_variables[self.problem.u],
                libadjoint.MemoryStorage(adjlinalg.Vector(self.problem.u)))

        return out
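
A small self-contained sketch of the annotate=False escape hatch described in the docstring (the Poisson-style problem below is illustrative only, not taken from this example):

from dolfin import *
from dolfin_adjoint import *

mesh = UnitSquareMesh(8, 8)
V = FunctionSpace(mesh, "CG", 1)
u, v = Function(V), TestFunction(V)
F = inner(grad(u), grad(v))*dx - Constant(1.0)*v*dx
bc = DirichletBC(V, Constant(0.0), "on_boundary")

problem = NonlinearVariationalProblem(F, u, bcs=[bc], J=derivative(F, u))
solver = NonlinearVariationalSolver(problem)
solver.solve(annotate=False)  # diagnostic solve, kept off the adjoint tape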
Example #3
    def __call__(self, infunc, annotate=None):

        # Perform the forward operation
        out = Function(infunc.function_space())
        out.vector()[:] = infunc.vector().get_local()**2

        # Annotate the operation on the dolfin-adjoint tape
        if utils.to_annotate(annotate):

            rhs = CustomDolfinAdjointFunctionRhs(coeffs=[infunc])
            out_dep = adjglobals.adj_variables.next(out)

            solving.register_initial_conditions(zip(rhs.coefficients(),
                                                    rhs.dependencies()),
                                                linear=False)

            if parameters["adjoint"]["record_all"]:
                adjglobals.adjointer.record_variable(
                    out_dep,
                    libadjoint.MemoryStorage(adjlinalg.Vector(out)))

            identity = utils.get_identity_block(out.function_space())
            eq = libadjoint.Equation(out_dep,
                                     blocks=[identity],
                                     targets=[out_dep],
                                     rhs=rhs)
            cs = adjglobals.adjointer.register_equation(eq)

            solving.do_checkpoint(cs, out_dep, rhs)

        return out
Example #4
    def step(self, dt, annotate=None):

        to_annotate = utils.to_annotate(annotate)

        if to_annotate:
            scheme = self.scheme()
            var = scheme.solution()
            fn_space = var.function_space()

            current_var = adjglobals.adj_variables[var]
            if not adjglobals.adjointer.variable_known(current_var):
                solving.register_initial_conditions([(var, current_var)], linear=True)

            identity_block = utils.get_identity_block(fn_space)
            frozen_expressions = expressions.freeze_dict()
            frozen_constants = constant.freeze_dict()
            rhs = PointIntegralRHS(self, dt, current_var, frozen_expressions, frozen_constants)
            next_var = adjglobals.adj_variables.next(var)

            eqn = libadjoint.Equation(next_var, blocks=[identity_block], targets=[next_var], rhs=rhs)
            cs  = adjglobals.adjointer.register_equation(eqn)

        super(PointIntegralSolver, self).step(dt)

        if to_annotate:
            curtime = float(scheme.t())
            scheme.t().assign(curtime)  # so that d-a sees the time update, which is implicit in step

            solving.do_checkpoint(cs, next_var, rhs)

            if dolfin.parameters["adjoint"]["record_all"]:
                adjglobals.adjointer.record_variable(next_var, libadjoint.MemoryStorage(adjlinalg.Vector(var)))
Example #5
def dolfin_adjoint_interpolate(self, other, annotate=None):
    out = dolfin_interpolate(self, other)
    if annotate is True:
        assignment.register_assign(self, other, op=backend.interpolate)
        adjglobals.adjointer.record_variable(
            adjglobals.adj_variables[self],
            libadjoint.MemoryStorage(adjlinalg.Vector(self)))

    return out
Example #6
    def solve(self, *args, **kwargs):
        # Annotate an assignment solve
        record = da_annotate_equation_solve(self)
        annotate = not dolfin.parameters["adjoint"]["stop_annotating"]
        dolfin.parameters["adjoint"]["stop_annotating"] = True
        ret = solve_orig(self, *args, **kwargs)
        dolfin.parameters["adjoint"]["stop_annotating"] = not annotate
        if record:
            x = unwrap_fns(self.x())
            adjglobals.adjointer.record_variable(
                adjglobals.adj_variables[x],
                libadjoint.MemoryStorage(adjlinalg.Vector(x)))
        return ret
Example #7
def interpolate(v, V, annotate=None, name=None):
    '''The interpolate call changes Function data, and so it too must be annotated so that the
    adjoint and tangent linear models may be constructed automatically by libadjoint.

    To disable the annotation of this function, just pass :py:data:`annotate=False`. This is useful in
    cases where the interpolation is known to be irrelevant or diagnostic for the purposes of the adjoint
    computation (such as interpolating fields to other function spaces for the purposes of
    visualisation).'''

    out = backend.interpolate(v, V)
    out = utils.function_to_da_function(out)
    if name is not None:
        out.adj_name = name

    to_annotate = utils.to_annotate(annotate)

    if to_annotate:
        if isinstance(v, backend.Function):
            rhsdep = adjglobals.adj_variables[v]
            if adjglobals.adjointer.variable_known(rhsdep):
                rhs = InterpolateRHS(v, V)
                identity_block = utils.get_identity_block(V)

                solving.register_initial_conditions(zip(
                    rhs.coefficients(), rhs.dependencies()),
                                                    linear=True)

                dep = adjglobals.adj_variables.next(out)

                if backend.parameters["adjoint"]["record_all"]:
                    adjglobals.adjointer.record_variable(
                        dep, libadjoint.MemoryStorage(adjlinalg.Vector(out)))

                initial_eq = libadjoint.Equation(dep,
                                                 blocks=[identity_block],
                                                 targets=[dep],
                                                 rhs=rhs)
                cs = adjglobals.adjointer.register_equation(initial_eq)

                solving.do_checkpoint(cs, dep, rhs)

        elif annotate is not None:
            outvar = adjglobals.adj_variables[out]
            solving.register_initial_conditions([
                [out, outvar],
            ], linear=True)

    return out
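
For illustration, a minimal sketch of the two annotation paths above: interpolating an Expression (nothing to register on the tape) and interpolating a Function purely for visualisation (assumed, generic setup):

from dolfin import *
from dolfin_adjoint import *

mesh = UnitSquareMesh(8, 8)
V = FunctionSpace(mesh, "CG", 1)
W = FunctionSpace(mesh, "CG", 2)

f = interpolate(Expression("x[0]*x[1]", degree=2), V, name="f")
f_viz = interpolate(f, W, annotate=False)  # diagnostic only, not annotated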
Example #8
def tlm_dolfin(control, forget=False):
    for i in range(adjglobals.adjointer.equation_count):
        (tlm_var, output) = adjglobals.adjointer.get_tlm_solution(i, control)

        storage = libadjoint.MemoryStorage(output)
        storage.set_overwrite(True)
        adjglobals.adjointer.record_variable(tlm_var, storage)

        if forget is None:
            pass
        elif forget:
            adjglobals.adjointer.forget_tlm_equation(i)
        else:
            adjglobals.adjointer.forget_tlm_values(i)

    return output
Example #9
def compute_adjoint(functional, forget=True, ignore=[]):

    ignorelist = []
    for fn in ignore:
        if isinstance(fn, backend.Function):
            ignorelist.append(adjglobals.adj_variables[fn])
        elif isinstance(fn, str):
            ignorelist.append(libadjoint.Variable(fn, 0, 0))
        else:
            ignorelist.append(fn)

    for i in range(adjglobals.adjointer.timestep_count):
        adjglobals.adjointer.set_functional_dependencies(functional, i)

    for i in range(adjglobals.adjointer.equation_count)[::-1]:
        fwd_var = adjglobals.adjointer.get_forward_variable(i)
        if fwd_var in ignorelist:
            info("Ignoring the adjoint equation for %s" % fwd_var)
            continue

        (adj_var,
         output) = adjglobals.adjointer.get_adjoint_solution(i, functional)
        if output.data:
            if backend.__name__ == "dolfin":
                output.data.rename(str(adj_var),
                                   "a Function from dolfin-adjoint")
            else:
                output.data.name = str(adj_var)

        storage = libadjoint.MemoryStorage(output)
        storage.set_overwrite(True)
        adjglobals.adjointer.record_variable(adj_var, storage)

        # forget is None: forget *nothing*.
        # forget is True: forget everything we can, forward and adjoint
        # forget is False: forget only unnecessary adjoint values
        if forget is None:
            pass
        elif forget:
            adjglobals.adjointer.forget_adjoint_equation(i)
        else:
            adjglobals.adjointer.forget_adjoint_values(i)

        yield (output.data, adj_var)
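
A hypothetical driver loop over this generator, assuming an annotated forward model whose final state is u and the usual dolfin-adjoint Functional / dt[FINISH_TIME] time measure:

J = Functional(inner(u, u) * dx * dt[FINISH_TIME])
for (adj_state, adj_var) in compute_adjoint(J, forget=False):
    if adj_state is not None:
        print("%s: adjoint norm %g" % (adj_var, adj_state.vector().norm("l2")))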
Example #10
def annotate_slope_limit(f):
    # First annotate the equation
    adj_var = adjglobals.adj_variables[f]
    rhs = SlopeRHS(f)

    adj_var_next = adjglobals.adj_variables.next(f)

    identity_block = solving.get_identity(f.function_space())

    eq = libadjoint.Equation(adj_var_next,
                             blocks=[identity_block],
                             targets=[adj_var_next],
                             rhs=rhs)
    cs = adjglobals.adjointer.register_equation(eq)

    # Record the result
    adjglobals.adjointer.record_variable(
        adjglobals.adj_variables[f],
        libadjoint.MemoryStorage(adjlinalg.Vector(f)))
Example #11
    def solve(self, *args, **kwargs):
        # Annotate an equation solve
        record = da_annotate_equation_solve(self)
        # The EquationSolver solve method could do a lot of work, including
        # further solves. Temporarily disable annotation during the solve ...
        annotate = not dolfin.parameters["adjoint"]["stop_annotating"]
        dolfin.parameters["adjoint"]["stop_annotating"] = True
        ret = solve_orig(self, *args, **kwargs)
        # ... and then restore the previous annotation status.
        dolfin.parameters["adjoint"]["stop_annotating"] = not annotate
        if record:
            # We still want to wrap functions with WrappedFunction so that we can,
            # for example, write separate equations for u[0] and u[n]. However,
            # dolfin-adjoint needs to treat these as the same Function, so
            # strategically unwrap WrappedFunctions.
            x = unwrap_fns(self.x())
            adjglobals.adjointer.record_variable(
                adjglobals.adj_variables[x],
                libadjoint.MemoryStorage(adjlinalg.Vector(x)))
        return ret
Example #12
        def assign(self, receiving, giving, annotate=None):

            out = backend.FunctionAssigner.assign(self, receiving, giving)
            to_annotate = utils.to_annotate(annotate)

            if to_annotate:
                # Receiving is always a single Function, or a single Function.sub(foo).sub(bar)

                # If the user passes in v.sub(0) to this function, we need to get a handle on v;
                # fetch that now
                receiving_super = get_super_function(receiving)
                receiving_fnspace = receiving_super.function_space()
                receiving_identity = utils.get_identity_block(
                    receiving_fnspace)
                receiving_idx = get_super_idx(receiving)

                rhs = FunctionAssignerRHS(self, self.adj_function_assigner,
                                          receiving_super, receiving_idx,
                                          giving)
                receiving_dep = adjglobals.adj_variables.next(receiving_super)

                solving.register_initial_conditions(zip(
                    rhs.coefficients(), rhs.dependencies()),
                                                    linear=True)
                if backend.parameters["adjoint"]["record_all"]:
                    adjglobals.adjointer.record_variable(
                        receiving_dep,
                        libadjoint.MemoryStorage(
                            adjlinalg.Vector(receiving_super)))

                eq = libadjoint.Equation(receiving_dep,
                                         blocks=[receiving_identity],
                                         targets=[receiving_dep],
                                         rhs=rhs)
                cs = adjglobals.adjointer.register_equation(eq)

                solving.do_checkpoint(cs, receiving_dep, rhs)

            return out
Example #13
    def solve(self, *args, **kwargs):
        '''To disable the annotation, just pass :py:data:`annotate=False` to this routine, and it acts exactly like the
        Dolfin solve call. This is useful in cases where the solve is known to be irrelevant or diagnostic
        for the purposes of the adjoint computation (such as projecting fields to other function spaces
        for the purposes of visualisation).'''

        to_annotate = utils.to_annotate(kwargs.pop("annotate", None))

        if to_annotate:
            factory = args[0]
            vec = args[1]
            b = dolfin.as_backend_type(vec).__class__()

            factory.F(b=b, x=vec)

            F = b.form
            bcs = b.bcs

            u = vec.function
            var = adjglobals.adj_variables[u]

            solving.annotate(
                F == 0,
                u,
                bcs,
                solver_parameters={"newton_solver": self.parameters.to_dict()})

        newargs = [self] + list(args)
        out = dolfin.NewtonSolver.solve(*newargs, **kwargs)

        if to_annotate and dolfin.parameters["adjoint"]["record_all"]:
            adjglobals.adjointer.record_variable(
                adjglobals.adj_variables[u],
                libadjoint.MemoryStorage(adjlinalg.Vector(u)))

        return out
Example #14
def register_assign(new, old, op=None):

    if not isinstance(old, backend.Function):
        assert op is not None

    fn_space = new.function_space()
    identity_block = utils.get_identity_block(fn_space)
    dep = adjglobals.adj_variables.next(new)

    if backend.parameters["adjoint"]["record_all"] and isinstance(
            old, backend.Function):
        adjglobals.adjointer.record_variable(
            dep, libadjoint.MemoryStorage(adjlinalg.Vector(old)))

    rhs = IdentityRHS(old, fn_space, op)
    register_initial_conditions(zip(rhs.coefficients(), rhs.dependencies()),
                                linear=True)
    initial_eq = libadjoint.Equation(dep,
                                     blocks=[identity_block],
                                     targets=[dep],
                                     rhs=rhs)
    cs = adjglobals.adjointer.register_equation(initial_eq)

    do_checkpoint(cs, dep, rhs)
Example #15
def replay_dolfin(forget=False, tol=0.0, stop=False):

    with misc.annotations(False):
        if not backend.parameters["adjoint"]["record_all"]:
            info_red(
                "Warning: your replay test will be much more effective with dolfin.parameters['adjoint']['record_all'] = True."
            )

        success = True
        for i in range(adjglobals.adjointer.equation_count):
            (fwd_var, output) = adjglobals.adjointer.get_forward_solution(i)
            storage = libadjoint.MemoryStorage(output)
            storage.set_compare(tol=tol)
            storage.set_overwrite(True)
            out = adjglobals.adjointer.record_variable(fwd_var, storage)
            success = success and out

            if forget:
                adjglobals.adjointer.forget_forward_equation(i)

            if not out and stop:
                break

    return success
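
Typical use, as the warning above suggests: switch on record_all before the annotated forward run, then replay and compare (a sketch; the tolerance is arbitrary):

from dolfin import *
from dolfin_adjoint import *

parameters["adjoint"]["record_all"] = True
# ... annotated forward model goes here ...
success = replay_dolfin(tol=1e-10, stop=True)
print("Replay consistent with the tape:", success)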
Example #16
    def solve_local(self, x_vec, b_vec, b_dofmap, **kwargs):
        # Figure out whether to annotate or not
        to_annotate = utils.to_annotate(kwargs.pop("annotate", None))
        x = x_vec.function

        if to_annotate:
            L = b_vec.form

            # Set Matrix class for solving the adjoint systems
            solving.annotate(self.a == L, x, \
                            solver_parameters={"solver_type": self.solver_type, "factorize" : self.adjoint_factorize}, \
                            matrix_class=LocalSolverMatrix)

        # Use standard local solver
        out = dolfin.LocalSolver.solve_local(self, x_vec, b_vec, b_dofmap)

        if to_annotate:
            # checkpointing
            if dolfin.parameters["adjoint"]["record_all"]:
                adjglobals.adjointer.record_variable(
                    adjglobals.adj_variables[x],
                    libadjoint.MemoryStorage(adjlinalg.Vector(x)))

        return out
Example #17
    def solve(self, *args, **kwargs):

        timer = backend.Timer("Matrix-free solver")

        annotate = True
        if "annotate" in kwargs:
            annotate = kwargs["annotate"]
            del kwargs["annotate"]

        if len(args) == 3:
            A = args[0]
            x = args[1]
            b = args[2]
        elif len(args) == 2:
            A = self.operators[0]
            x = args[0]
            b = args[1]

        if annotate:
            if not isinstance(A, AdjointKrylovMatrix):
                try:
                    A = AdjointKrylovMatrix(A.form)
                except AttributeError:
                    raise libadjoint.exceptions.LibadjointErrorInvalidInputs(
                        "Your A has to either be an AdjointKrylovMatrix or have been assembled after backend_adjoint was imported."
                    )

            if not hasattr(x, 'function'):
                raise libadjoint.exceptions.LibadjointErrorInvalidInputs(
                    "Your x has to come from code like down_cast(my_function.vector())."
                )

            if not hasattr(b, 'form'):
                raise libadjoint.exceptions.LibadjointErrorInvalidInputs(
                    "Your b has to have the .form attribute: was it assembled with from backend_adjoint import *?"
                )

            if not hasattr(A, 'dependencies'):
                backend.info_red(
                    "A has no .dependencies method; assuming no nonlinear dependencies of the matrix-free operator."
                )
                coeffs = []
                dependencies = []
            else:
                coeffs = [
                    coeff for coeff in A.dependencies()
                    if hasattr(coeff, 'function_space')
                ]
                dependencies = [
                    adjglobals.adj_variables[coeff] for coeff in coeffs
                ]

            if len(dependencies) > 0:
                assert hasattr(
                    A, "set_dependencies"
                ), "Need a set_dependencies method to replace your values, if you have nonlinear dependencies ... "

            rhs = adjrhs.RHS(b.form)

            key = '{}{}'.format(hash(A), random.random()).encode('utf8')
            diag_name = hashlib.md5(key).hexdigest()
            diag_block = libadjoint.Block(
                diag_name,
                dependencies=dependencies,
                test_hermitian=backend.parameters["adjoint"]["test_hermitian"],
                test_derivative=backend.parameters["adjoint"]
                ["test_derivative"])

            solving.register_initial_conditions(
                list(zip(rhs.coefficients(), rhs.dependencies())) +
                list(zip(coeffs, dependencies)),
                linear=False,
                var=None)

            var = adjglobals.adj_variables.next(x.function)

            frozen_expressions_dict = expressions.freeze_dict()
            frozen_parameters = self.parameters.to_dict()

            def diag_assembly_cb(dependencies, values, hermitian, coefficient,
                                 context):
                '''This callback must conform to the libadjoint Python block assembly
                interface. It returns either the form or its transpose, depending on
                the value of the logical hermitian.'''

                assert coefficient == 1

                expressions.update_expressions(frozen_expressions_dict)

                if len(dependencies) > 0:
                    A.set_dependencies(dependencies,
                                       [val.data for val in values])

                if hermitian:
                    A_transpose = A.hermitian()
                    return (MatrixFree(
                        A_transpose,
                        fn_space=x.function.function_space(),
                        bcs=A_transpose.bcs,
                        solver_parameters=self.solver_parameters,
                        operators=transpose_operators(self.operators),
                        parameters=frozen_parameters),
                            adjlinalg.Vector(
                                None, fn_space=x.function.function_space()))
                else:
                    return (MatrixFree(
                        A,
                        fn_space=x.function.function_space(),
                        bcs=b.bcs,
                        solver_parameters=self.solver_parameters,
                        operators=self.operators,
                        parameters=frozen_parameters),
                            adjlinalg.Vector(
                                None, fn_space=x.function.function_space()))

            diag_block.assemble = diag_assembly_cb

            def diag_action_cb(dependencies, values, hermitian, coefficient,
                               input, context):
                expressions.update_expressions(frozen_expressions_dict)
                A.set_dependencies(dependencies, [val.data for val in values])

                if hermitian:
                    acting_mat = A.transpose()
                else:
                    acting_mat = A

                output_fn = backend.Function(input.data.function_space())
                acting_mat.mult(input.data.vector(), output_fn.vector())
                vec = output_fn.vector()
                for i in range(len(vec)):
                    vec[i] = coefficient * vec[i]

                return adjlinalg.Vector(output_fn)

            diag_block.action = diag_action_cb

            if len(dependencies) > 0:

                def derivative_action(dependencies, values, variable,
                                      contraction_vector, hermitian, input,
                                      coefficient, context):
                    expressions.update_expressions(frozen_expressions_dict)
                    A.set_dependencies(dependencies,
                                       [val.data for val in values])

                    action = A.derivative_action(
                        values[dependencies.index(variable)].data,
                        contraction_vector.data, hermitian, input.data,
                        coefficient)
                    return adjlinalg.Vector(action)

                diag_block.derivative_action = derivative_action

            eqn = libadjoint.Equation(var,
                                      blocks=[diag_block],
                                      targets=[var],
                                      rhs=rhs)
            cs = adjglobals.adjointer.register_equation(eqn)
            solving.do_checkpoint(cs, var, rhs)

        out = backend.PETScKrylovSolver.solve(self, *args)

        if annotate:
            if backend.parameters["adjoint"]["record_all"]:
                adjglobals.adjointer.record_variable(
                    var,
                    libadjoint.MemoryStorage(adjlinalg.Vector(x.function)))

        timer.stop()

        return out
Example #18
    def solve(self, *args, **kwargs):
        '''To disable the annotation, just pass :py:data:`annotate=False` to this routine, and it acts exactly like the
        Dolfin solve call. This is useful in cases where the solve is known to be irrelevant or diagnostic
        for the purposes of the adjoint computation (such as projecting fields to other function spaces
        for the purposes of visualisation).'''

        to_annotate = utils.to_annotate(kwargs.pop("annotate", None))

        if to_annotate:
            if len(args) == 3:
                A = args[0]
                x = args[1]
                b = args[2]
            elif len(args) == 2:
                A = self.operators[0]
                x = args[0]
                b = args[1]

            bcs = []
            if hasattr(A, 'bcs'):
                bcs += A.bcs
            if hasattr(b, 'bcs'):
                bcs += b.bcs
            bcs = misc.uniq(bcs)

            assemble_system = A.assemble_system

            A = A.form
            u = x.function
            b = b.form

            if self.operators[1] is not None:
                P = self.operators[1].form
            else:
                P = None

            solver_parameters = self.solver_parameters
            parameters = self.parameters.to_dict()
            fn_space = u.function_space()
            has_preconditioner = P is not None
            nsp = self.nsp
            tnsp = self.tnsp

            if nsp is not None:
                msg = """
                The transpose nullspace is not set.

                The nullspace of the KrylovSolver is set. In this case,
                the transpose nullspace must also be set, use:

                  solver.set_transpose_nullspace(nullspace)
                """
                assert tnsp is not None, msg

            if self.__global_list_idx__ is None:
                self.__global_list_idx__ = len(krylov_solvers)
                krylov_solvers.append(self)
                adj_krylov_solvers.append(None)
            idx = self.__global_list_idx__

            class KrylovSolverMatrix(adjlinalg.Matrix):
                def __init__(selfmat, *args, **kwargs):
                    if 'initial_guess' in kwargs:
                        selfmat.initial_guess = kwargs['initial_guess']
                        del kwargs['initial_guess']
                    else:
                        selfmat.initial_guess = None

                    replace_map = kwargs['replace_map']
                    del kwargs['replace_map']

                    adjlinalg.Matrix.__init__(selfmat, *args, **kwargs)

                    selfmat.adjoint = kwargs['adjoint']
                    if P is None:
                        selfmat.operators = (dolfin.replace(A,
                                                            replace_map), None)
                    else:
                        selfmat.operators = (dolfin.replace(A, replace_map),
                                             dolfin.replace(P, replace_map))

                def axpy(selfmat, alpha, x):
                    raise libadjoint.exceptions.LibadjointErrorNotImplemented(
                        "Shouldn't ever get here")

                def solve(selfmat, var, b):
                    if selfmat.adjoint:
                        operators = transpose_operators(selfmat.operators)
                    else:
                        operators = selfmat.operators

                    # Fetch/construct the solver
                    if var.type in ['ADJ_FORWARD', 'ADJ_TLM']:
                        solver = krylov_solvers[idx]
                        need_to_set_operator = self._need_to_reset_operator
                    else:
                        if adj_krylov_solvers[idx] is None:
                            need_to_set_operator = True
                            adj_krylov_solvers[idx] = KrylovSolver(
                                *solver_parameters)
                        else:
                            need_to_set_operator = self._need_to_reset_operator
                        solver = adj_krylov_solvers[idx]
                    solver.parameters.update(parameters)
                    self._need_to_reset_operator = False

                    if selfmat.adjoint:
                        (nsp_, tnsp_) = (tnsp, nsp)
                    else:
                        (nsp_, tnsp_) = (nsp, tnsp)

                    x = dolfin.Function(fn_space)
                    if selfmat.initial_guess is not None and var.type == 'ADJ_FORWARD':
                        x.vector()[:] = selfmat.initial_guess.vector()

                    if b.data is None:
                        dolfin.info_red(
                            "Warning: got zero RHS for the solve associated with variable %s"
                            % var)
                        return adjlinalg.Vector(x)

                    if var.type in ['ADJ_TLM', 'ADJ_ADJOINT']:
                        selfmat.bcs = [
                            utils.homogenize(bc) for bc in selfmat.bcs
                            if isinstance(bc, dolfin.cpp.DirichletBC)
                        ] + [
                            bc for bc in selfmat.bcs
                            if not isinstance(bc, dolfin.cpp.DirichletBC)
                        ]

                    # This is really hideous. Sorry.
                    if isinstance(b.data, dolfin.Function):
                        rhs = b.data.vector().copy()
                        [bc.apply(rhs) for bc in selfmat.bcs]

                        if need_to_set_operator:
                            if assemble_system:  # if we called assemble_system, rather than assemble
                                v = dolfin.TestFunction(fn_space)
                                (A, rhstmp) = dolfin.assemble_system(
                                    operators[0],
                                    dolfin.inner(b.data, v) * dolfin.dx,
                                    selfmat.bcs)
                                if has_preconditioner:
                                    (P, rhstmp) = dolfin.assemble_system(
                                        operators[1],
                                        dolfin.inner(b.data, v) * dolfin.dx,
                                        selfmat.bcs)
                                    solver.set_operators(A, P)
                                else:
                                    solver.set_operator(A)
                            else:  # we called assemble
                                A = dolfin.assemble(operators[0])
                                [bc.apply(A) for bc in selfmat.bcs]
                                if has_preconditioner:
                                    P = dolfin.assemble(operators[1])
                                    [bc.apply(P) for bc in selfmat.bcs]
                                    solver.set_operators(A, P)
                                else:
                                    solver.set_operator(A)
                    else:

                        if assemble_system:  # if we called assemble_system, rather than assemble
                            (A, rhs) = dolfin.assemble_system(
                                operators[0], b.data, selfmat.bcs)
                            if need_to_set_operator:
                                if has_preconditioner:
                                    (P, rhstmp) = dolfin.assemble_system(
                                        operators[1], b.data, selfmat.bcs)
                                    solver.set_operators(A, P)
                                else:
                                    solver.set_operator(A)
                        else:  # we called assemble
                            A = dolfin.assemble(operators[0])
                            rhs = dolfin.assemble(b.data)
                            [bc.apply(A) for bc in selfmat.bcs]
                            [bc.apply(rhs) for bc in selfmat.bcs]
                            if need_to_set_operator:
                                if has_preconditioner:
                                    P = dolfin.assemble(operators[1])
                                    [bc.apply(P) for bc in selfmat.bcs]
                                    solver.set_operators(A, P)
                                else:
                                    solver.set_operator(A)

                    # Set the nullspace for the linear operator
                    if nsp_ is not None and need_to_set_operator:
                        dolfin.as_backend_type(A).set_nullspace(nsp_)

                    # (Possibly override the user in) orthogonalize
                    # the right-hand-side
                    if tnsp_ is not None:
                        tnsp_.orthogonalize(rhs)

                    solver.solve(x.vector(), rhs)
                    return adjlinalg.Vector(x)

            solving.annotate(A == b,
                             u,
                             bcs,
                             matrix_class=KrylovSolverMatrix,
                             initial_guess=parameters['nonzero_initial_guess'],
                             replace_map=True)

        out = dolfin.KrylovSolver.solve(self, *args, **kwargs)

        if to_annotate and dolfin.parameters["adjoint"]["record_all"]:
            adjglobals.adjointer.record_variable(
                adjglobals.adj_variables[u],
                libadjoint.MemoryStorage(adjlinalg.Vector(u)))

        return out
Example #19
    def __call__(self, value):
        """ Evaluates the reduced functional for the given control value.

        Args:
            value: The point in control space at which to evaluate the functional. Must be of the same type as the Control (e.g. Function, Constant, or lists of the latter).

        Returns:
            float: The functional value.
        """

        # Make sure we do not annotate

        # Reset any cached data in dolfin-adjoint
        adj_reset_cache()

        #: The control values at which the reduced functional is to be evaluated.
        value = enlist(value)

        # Call callback
        self.eval_cb_pre(delist(value, list_type=self.controls))

        # Update the control values on the tape
        ListControl(self.controls).update(value)

        # Check if the result is already cached
        if self.cache:
            hash = value_hash(value)
            if hash in self._cache["functional_cache"]:
                # Found a cache
                info_green("Got a functional cache hit")
                return self._cache["functional_cache"][hash]

        # Replay the annotation and evaluate the functional
        func_value = 0.
        for i in range(adjointer.equation_count):
            (fwd_var, output) = adjointer.get_forward_solution(i)
            if isinstance(output.data, Function):
                output.data.rename(str(fwd_var),
                                   "a Function from dolfin-adjoint")

            # Call callback
            self.replay_cb(fwd_var, output.data,
                           delist(value, list_type=self.controls))

            # Check whether checkpointing is active and, if so,
            # record exactly the same checkpoint variables as
            # in the initial forward run
            if adjointer.get_checkpoint_strategy() != None:
                if str(fwd_var) in mem_checkpoints:
                    storage = libadjoint.MemoryStorage(output, cs=True)
                    storage.set_overwrite(True)
                    adjointer.record_variable(fwd_var, storage)
                if str(fwd_var) in disk_checkpoints:
                    storage = libadjoint.MemoryStorage(output)
                    adjointer.record_variable(fwd_var, storage)
                    storage = libadjoint.DiskStorage(output, cs=True)
                    storage.set_overwrite(True)
                    adjointer.record_variable(fwd_var, storage)
                if not str(fwd_var) in mem_checkpoints and not str(
                        fwd_var) in disk_checkpoints:
                    storage = libadjoint.MemoryStorage(output)
                    storage.set_overwrite(True)
                    adjointer.record_variable(fwd_var, storage)

            # No checkpointing, so we record everything
            else:
                storage = libadjoint.MemoryStorage(output)
                storage.set_overwrite(True)
                adjointer.record_variable(fwd_var, storage)

            if i == adjointer.timestep_end_equation(fwd_var.timestep):
                func_value += adjointer.evaluate_functional(
                    self.functional, fwd_var.timestep)
                if adjointer.get_checkpoint_strategy() != None:
                    adjointer.forget_forward_equation(i)

        self.current_func_value = func_value

        # Call callback
        self.eval_cb_post(self.scale * func_value,
                          delist(value, list_type=self.controls))

        if self.cache:
            # Add result to cache
            info_red("Got a functional cache miss")
            self._cache["functional_cache"][hash] = self.scale * func_value

        return self.scale * func_value
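
A hedged sketch of how this __call__ is usually exercised, assuming an annotated forward model with final state u and a control Function ic (Control, ReducedFunctional and the dt[FINISH_TIME] measure as in the classic dolfin-adjoint API):

J = Functional(inner(u, u) * dx * dt[FINISH_TIME])
m = Control(ic)
Jhat = ReducedFunctional(J, m)
print(Jhat(ic))  # replays the tape and returns the functional value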
Example #20
def project_dolfin(v,
                   V=None,
                   bcs=None,
                   mesh=None,
                   solver_type="lu",
                   preconditioner_type="default",
                   form_compiler_parameters=None,
                   annotate=None,
                   name=None):
    '''The project call performs an equation solve, and so it too must be annotated so that the
    adjoint and tangent linear models may be constructed automatically by libadjoint.

    To disable the annotation of this function, just pass :py:data:`annotate=False`. This is useful in
    cases where the solve is known to be irrelevant or diagnostic for the purposes of the adjoint
    computation (such as projecting fields to other function spaces for the purposes of
    visualisation).'''

    to_annotate = utils.to_annotate(annotate)

    if isinstance(v, backend.Expression) and (annotate is not True):
        to_annotate = False

    if isinstance(v, backend.Constant) and (annotate is not True):
        to_annotate = False

    out = backend.project(v=v,
                          V=V,
                          bcs=bcs,
                          mesh=mesh,
                          solver_type=solver_type,
                          preconditioner_type=preconditioner_type,
                          form_compiler_parameters=form_compiler_parameters)
    out = utils.function_to_da_function(out)

    if name is not None:
        out.adj_name = name
        out.rename(name, "a Function from dolfin-adjoint")

    if to_annotate:
        # reproduce the logic from project. This probably isn't future-safe, but anyway

        if V is None:
            V = backend.fem.projection._extract_function_space(v, mesh)
        if mesh is None:
            mesh = V.mesh()

        # Define variational problem for projection
        w = backend.TestFunction(V)
        Pv = backend.TrialFunction(V)
        a = backend.inner(w, Pv) * backend.dx(domain=mesh)
        L = backend.inner(w, v) * backend.dx(domain=mesh)

        solving.annotate(a == L,
                         out,
                         bcs,
                         solver_parameters={
                             "linear_solver": solver_type,
                             "preconditioner": preconditioner_type,
                             "symmetric": True
                         })

        if backend.parameters["adjoint"]["record_all"]:
            adjglobals.adjointer.record_variable(
                adjglobals.adj_variables[out],
                libadjoint.MemoryStorage(adjlinalg.Vector(out)))

    return out
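
A minimal sketch of the visualisation use case from the docstring (assumed, generic spaces):

from dolfin import *
from dolfin_adjoint import *

mesh = UnitSquareMesh(8, 8)
V = FunctionSpace(mesh, "DG", 0)
W = FunctionSpace(mesh, "CG", 1)
u = Function(V)

u_viz = project(u, W, annotate=False, name="u_viz")  # kept off the tape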
Example #21
    def solve(self, *args, **kwargs):
        '''To disable the annotation, just pass :py:data:`annotate=False` to this routine, and it acts exactly like the
        Dolfin solve call. This is useful in cases where the solve is known to be irrelevant or diagnostic
        for the purposes of the adjoint computation (such as projecting fields to other function spaces
        for the purposes of visualisation).'''

        to_annotate = utils.to_annotate(kwargs.pop("annotate", None))

        if to_annotate:
            if len(args) == 2:
                try:
                    A = self.matrix.form
                except AttributeError:
                    raise libadjoint.exceptions.LibadjointErrorInvalidInputs("Your matrix A has to have the .form attribute: was it assembled after from dolfin_adjoint import *?")

                try:
                    self.op_bcs = self.matrix.bcs
                except AttributeError:
                    self.op_bcs = []

                try:
                    x = args[0].function
                except AttributeError:
                    raise libadjoint.exceptions.LibadjointErrorInvalidInputs("Your solution x has to have a .function attribute; is it the .vector() of a Function?")

                try:
                    b = args[1].form
                except AttributeError:
                    raise libadjoint.exceptions.LibadjointErrorInvalidInputs("Your RHS b has to have the .form attribute: was it assembled after from dolfin_adjoint import *?")

                try:
                    eq_bcs = misc.uniq(self.op_bcs + args[1].bcs)
                except AttributeError:
                    eq_bcs = self.op_bcs

            elif len(args) == 3:
                A = args[0].form
                try:
                    x = args[1].function
                except AttributeError:
                    raise libadjoint.exceptions.LibadjointErrorInvalidInputs("Your solution x has to have a .function attribute; is it the .vector() of a Function?")

                try:
                    self.op_bcs = A.bcs
                except AttributeError:
                    self.op_bcs = []

                try:
                    b = args[2].form
                except AttributeError:
                    raise libadjoint.exceptions.LibadjointErrorInvalidInputs("Your RHS b has to have the .form attribute: was it assembled after from dolfin_adjoint import *?")

                try:
                    eq_bcs = misc.uniq(self.op_bcs + args[2].bcs)
                except AttributeError:
                    eq_bcs = self.op_bcs

            else:
                raise libadjoint.exceptions.LibadjointErrorInvalidInputs("LUSolver.solve() must be called with either (A, x, b) or (x, b).")

            if self.parameters["reuse_factorization"] and self.__global_list_idx__ is None:
                self.__global_list_idx__ = len(lu_solvers)
                lu_solvers.append(self)
                adj_lu_solvers.append(None)

            solving.annotate(A == b, x, eq_bcs, solver_parameters={"linear_solver": "lu"}, matrix_class=make_LUSolverMatrix(self.__global_list_idx__, self.parameters["reuse_factorization"]))

        out = dolfin.LUSolver.solve(self, *args, **kwargs)

        if to_annotate:
            if dolfin.parameters["adjoint"]["record_all"]:
                adjglobals.adjointer.record_variable(adjglobals.adj_variables[x], libadjoint.MemoryStorage(adjlinalg.Vector(x)))

        return out
Example #22
    def __call__(self, m_dot, project=False):
        flag = misc.pause_annotation()
        hess_action_timer = backend.Timer("Hessian action")

        m_p = self.m.set_perturbation(m_dot)
        last_timestep = adjglobals.adjointer.timestep_count

        m_dot = enlist(m_dot)
        Hm = []
        for m_dot_cmp in m_dot:
            if hasattr(m_dot_cmp, 'function_space'):
                Hm.append(backend.Function(m_dot_cmp.function_space()))
            elif isinstance(m_dot_cmp, float):
                Hm.append(0.0)
            else:
                raise NotImplementedError(
                    "Sorry, don't know how to handle this")

        tlm_timer = backend.Timer("Hessian action (TLM)")
        # run the tangent linear model
        for (tlm, tlm_var) in compute_tlm(m_p, forget=None):
            pass

        tlm_timer.stop()

        # run the adjoint and second-order adjoint equations.
        for i in range(adjglobals.adjointer.equation_count)[::-1]:
            adj_var = adjglobals.adjointer.get_forward_variable(i).to_adjoint(
                self.J)
            # Only recompute the adjoint variable if we do not have it yet
            try:
                adj = adjglobals.adjointer.get_variable_value(adj_var)
            except (libadjoint.exceptions.LibadjointErrorHashFailed,
                    libadjoint.exceptions.LibadjointErrorNeedValue):
                adj_timer = backend.Timer("Hessian action (ADM)")
                adj = adjglobals.adjointer.get_adjoint_solution(i, self.J)[1]
                adj_timer.stop()

                storage = libadjoint.MemoryStorage(adj)
                adjglobals.adjointer.record_variable(adj_var, storage)

            adj = adj.data

            soa_timer = backend.Timer("Hessian action (SOA)")
            (soa_var,
             soa_vec) = adjglobals.adjointer.get_soa_solution(i, self.J, m_p)
            soa_timer.stop()
            soa = soa_vec.data

            def hess_inner(Hm, out):
                assert len(out) == len(Hm)
                for i in range(len(out)):
                    if out[i] is not None:
                        if isinstance(Hm[i], backend.Function):
                            Hm[i].vector().axpy(1.0, out[i].vector())
                        elif isinstance(Hm[i], float):
                            Hm[i] += out[i]
                        else:
                            raise ValueError(
                                "Do not know what to do with this")
                return Hm

            func_timer = backend.Timer("Hessian action (derivative formula)")
            # now implement the Hessian action formula.
            out = self.m.equation_partial_derivative(adjglobals.adjointer, soa,
                                                     i, soa_var.to_forward())
            Hm = hess_inner(Hm, out)

            out = self.m.equation_partial_second_derivative(
                adjglobals.adjointer, adj, i, soa_var.to_forward(), m_dot)
            Hm = hess_inner(Hm, out)

            if last_timestep > adj_var.timestep:
                # We have hit a new timestep, and need to compute this timestep's \partial^2 J/\partial m^2 contribution
                last_timestep = adj_var.timestep
                out = self.m.functional_partial_second_derivative(
                    adjglobals.adjointer, self.J, adj_var.timestep, m_dot)
                Hm = hess_inner(Hm, out)

            func_timer.stop()

            storage = libadjoint.MemoryStorage(soa_vec)
            storage.set_overwrite(True)
            adjglobals.adjointer.record_variable(soa_var, storage)

        for Hm_cmp in Hm:
            if isinstance(Hm_cmp, backend.Function):
                Hm_cmp.rename("d^2(%s)/d(%s)^2" % (str(self.J), str(self.m)),
                              "a Function from dolfin-adjoint")

        misc.continue_annotation(flag)
        return postprocess(Hm, project, list_type=self.enlisted_controls)
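
A sketch of one Hessian action computed via this __call__, assuming J is a Functional, ic is the control Function, m = Control(ic), and the class above is reachable through the classic hessian(J, m) driver:

H = hessian(J, m)
direction = Function(ic.function_space())  # an arbitrary perturbation direction
direction.vector()[:] = 1.0
Hm = H(direction)  # the Hessian action, returned in the control space
print(Hm.vector().norm("l2"))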
Example #23
def dolfin_adjoint_assign(self, other, annotate=None, *args, **kwargs):
    '''We also need to monkeypatch the Function.assign method, as it is often used inside
    the main time loop, and not annotating it means you get the adjoint wrong for totally
    nonobvious reasons. If anyone objects to me monkeypatching your objects, my apologies
    in advance.'''

    if self is other:
        return

    to_annotate = utils.to_annotate(annotate)
    # if we shouldn't annotate, just assign
    if not to_annotate:
        return dolfin_assign(self, other, *args, **kwargs)

    if isinstance(other, ufl.algebra.Sum) or isinstance(
            other, ufl.algebra.Product):
        if backend.__name__ != 'dolfin':
            errmsg = '''Cannot use Function.assign(linear combination of other Functions) yet.'''
            raise libadjoint.exceptions.LibadjointErrorNotImplemented(errmsg)
        else:
            lincom = _check_and_contract_linear_comb(other, self)
    else:
        lincom = [(other, 1.0)]

    # ignore anything not a backend.Function, unless the user insists
    if not isinstance(other, backend.Function) and (annotate is not True):
        return dolfin_assign(self, other, *args, **kwargs)

    # ignore anything that is an interpolation, rather than a straight assignment
    if hasattr(self, "function_space") and hasattr(other, "function_space"):
        if str(self.function_space()) != str(other.function_space()):
            return dolfin_assign(self, other, *args, **kwargs)

    functions, weights = zip(*lincom)

    self_var = adjglobals.adj_variables[self]
    function_vars = [
        adjglobals.adj_variables[function] for function in functions
    ]

    # ignore any functions we haven't seen before -- we DON'T want to
    # annotate the assignment of initial conditions here. That happens
    # in the main solve wrapper.
    for function_var in function_vars:
        if not adjglobals.adjointer.variable_known(
                function_var) and not adjglobals.adjointer.variable_known(
                    self_var) and (annotate is not True):
            [
                adjglobals.adj_variables.forget(function)
                for function in functions
            ]
            adjglobals.adj_variables.forget(self)

            return dolfin_assign(self, other, *args, **kwargs)

    # OK, so we have a variable we've seen before. Beautiful.
    if not adjglobals.adjointer.variable_known(self_var):
        adjglobals.adj_variables.forget(self)

    out = dolfin_assign(self, other, *args, **kwargs)

    fn_space = self.function_space()
    identity_block = utils.get_identity_block(fn_space)
    dep = adjglobals.adj_variables.next(self)

    if backend.parameters["adjoint"]["record_all"]:
        adjglobals.adjointer.record_variable(
            dep, libadjoint.MemoryStorage(adjlinalg.Vector(self)))

    rhs = LinComRHS(functions, weights, fn_space)
    register_initial_conditions(zip(rhs.coefficients(), rhs.dependencies()),
                                linear=True)
    initial_eq = libadjoint.Equation(dep,
                                     blocks=[identity_block],
                                     targets=[dep],
                                     rhs=rhs)
    cs = adjglobals.adjointer.register_equation(initial_eq)

    do_checkpoint(cs, dep, rhs)

    return out
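
To illustrate why this patch matters, a small self-contained time loop in which the patched assign keeps the tape consistent (an assumed heat-equation-style setup):

from dolfin import *
from dolfin_adjoint import *

mesh = UnitIntervalMesh(16)
V = FunctionSpace(mesh, "CG", 1)
u_prev = interpolate(Expression("sin(pi*x[0])", degree=2), V)
u = Function(V)
v = TestFunction(V)
F = (u - u_prev) * v * dx + Constant(0.01) * inner(grad(u), grad(v)) * dx

for step in range(5):
    solve(F == 0, u)
    u_prev.assign(u)  # recorded by the monkeypatched assign above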
Example #24
def annotate_split(bigfn, idx, smallfn, bcs):
    fn_space = smallfn.function_space().collapse()
    test = backend.TestFunction(fn_space)
    trial = backend.TrialFunction(fn_space)
    eq_lhs = backend.inner(test, trial) * backend.dx

    key = "{}split{}{}{}{}".format(eq_lhs, smallfn, bigfn, idx,
                                   random.random()).encode('utf8')
    diag_name = "Split:%s:" % idx + hashlib.md5(key).hexdigest()

    diag_deps = []
    diag_block = libadjoint.Block(
        diag_name,
        dependencies=diag_deps,
        test_hermitian=backend.parameters["adjoint"]["test_hermitian"],
        test_derivative=backend.parameters["adjoint"]["test_derivative"])

    solving.register_initial_conditions(
        [(bigfn, adjglobals.adj_variables[bigfn])], linear=True, var=None)

    var = adjglobals.adj_variables.next(smallfn)
    frozen_expressions_dict = expressions.freeze_dict()

    def diag_assembly_cb(dependencies, values, hermitian, coefficient,
                         context):
        '''This callback must conform to the libadjoint Python block assembly
        interface. It returns either the form or its transpose, depending on
        the value of the logical hermitian.'''

        assert coefficient == 1

        expressions.update_expressions(frozen_expressions_dict)
        value_coeffs = [v.data for v in values]
        eq_l = eq_lhs

        if hermitian:
            adjoint_bcs = [
                utils.homogenize(bc)
                for bc in bcs if isinstance(bc, backend.DirichletBC)
            ] + [bc for bc in bcs if not isinstance(bc, backend.DirichletBC)]
            if len(adjoint_bcs) == 0: adjoint_bcs = None
            return (adjlinalg.Matrix(backend.adjoint(eq_l), bcs=adjoint_bcs),
                    adjlinalg.Vector(None, fn_space=fn_space))
        else:
            return (adjlinalg.Matrix(eq_l, bcs=bcs),
                    adjlinalg.Vector(None, fn_space=fn_space))

    diag_block.assemble = diag_assembly_cb

    rhs = SplitRHS(test, bigfn, idx)

    eqn = libadjoint.Equation(var, blocks=[diag_block], targets=[var], rhs=rhs)

    cs = adjglobals.adjointer.register_equation(eqn)
    solving.do_checkpoint(cs, var, rhs)

    if backend.parameters["adjoint"]["fussy_replay"]:
        mass = eq_lhs
        smallfn_massed = backend.Function(fn_space)
        backend.solve(mass == backend.action(mass, smallfn), smallfn_massed)
        assert False, "No idea how to assign to a subfunction yet .. "
        #assignment.dolfin_assign(bigfn, smallfn_massed)

    if backend.parameters["adjoint"]["record_all"]:
        smallfn_record = backend.Function(fn_space)
        assignment.dolfin_assign(smallfn_record, smallfn)
        adjglobals.adjointer.record_variable(
            var, libadjoint.MemoryStorage(adjlinalg.Vector(smallfn_record)))
Example #25
def perturbed_replay(parameter,
                     perturbation,
                     perturbation_scale,
                     observation,
                     perturbation_norm="mass",
                     observation_norm="mass",
                     callback=None,
                     forget=False):
    r"""Perturb the forward run and compute

    .. math::

      \frac{
      \left|\left| \delta \mathrm{observation} \right|\right|
      }{
      \left|\left| \delta \mathrm{input} \right| \right|
      }

    as a function of time.

    :py:data:`parameter` -- a FunctionControl to say what variable should be
                            perturbed (e.g. FunctionControl('InitialConcentration'))
    :py:data:`perturbation` -- a Function to give the perturbation direction (from a GST analysis, for example)
    :py:data:`perturbation_norm` -- a bilinear Form which induces a norm on the space of perturbation inputs
    :py:data:`perturbation_scale` -- how big the norm of the initial perturbation should be
    :py:data:`observation` -- the variable to observe (e.g. 'Concentration')
    :py:data:`observation_norm` -- a bilinear Form which induces a norm on the space of perturbation outputs
    :py:data:`callback` -- a function f(var, perturbed, unperturbed) that the user can supply (e.g. to dump out variables during the perturbed replay)
    """

    if not backend.parameters["adjoint"]["record_all"]:
        info_red(
            "Warning: your replay test will be much more effective with backend.parameters['adjoint']['record_all'] = True."
        )

    assert isinstance(parameter, controls.FunctionControl)

    if perturbation_norm == "mass":
        p_fnsp = perturbation.function_space()
        u = backend.TrialFunction(p_fnsp)
        v = backend.TestFunction(p_fnsp)
        p_mass = backend.inner(u, v) * backend.dx
        perturbation_norm = p_mass

    if not isinstance(perturbation_norm, backend.GenericMatrix):
        perturbation_norm = backend.assemble(perturbation_norm)
    if not isinstance(observation_norm,
                      backend.GenericMatrix) and observation_norm != "mass":
        observation_norm = backend.assemble(observation_norm)

    def compute_norm(perturbation, norm):
        # Need to compute <x, Ax> and then take its sqrt
        # where x is perturbation, A is norm
        try:
            vec = perturbation.vector()
        except:
            vec = perturbation

        Ax = vec.copy()
        norm.mult(vec, Ax)
        xAx = vec.inner(Ax)
        return math.sqrt(xAx)

    growths = []

    for i in range(adjglobals.adjointer.equation_count):
        (fwd_var, output) = adjglobals.adjointer.get_forward_solution(i)

        if fwd_var == parameter.var:  # we've hit the initial condition we want to perturb
            current_norm = compute_norm(perturbation, perturbation_norm)
            output.data.vector()[:] += (perturbation_scale /
                                        current_norm) * perturbation.vector()

        unperturbed = adjglobals.adjointer.get_variable_value(fwd_var).data

        if fwd_var.name == observation:  # we've hit something we want to observe
            # Fetch the unperturbed result from the record

            if observation_norm == "mass":  # we can't do this earlier, because we don't have the observation function space yet
                o_fnsp = output.data.function_space()
                u = backend.TrialFunction(o_fnsp)
                v = backend.TestFunction(o_fnsp)
                o_mass = backend.inner(u, v) * backend.dx
                observation_norm = backend.assemble(o_mass)

            diff = output.data.vector() - unperturbed.vector()
            growths.append(
                compute_norm(diff, observation_norm) /
                perturbation_scale)  # <--- the action line

        if callback is not None:
            callback(fwd_var, output.data, unperturbed)

        storage = libadjoint.MemoryStorage(output)
        storage.set_compare(tol=None)
        storage.set_overwrite(True)
        out = adjglobals.adjointer.record_variable(fwd_var, storage)

        if forget:
            adjglobals.adjointer.forget_forward_equation(i)

    # can happen if we initialised a nonlinear solve with a constant zero guess
    if growths[0] == 0.0:
        return growths[1:]
    else:
        return growths
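
A hypothetical call following the docstring above (the control name, observation name and perturbation Function are placeholders):

growths = perturbed_replay(parameter=FunctionControl("InitialConcentration"),
                           perturbation=delta_ic,  # a Function giving the direction
                           perturbation_scale=1.0e-3,
                           observation="Concentration")
print(growths)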
Example #26
def compute_gradient(J,
                     param,
                     forget=True,
                     ignore=[],
                     callback=lambda var, output: None,
                     project=False):
    if not isinstance(J, Functional):
        raise ValueError("J must be of type dolfin_adjoint.Functional.")

    flag = misc.pause_annotation()

    enlisted_controls = enlist(param)
    param = ListControl(enlisted_controls)

    if backend.parameters["adjoint"]["allow_zero_derivatives"]:
        dJ_init = []
        for c in enlisted_controls:
            if isinstance(c.data(), backend.Constant):
                dJ_init.append(backend.Constant(0))
            elif isinstance(c.data(), backend.Function):
                space = c.data().function_space()
                dJ_init.append(backend.Function(space))

    else:
        dJ_init = [None] * len(enlisted_controls)

    dJdparam = enlisted_controls.__class__(dJ_init)

    last_timestep = adjglobals.adjointer.timestep_count

    ignorelist = []
    for fn in ignore:
        if isinstance(fn, backend.Function):
            ignorelist.append(adjglobals.adj_variables[fn])
        elif isinstance(fn, str):
            ignorelist.append(libadjoint.Variable(fn, 0, 0))
        else:
            ignorelist.append(fn)

    for i in range(adjglobals.adjointer.timestep_count):
        adjglobals.adjointer.set_functional_dependencies(J, i)

    for i in range(adjglobals.adjointer.equation_count)[::-1]:
        fwd_var = adjglobals.adjointer.get_forward_variable(i)
        if fwd_var in ignorelist:
            info("Ignoring the adjoint equation for %s" % fwd_var)
            continue

        (adj_var, output) = adjglobals.adjointer.get_adjoint_solution(i, J)

        callback(adj_var, output.data)

        storage = libadjoint.MemoryStorage(output)
        storage.set_overwrite(True)
        adjglobals.adjointer.record_variable(adj_var, storage)
        fwd_var = libadjoint.Variable(adj_var.name, adj_var.timestep,
                                      adj_var.iteration)

        out = param.equation_partial_derivative(adjglobals.adjointer,
                                                output.data, i, fwd_var)
        dJdparam = _add(dJdparam, out)

        if last_timestep > adj_var.timestep:
            # We have hit a new timestep, and need to compute this timestep's \partial J/\partial m contribution
            out = param.functional_partial_derivative(adjglobals.adjointer, J,
                                                      adj_var.timestep)
            dJdparam = _add(dJdparam, out)

        last_timestep = adj_var.timestep

        if forget is None:
            pass
        elif forget:
            adjglobals.adjointer.forget_adjoint_equation(i)
        else:
            adjglobals.adjointer.forget_adjoint_values(i)

    rename(J, dJdparam, param)

    misc.continue_annotation(flag)

    return postprocess(dJdparam, project, list_type=enlisted_controls)
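
A minimal sketch of calling this driver after an annotated run, assuming ic is the Function used as the control, u the final state, and the usual Functional / time-measure syntax:

J = Functional(inner(u, u) * dx * dt[FINISH_TIME])
m = FunctionControl(ic)
dJdm = compute_gradient(J, m, forget=False)
print(dJdm.vector().norm("l2"))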
Example #27
    J = Functional(inner(sigma0[2], sigma0[2]) * dx * dtm[FINISH_TIME])
    param = ScalarParameter(amplitude)
    adjointer = solving.adjointer

    # Copy the code from compute_gradient:
    #dJdp = compute_gradient(J, ScalarParameter(amplitude))
    # as we want to also dump out norms of the adjoint solution.
    #   --------------------------------------------------------------
    norm0s = []
    norm1s = []
    dJdparam = None

    for i in range(adjointer.equation_count)[::-1]:
        (adj_var, output) = adjointer.get_adjoint_solution(i, J)

        storage = libadjoint.MemoryStorage(output)
        adjointer.record_variable(adj_var, storage)
        fwd_var = libadjoint.Variable(adj_var.name, adj_var.timestep,
                                      adj_var.iteration)

        afile = File("%s/adjoint_%s_%d.xml" % (dirname, adj_var.name, i))
        afile << output.data

        out = param.inner_adjoint(adjointer, output.data, i, fwd_var)
        if dJdparam is None:
            dJdparam = out
        elif dJdparam is not None and out is not None:
            dJdparam += out

        if adj_var.name == "w_{10}":
            (adj_sigma0, adj_sigma1, adj_v, adj_gamma) = output.data.split()