Example #1
    def forward(ctx, pfcn, ts, fwd_options, bck_options, nparams, y0,
                *allparams):
        config = fwd_options
        ctx.bck_config = set_default_option(config, bck_options)

        params = allparams[:nparams]
        objparams = allparams[nparams:]

        method = config.pop("method")
        methods = {
            "rk4": rk4_ivp,
            "rk38": rk38_ivp,
            "rk23": rk23_adaptive,
            "rk45": rk45_adaptive,
        }
        solver = get_method("solve_ivp", methods, method)
        yt = solver(pfcn, ts, y0, params, **config)

        # save the parameters for backward
        ctx.param_sep = TensorNonTensorSeparator(allparams, varonly=True)
        tensor_params = ctx.param_sep.get_tensor_params()
        ctx.save_for_backward(ts, y0, *tensor_params)
        ctx.pfcn = pfcn
        ctx.nparams = nparams
        ctx.yt = yt
        ctx.ts_requires_grad = ts.requires_grad

        return yt
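
Every example on this page follows the same dispatch pattern: build a dictionary mapping method names to implementations, pop the "method" entry from the options, and let `get_method` resolve it to a callable. The helper itself is not shown here; the sketch below is only a reconstruction inferred from these call sites (and from the `Union[str, Callable, None]` hints in Examples #5 and #7), not the actual xitorch implementation.

    from typing import Callable, Mapping, Union

    def get_method(algname: str,
                   methods: Mapping[str, Callable],
                   method: Union[str, Callable]) -> Callable:
        # Hypothetical reconstruction: resolve a registered name, or accept
        # a user-supplied callable directly.
        if isinstance(method, str):
            if method not in methods:
                raise RuntimeError(
                    "Unknown method %s for %s; available methods: %s"
                    % (method, algname, ", ".join(methods)))
            return methods[method]
        elif callable(method):
            return method
        else:
            raise TypeError("method must be a string or a callable")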
Example #2
    def forward(ctx, A, neig, mode, M, fwd_options, bck_options, na,
                *amparams):
        # A: LinearOperator (*BA, q, q)
        # M: LinearOperator (*BM, q, q) or None

        # separate the sets of parameters
        params = amparams[:na]
        mparams = amparams[na:]

        config = set_default_option({}, fwd_options)
        ctx.bck_config = set_default_option(
            {
                # "method": ???
            },
            bck_options)

        method = config.pop("method")
        with A.uselinopparams(*params), M.uselinopparams(
                *mparams) if M is not None else dummy_context_manager():
            methods = {
                "davidson": davidson,
                "custom_exacteig": custom_exacteig,
            }
            method_fcn = get_method("symeig", methods, method)
            evals, evecs = method_fcn(A, neig, mode, M, **config)

        # save for the backward
        ctx.evals = evals  # (*BAM, neig)
        ctx.evecs = evecs  # (*BAM, na, neig)
        ctx.params = params
        ctx.A = A
        ctx.M = M
        ctx.mparams = mparams
        return evals, evecs
Example #3
    def forward(ctx, fcn, y0, options, bck_options, nparams, *allparams):

        # set default options
        config = options
        ctx.bck_options = bck_options

        params = allparams[:nparams]
        objparams = allparams[nparams:]

        with fcn.useobjparams(objparams):

            method = config.pop("method")
            methods = {
                "broyden1": broyden1,
                "broyden2": broyden2,
                "linearmixing": linearmixing,
            }
            method_fcn = get_method("rootfinder", methods, method)
            y = method_fcn(fcn, y0, params, **config)

        ctx.fcn = fcn

        # split tensors and non-tensors params
        ctx.nparams = nparams
        ctx.param_sep = TensorNonTensorSeparator(allparams)
        tensor_params = ctx.param_sep.get_tensor_params()
        ctx.save_for_backward(y, *tensor_params)

        return y
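
Examples #1, #3, #4, #6, and #8 also share a second pattern: the mixed tuple of parameters is wrapped in a `TensorNonTensorSeparator` so that only tensors are handed to `ctx.save_for_backward`. That class is not shown on this page; purely as an illustration of the idea, a minimal separator could look like the hypothetical stand-in below (`varonly=True` is assumed to keep only tensors that require grad).

    import torch

    class ParamSeparator:  # hypothetical stand-in for TensorNonTensorSeparator
        def __init__(self, params, varonly=False):
            self.params = list(params)
            # remember which slots hold tensor parameters
            self.idxs = [
                i for i, p in enumerate(self.params)
                if isinstance(p, torch.Tensor)
                and (not varonly or p.requires_grad)
            ]

        def get_tensor_params(self):
            return [self.params[i] for i in self.idxs]

        def reconstruct_params(self, tensor_params):
            # put (possibly updated) tensors back into their original slots
            out = list(self.params)
            for i, p in zip(self.idxs, tensor_params):
                out[i] = p
            return tuple(out)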
Example #4
    def forward(ctx, fcn, y0, fwd_fcn, is_opt_method, options, bck_options,
                nparams, *allparams):
        # fcn: a function that returns what has to be 0 (will be used in the
        #      backward, not used in the forward). For minimization, it is
        #      the gradient
        # fwd_fcn: a function that will be executed in the forward method
        #          (unused in the backward)
        # This class is also used for minimization, where fcn and fwd_fcn might
        # be slightly different

        # set default options
        config = options
        ctx.bck_options = bck_options

        params = allparams[:nparams]
        objparams = allparams[nparams:]

        with fwd_fcn.useobjparams(objparams):

            method = config.pop("method")
            methods = _RF_METHODS if not is_opt_method else _OPT_METHODS
            name = "rootfinder" if not is_opt_method else "minimizer"
            method_fcn = get_method(name, methods, method)
            y = method_fcn(fwd_fcn, y0, params, **config)

        ctx.fcn = fcn
        ctx.is_opt_method = is_opt_method

        # split tensors and non-tensors params
        ctx.nparams = nparams
        ctx.param_sep = TensorNonTensorSeparator(allparams)
        tensor_params = ctx.param_sep.get_tensor_params()
        ctx.save_for_backward(y, *tensor_params)

        return y
Example #5
    def __init__(self,
                 x: torch.Tensor,
                 y: Optional[torch.Tensor] = None,
                 method: Union[str, Callable, None] = None,
                 **fwd_options):
        if method is None:
            method = "cspline"
        methods = {
            "cspline": CubicSpline1D,
        }
        method_cls = get_method("Interp1D", methods, method)
        self.obj = method_cls(x, y, **fwd_options)
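
The constructor above belongs to an interpolation wrapper registered under the name "Interp1D" and delegates the work to `CubicSpline1D`. Assuming the import path and the call convention of xitorch's public class (both are assumptions, since only `__init__` is shown here), usage would look roughly like:

    import torch
    from xitorch.interpolate import Interp1D  # assumed import path

    x = torch.linspace(0.0, 1.0, 10, dtype=torch.float64)
    y = torch.sin(x)
    interp = Interp1D(x, y, method="cspline")  # dispatches to CubicSpline1D
    xq = torch.linspace(0.0, 1.0, 100, dtype=torch.float64)
    yq = interp(xq)  # assumes the wrapper forwards the evaluation to self.obj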
Example #6
    def forward(ctx, fcn, xl, xu, fwd_options, bck_options, nparams, dtype,
                device, *all_params):

        with fcn.disable_state_change():

            config = fwd_options
            ctx.bck_config = set_default_option(config, bck_options)

            params = all_params[:nparams]
            objparams = all_params[nparams:]

            # convert to tensor
            xl = torch.as_tensor(xl, dtype=dtype, device=device)
            xu = torch.as_tensor(xu, dtype=dtype, device=device)

            # apply transformation if the boundaries contain inf
            if _isinf(xl) or _isinf(xu):
                tfm = _TanInfTransform()

                @make_sibling(fcn)
                def fcn2(t, *params):
                    ys = fcn(tfm.forward(t), *params)
                    dxdt = tfm.dxdt(t)
                    return ys * dxdt

                tl = tfm.x2t(xl)
                tu = tfm.x2t(xu)
            else:
                fcn2 = fcn
                tl = xl
                tu = xu

            method = config.pop("method")
            methods = {"leggauss": leggauss}
            method_fcn = get_method("quad", methods, method)
            y = method_fcn(fcn2, tl, tu, params, **config)

            # save the parameters for backward
            ctx.param_sep = TensorNonTensorSeparator(all_params)
            tensor_params = ctx.param_sep.get_tensor_params()
            ctx.xltensor = isinstance(xl, torch.Tensor)
            ctx.xutensor = isinstance(xu, torch.Tensor)
            xlxu_tensor = ([xl] if ctx.xltensor else []) + \
                          ([xu] if ctx.xutensor else [])
            ctx.xlxu_nontensor = ([xl] if not ctx.xltensor else []) + \
                                 ([xu] if not ctx.xutensor else [])
            ctx.save_for_backward(*xlxu_tensor, *tensor_params)
            ctx.fcn = fcn
            ctx.nparams = nparams
            return y
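
The infinite-boundary branch in Example #6 relies on a change of variables. Assuming `_TanInfTransform` implements the tangent substitution its name suggests (an assumption; the class itself is not shown), the quadrature is rewritten as

$$\int_{x_l}^{x_u} f(x)\,dx \;=\; \int_{t_l}^{t_u} f(\tan t)\,\frac{1}{\cos^2 t}\,dt,
\qquad x = \tan t,\quad t_{l,u} = \arctan x_{l,u},$$

which is exactly what `fcn2` computes: `fcn(tfm.forward(t), *params)` evaluated at the transformed point, multiplied by the Jacobian `tfm.dxdt(t)`, with the possibly infinite bounds mapped to the finite `tl` and `tu`.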
Example #7
    def __init__(self, x: torch.Tensor,
                 method: Union[str, Callable, None] = None,
                 **fwd_options):
        if method is None:
            method = "cspline"
        if not (isinstance(x, torch.Tensor) and len(x.shape) == 1):
            raise RuntimeError("The input x to SQuad must be a 1D tensor")

        all_clss = {
            "cspline": CubicSplineSQuad,
            "simpson": SimpsonSQuad,
            "trapz": TrapzSQuad,
        }
        clss = get_method("SQuad", all_clss, method)
        self.obj = clss(x, **fwd_options)
        self.nx = x.shape[-1]
Example #8
    def forward(ctx, ffcn, log_pfcn, x0, xsamples, wsamples, method,
                fwd_options, bck_options, nfparams, nf_objparams, npparams,
                *all_fpparams):
        # set up the default options
        config = fwd_options
        ctx.bck_config = set_default_option(config, bck_options)

        # split the parameters
        fparams = all_fpparams[:nfparams]
        fobjparams = all_fpparams[nfparams:nfparams + nf_objparams]
        pparams = all_fpparams[nfparams + nf_objparams:nfparams +
                               nf_objparams + npparams]
        pobjparams = all_fpparams[nfparams + nf_objparams + npparams:]

        # select the method for the sampling
        if xsamples is None:
            methods = {
                "mh": mh,
                "_dummy1d": dummy1d,
                "mhcustom": mhcustom,
            }
            method_fcn = get_method("mcquad", methods, method)
            xsamples, wsamples = method_fcn(log_pfcn, x0, pparams, **config)
        epf = _integrate(ffcn, xsamples, wsamples, fparams)

        # save parameters for backward calculations
        ctx.xsamples = xsamples
        ctx.wsamples = wsamples
        ctx.ffcn = ffcn
        ctx.log_pfcn = log_pfcn
        ctx.fparam_sep = TensorNonTensorSeparator((*fparams, *fobjparams))
        ctx.pparam_sep = TensorNonTensorSeparator((*pparams, *pobjparams))
        ctx.nfparams = len(fparams)
        ctx.npparams = len(pparams)
        ctx.method = method

        # save for backward
        ftensor_params = ctx.fparam_sep.get_tensor_params()
        ptensor_params = ctx.pparam_sep.get_tensor_params()
        ctx.nftensorparams = len(ftensor_params)
        ctx.nptensorparams = len(ptensor_params)
        ctx.save_for_backward(epf, *ftensor_params, *ptensor_params)

        return epf
Example #9
    def forward(ctx, A, B, E, M, method, fwd_options, bck_options, na,
                *all_params):
        # A: (*BA, nr, nr)
        # B: (*BB, nr, ncols)
        # E: (*BE, ncols) or None
        # M: (*BM, nr, nr) or None
        # all_params: list of tensor of any shape
        # returns: (*BABEM, nr, ncols)

        # separate the parameters for A and for M
        params = all_params[:na]
        mparams = all_params[na:]

        config = set_default_option({}, fwd_options)
        ctx.bck_config = set_default_option({}, bck_options)

        if torch.all(B == 0):  # special case
            dims = (*_get_batchdims(A, B, E, M), *B.shape[-2:])
            x = torch.zeros(dims, dtype=B.dtype, device=B.device)
        else:
            with A.uselinopparams(*params), M.uselinopparams(
                    *mparams) if M is not None else dummy_context_manager():
                methods = {
                    "custom_exactsolve": custom_exactsolve,
                    "scipy_gmres": wrap_gmres,
                    "broyden1": broyden1_solve,
                    "cg": cg,
                    "bicgstab": bicgstab,
                }
                method_fcn = get_method("solve", methods, method)
                x = method_fcn(A, B, E, M, **config)

        ctx.e_is_none = E is None
        ctx.A = A
        ctx.M = M
        if ctx.e_is_none:
            ctx.save_for_backward(x, *all_params)
        else:
            ctx.save_for_backward(x, E, *all_params)
        ctx.na = na
        return x
Example #10
    def forward(ctx, A, neig, mode, M, fwd_options, bck_options, na,
                *amparams):
        # A: LinearOperator (*BA, q, q)
        # M: LinearOperator (*BM, q, q) or None

        # separate the sets of parameters
        params = amparams[:na]
        mparams = amparams[na:]

        config = set_default_option({}, fwd_options)
        ctx.bck_config = set_default_option(
            {
                "degen_atol": None,
                "degen_rtol": None,
            }, bck_options)

        # options for calculating the backward (not for `solve`)
        alg_keys = ["degen_atol", "degen_rtol"]
        ctx.bck_alg_config = get_and_pop_keys(ctx.bck_config, alg_keys)

        method = config.pop("method")
        with A.uselinopparams(*params), M.uselinopparams(
                *mparams) if M is not None else dummy_context_manager():
            methods = {
                "davidson": davidson,
                "custom_exacteig": custom_exacteig,
            }
            method_fcn = get_method("symeig", methods, method)
            evals, evecs = method_fcn(A, neig, mode, M, **config)

        # save for the backward
        # evals: (*BAM, neig)
        # evecs: (*BAM, na, neig)
        ctx.save_for_backward(evals, evecs, *amparams)
        ctx.na = na
        ctx.A = A
        ctx.M = M
        return evals, evecs