class smooth_term_OF(Functional):
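    """Smooth term coupling a data fit with an optical-flow constraint.

    The functional is the sum of an L2 data-fit term and an optical-flow
    constraint (quadratic, or Huberized L1 if ``huber=True``), optionally
    augmented by a Lagrangian term with multiplier ``lagr_mult``.
    """
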
    def __init__(self,
                 space,
                 data,
                 forward,
                 tau,
                 alpha_df,
                 alpha_of,
                 grad=None,
                 huber=False,
                 gamma=1e-7,
                 aug_lagr=False,
                 lagr_mult=None):
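        """Initialize a new instance.

        Parameters
        ----------
        space : `ProductSpace`
            Product space with ``2 * N`` components; its first component
            is used as the spatial image space.
        data : data space element
            Time-dependent data for the L2 data-fit term.
        forward : `Operator`
            Forward operator of the data-fit term.
        tau : float
            Time-step parameter passed to the optical-flow constraint
            and, if used, the augmented Lagrangian term.
        alpha_df, alpha_of : float
            Weights of the data-fit and optical-flow terms, respectively.
        grad : `Operator`, optional
            Spatial gradient operator used in the optical-flow constraint.
        huber : bool, optional
            If ``True``, use ``HuberL1OpticalFlowConstraint`` with smoothing
            parameter ``gamma`` instead of ``L2OpticalFlowConstraint``.
        gamma : float, optional
            Huber smoothing parameter.
        aug_lagr : bool, optional
            If ``True``, add an ``AugmentedLagrangeTerm``.
        lagr_mult : element of ``ProductSpace(space[0], N)``, optional
            Lagrange multiplier of the augmented Lagrangian term.
            Defaults to the zero element.
        """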
        self.N = round(len(space) / 2)  # number of time steps
        self.space = space
        self.image_space = self.space[0]
        self.space_time = ProductSpace(self.image_space, self.N)
        self.data = data
        self.forward = forward
        self.tau = tau
        self.alpha_df = alpha_df
        self.alpha_of = alpha_of
        self.grad = grad
        self.data_fit = DataFitL2TimeDep(self.space, self.data, self.forward,
                                         self.alpha_df)
        self.aug_lagr = aug_lagr
        if lagr_mult is None:
            self.lagr_mult = self.space_time.zero()
        else:
            self.lagr_mult = lagr_mult

        if huber:
            self.of_constr = HuberL1OpticalFlowConstraint(
                self.space, self.tau, self.alpha_of, self.grad, gamma)
        else:
            self.of_constr = L2OpticalFlowConstraint(self.space, self.tau,
                                                     self.alpha_of, self.grad)
        if aug_lagr:
            self.aug_lagr_term = AugmentedLagrangeTerm(self.space,
                                                       self.lagr_mult,
                                                       self.tau, self.grad)

        super().__init__(space=space, linear=False, grad_lipschitz=np.nan)

    def __call__(self, x):
        """Return the value of the smooth term at ``x``."""
        ret = self.data_fit(x) + self.of_constr(x)
        if self.aug_lagr:
            ret += self.aug_lagr_term(x)
        return ret

    @property
    def gradient(self):
        """Gradient operator of the functional.

        The component gradients of the individual terms are summed
        componentwise into a single `BroadcastOperator`.
        """
        grad_df = self.data_fit.gradient
        grad_of = self.of_constr.gradient
        if self.aug_lagr:
            grad_al = self.aug_lagr_term.gradient
            return BroadcastOperator(*[
                grad_df[i] + grad_of[i] + grad_al[i] for i in range(2 * self.N)
            ])
        else:
            return BroadcastOperator(
                *[grad_df[i] + grad_of[i] for i in range(2 * self.N)])
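

# Minimal usage sketch (not part of the original class; `space`, `data`,
# `forward` and `tau` are assumed to be set up consistently with the custom
# functionals above, and `odl.solvers.steepest_descent` is one generic way
# to minimize a smooth functional through its `gradient` property):
#
#     smooth = smooth_term_OF(space, data, forward, tau,
#                             alpha_df=1.0, alpha_of=0.1,
#                             huber=True, gamma=1e-7)
#     x = space.zero()
#     energy = smooth(x)          # data fit + optical flow (+ aug. Lagrangian)
#     grad_op = smooth.gradient   # BroadcastOperator with 2 * N components
#     odl.solvers.steepest_descent(smooth, x, line_search=1e-3, maxiter=100)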