def gradient(self):
    """Gradient of the full objective, assembled component-wise."""
    grad_df = self.data_fit.gradient
    grad_of = self.of_constr.gradient
    if self.aug_lagr:
        # Include the augmented-Lagrangian term when it is enabled.
        grad_al = self.aug_lagr_term.gradient
        return BroadcastOperator(*[
            grad_df[i] + grad_of[i] + grad_al[i] for i in range(2 * self.N)
        ])
    else:
        return BroadcastOperator(
            *[grad_df[i] + grad_of[i] for i in range(2 * self.N)])
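For context, indexing a BroadcastOperator yields its i-th component operator, and ODL operators with matching domain and range can be summed, which is what makes the component-wise sums above work. A minimal self-contained sketch (the operators here are made up for illustration, not taken from the source):

import odl

space = odl.uniform_discr(0, 1, 8)
grad_a = odl.BroadcastOperator(odl.IdentityOperator(space),
                               odl.ScalingOperator(space, 2.0))
grad_b = odl.BroadcastOperator(odl.ScalingOperator(space, 3.0),
                               odl.IdentityOperator(space))
# Sum the component operators pairwise and re-broadcast.
combined = odl.BroadcastOperator(
    *[grad_a[i] + grad_b[i] for i in range(2)])
x = space.one()
# combined(x) has components x + 3*x == 4*x and 2*x + x == 3*x.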
Example #2
def _reconstruct(self, observation, out):
    observation = self.observation_space.element(observation)
    # Work on a temporary element if `out` does not live in the
    # reconstruction space, and copy the result back at the end.
    out_ = out
    if out not in self.reco_space:
        out_ = self.reco_space.zero()
    out_[:] = self.x0
    # TV-regularized least squares: stack the forward operator and the
    # spatial gradient, i.e. minimize f(x) + g(L x) with f = 0.
    gradient = Gradient(self.op.domain)
    L = BroadcastOperator(self.op, gradient)
    f = ZeroFunctional(self.op.domain)
    l2_norm = L2NormSquared(self.op.range).translated(observation)
    l1_norm = self.lam * L1Norm(gradient.range)
    g = SeparableSum(l2_norm, l1_norm)
    # Linearized ADMM needs sigma >= tau * ||L||^2; the 1.1 factor makes
    # the power-method estimate a safe upper bound.
    op_norm = 1.1 * power_method_opnorm(L, maxiter=20)
    sigma = self.tau * op_norm ** 2
    admm.admm_linearized(out_, f, g, L, self.tau, sigma, self.niter,
                         callback=self.callback)
    if out not in self.reco_space:
        out[:] = out_
    return out
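The method above lives inside a reconstructor class. A rough, self-contained sketch of the same linearized-ADMM setup as plain ODL code might look as follows; the identity forward operator, regularization weight, step size, and iteration count are illustrative assumptions, not values from the source:

import odl
from odl.solvers.nonsmooth import admm

# Denoising toy problem: identity forward operator on a 2D image space.
space = odl.uniform_discr([0, 0], [1, 1], [64, 64])
data = odl.phantom.shepp_logan(space, modified=True)
data += 0.1 * odl.phantom.white_noise(space)

ident = odl.IdentityOperator(space)
gradient = odl.Gradient(space)
L = odl.BroadcastOperator(ident, gradient)

f = odl.solvers.ZeroFunctional(space)
l2_norm = odl.solvers.L2NormSquared(space).translated(data)
l1_norm = 0.15 * odl.solvers.L1Norm(gradient.range)
g = odl.solvers.SeparableSum(l2_norm, l1_norm)

op_norm = 1.1 * odl.power_method_opnorm(L, maxiter=20)
tau = 0.1
sigma = tau * op_norm ** 2  # linearized ADMM: sigma >= tau * ||L||^2

x = space.zero()
admm.admm_linearized(x, f, g, L, tau, sigma, niter=200)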
Example #3
def _reconstruct(self, observation, out):
    observation = self.observation_space.element(observation)
    out[:] = self.x0
    # Same TV setup as above, but the primal-dual Douglas-Rachford solver
    # takes the functionals as a sequence, one per component of L.
    gradient = Gradient(self.op.domain)
    L = BroadcastOperator(self.op, gradient)
    f = ZeroFunctional(self.op.domain)
    l2_norm = L2NormSquared(self.op.range).translated(observation)
    l1_norm = self.lam * L1Norm(gradient.range)
    g = [l2_norm, l1_norm]
    # Let ODL pick step sizes from (estimates of) the operator norms.
    tau, sigma = douglas_rachford.douglas_rachford_pd_stepsize(L)
    douglas_rachford.douglas_rachford_pd(out, f, g, L, self.niter,
                                         tau, sigma,
                                         callback=self.callback)
    return out
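A note on the API difference, based on ODL's solver interfaces: douglas_rachford_pd takes g as a plain sequence of functionals matched component-wise with L, whereas the ADMM and PDHG variants wrap the same two functionals in a single SeparableSum on the product range of L. The douglas_rachford_pd_stepsize helper then derives tau and sigma from (estimates of) the operator norms so that the solver's convergence condition holds.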
Example #4
def _reconstruct(self, observation, out):
    observation = self.observation_space.element(observation)
    out[:] = self.x0
    # Same TV setup, solved with the primal-dual hybrid gradient method
    # (PDHG, Chambolle-Pock): minimize f(x) + g(L x) with f = 0.
    gradient = Gradient(self.op.domain)
    L = BroadcastOperator(self.op, gradient)
    f = ZeroFunctional(self.op.domain)
    l2_norm = L2NormSquared(self.op.range).translated(observation)
    l1_norm = self.lam * L1Norm(gradient.range)
    g = SeparableSum(l2_norm, l1_norm)
    tau, sigma = primal_dual_hybrid_gradient.pdhg_stepsize(L)
    primal_dual_hybrid_gradient.pdhg(out, f, g, L, self.niter, tau, sigma,
                                     callback=self.callback)
    return out
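Here pdhg minimizes f(x) + g(L x), and the pdhg_stepsize helper picks tau and sigma from the norm of L. A minimal manual alternative, assuming the standard PDHG condition tau * sigma * ||L||^2 < 1 (the space and operator below are placeholders, not from the source):

import odl

space = odl.uniform_discr([0, 0], [1, 1], [32, 32])
L = odl.BroadcastOperator(odl.IdentityOperator(space), odl.Gradient(space))
# Overestimate ||L|| slightly, then choose equal step sizes so that
# tau * sigma * ||L||^2 < 1 holds up to the estimate.
op_norm = 1.1 * odl.power_method_opnorm(L, maxiter=20)
tau = sigma = 1.0 / op_norm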

def gradient(self):
    # Assemble the full gradient from the per-component gradient operators.
    return BroadcastOperator(
        *[self.partial_gradient(i) for i in range(2 * self.N)])
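The partial_gradient pattern above suggests per-component gradient operators on a product space. A hypothetical sketch of what such components might look like; partial_gradient here is a stand-in for illustration, not the source's implementation:

import odl

space = odl.uniform_discr(0, 1, 16)
N = 2
domain = odl.ProductSpace(space, 2 * N)

def partial_gradient(i):
    # Stand-in: for the functional x -> sum_i ||x_i||^2 / 2, the i-th
    # partial gradient is simply the projection onto component i.
    return odl.ComponentProjection(domain, i)

full_gradient = odl.BroadcastOperator(
    *[partial_gradient(i) for i in range(2 * N)])
x = domain.one()
y = full_gradient(x)  # y[i] == x[i] for this toy functional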