def __init__(self, imagedims, nchannels, imageh=None,
             boundary="neumann", adjoint=None):
    LinOp.__init__(self)
    npoints = np.prod(imagedims)
    ndims = len(imagedims)
    self.imagedims = imagedims
    self.imageh = np.ones(ndims) if imageh is None else imageh
    self.bc = boundary
    self.nchannels = nchannels
    self.x = Variable((npoints, nchannels))
    self.y = Variable((npoints, nchannels))
    self._kernel = None
    if self.bc[-4:] != "_adj":
        self.spmat = lplcnopn(self.imagedims, components=self.nchannels,
                              steps=self.imageh, boundaries=self.bc)
    if self.bc == "neumann":
        # Neumann boundary conditions yield a self-adjoint operator.
        self.adjoint = self
    elif self.bc in self.supported_bc:
        if adjoint is None:
            # The adjoint uses the "_adj" variant of the boundary conditions
            # (or drops the suffix if this operator already is the adjoint).
            adj_bc = self.bc[:-4]
            if self.bc[-4:] != "_adj":
                adj_bc = "%s_adj" % self.bc
            self.adjoint = LaplacianOp(imagedims, nchannels, imageh=imageh,
                                       boundary=adj_bc, adjoint=self)
        else:
            self.adjoint = adjoint
    else:
        raise Exception("Unknown boundary conditions: %s" % self.bc)
    if self.bc[-4:] == "_adj":
        self.spmat = self.adjoint.spmat.T
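# Hedged usage sketch (not part of the library): it assumes a LaplacianOp
# built as above exposes its sparse matrix in self.spmat and acts on the
# flattened variable x of shape (npoints, nchannels); the raveling order of
# the flattened vector is an assumption of this example.
import numpy as np
lplcn = LaplacianOp((32, 32), 2, boundary="neumann")
u = np.random.rand(32 * 32, 2)
lu = lplcn.spmat.dot(u.ravel()).reshape(32 * 32, 2)
assert lplcn.adjoint is lplcn  # Neumann boundary conditions: self-adjoint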
def __init__(self, M, N=None, adjoint=None):
    LinOp.__init__(self)
    N = M if N is None else N
    self.x = Variable(N)
    self.y = Variable(M)
    self.adjoint = ZeroOp(N, M, adjoint=self) if adjoint is None else adjoint
    self._call_cpu = self._call_gpu = self._call
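# Minimal sketch, assuming the constructor above: the adjoint of the zero map
# from R^N to R^M is the zero map from R^M to R^N, so ZeroOp builds its own
# adjoint (another ZeroOp with M and N swapped) unless one is passed in.
z = ZeroOp(5, 3)               # maps a size-3 variable to a size-5 variable
assert z.adjoint.adjoint is z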
def __init__(self, M, lbd=1.0, tmp=None):
    LinOp.__init__(self)
    self.M = M
    self.lbd = lbd
    self.x = M.x
    self.y = M.x
    self.tmp = M.y.new() if tmp is None else tmp
    self.adjoint = self
def __init__(self, N, A):
    LinOp.__init__(self)
    self.x = Variable((N, A.size))
    self.y = Variable((N, A.size))
    self.A = A
    self.adjoint = self
    self.spmat = einsumop('k,ik->ik', self.A, self.x[0]['shape'])
    self._kernel = None
    self.A_gpu = None
def __init__(self, A, tau, sigma):
    LinOp.__init__(self)
    self.A = A
    self.tau = tau
    self.sigma = sigma
    self.x = Variable((A.x.size,), (A.y.size,))
    self.y = self.x
    self.xtmp = self.x.new()
    self.K = None
    self.H = None
    self.adjoint = SemismoothNewtonSystemAdjoint(self)
def __init__(self, N, M, lbd):
    # xnorms[i] = 1.0/|xbar[i,:,:]|_2
    # exterior = (xbar > lbd)
    LinOp.__init__(self)
    self.x = Variable((N, M[0] * M[1]))
    self.y = self.x
    self.lbd = lbd
    self.adjoint = self
    self.extind = np.zeros(N, dtype=bool)
    self.intind = np.zeros(N, dtype=bool)
    self.xbar_normed = self.x.vars(self.x.new())[0]
    self.lbd_norms = np.zeros(N)
def __init__(self, K, N, P, B, adjoint=None):
    LinOp.__init__(self)
    assert P.shape[0] == B.shape[0]
    assert P.shape[1] == B.shape[2]
    self.x = Variable((B.shape[0], N, B.shape[1]))
    self.y = Variable((N, K))
    self.P = P
    self.B = B
    if adjoint is None:
        self.adjoint = IndexedMult(K, N, B, P, adjoint=self)
    else:
        self.adjoint = adjoint
    self._kernel = None
    self.spmat = self.adjoint.spmat.T
def __init__(self, N, A, trans=False, adjoint=None):
    LinOp.__init__(self)
    (k, j) = (1, 0) if trans else (0, 1)
    self.x = Variable((N, A.shape[k]))
    self.y = Variable((N, A.shape[j]))
    self.trans = trans
    self.A = A
    if adjoint is None:
        subscripts = 'jk,ik->ij' if self.trans else 'kj,ik->ij'
        self.spmat = einsumop(subscripts, self.A, self.x[0]['shape'])
        self.adjoint = MatrixMultR(N, A, trans=not trans, adjoint=self)
    else:
        self.adjoint = adjoint
        self.spmat = self.adjoint.spmat.T
    self._kernel = None
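# Hedged sketch of what the einsum subscripts encode (plain NumPy, not the
# library's code path): for trans=False, 'kj,ik->ij' is right-multiplication
# y = x @ A; for trans=True, 'jk,ik->ij' is y = x @ A.T.  Shapes follow the
# Variable declarations in the constructor above.
import numpy as np
N, A = 4, np.random.rand(3, 5)
x = np.random.rand(N, A.shape[0])        # trans=False: x has shape (N, 3)
y = np.einsum('kj,ik->ij', A, x)         # y has shape (N, 5)
assert np.allclose(y, x.dot(A))
xt = np.random.rand(N, A.shape[1])       # trans=True: x has shape (N, 5)
yt = np.einsum('jk,ik->ij', A, xt)       # yt has shape (N, 3)
assert np.allclose(yt, xt.dot(A.T))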
def __init__(self, N, A, trans=False, adjoint=None):
    LinOp.__init__(self)
    (j, m) = (2, 0) if trans else (0, 2)
    self.x = Variable((A.shape[j], N, A.shape[j + 1]))
    self.y = Variable((A.shape[m], N, A.shape[m + 1]))
    self.trans = trans
    self.A = A
    if adjoint is None:
        subscripts = 'mkjl,jil->mik' if self.trans else 'jlmk,jil->mik'
        self.spmat = einsumop(subscripts, self.A, self.x[0]['shape'])
        self.adjoint = TangledMatrixMultR(N, A, trans=not trans, adjoint=self)
    else:
        self.adjoint = adjoint
        self.spmat = self.adjoint.spmat.T
    self._kernel = None
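# Hedged NumPy check of the "tangled" contraction (trans=False subscripts
# 'jlmk,jil->mik'): A of shape (J, L, M, K) maps x of shape (J, N, L) to
# y of shape (M, N, K) with y[m,i,k] = sum_{j,l} A[j,l,m,k] * x[j,i,l].
import numpy as np
J, L, M, K, N = 2, 3, 4, 5, 6
A = np.random.rand(J, L, M, K)
x = np.random.rand(J, N, L)
y = np.einsum('jlmk,jil->mik', A, x)
assert y.shape == (M, N, K)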
def __init__(self, K, N, B, P, adjoint=None):
    LinOp.__init__(self)
    assert P.shape[0] == B.shape[0]
    assert P.shape[1] == B.shape[2]
    self.x = Variable((N, K))
    self.y = Variable((B.shape[0], N, B.shape[1]))
    self.P = P
    self.B = B
    self._kernel = None
    spP = [idxop(Pj, K) for Pj in P]
    spP = einsumop("jlk,ik->jil", spP, dims={'i': N})
    self.spmat = -einsumop("jml,jil->jim", B, dims={'i': N}).dot(spP)
    if adjoint is None:
        self.adjoint = IndexedMultAdj(K, N, P, B, adjoint=self)
    else:
        self.adjoint = adjoint
def __init__(self, N, A, trans=False, adjoint=None):
    LinOp.__init__(self)
    (m, k) = (2, 1) if trans else (1, 2)
    self.x = Variable((A.shape[0], N, A.shape[m]))
    self.y = Variable((A.shape[0], N, A.shape[k]))
    self.trans = trans
    self.A = A
    if adjoint is None:
        subscripts = 'jkm,jlm->jlk' if self.trans else 'jmk,jlm->jlk'
        self.spmat = einsumop(subscripts, self.A, self.x[0]['shape'])
        self.adjoint = MatrixMultRBatched(N, A, trans=not trans, adjoint=self)
    else:
        self.adjoint = adjoint
        self.spmat = self.adjoint.spmat.T
    self._kernel = None
def __init__(self, imagedims, nchannels, imageh=None, weights=None,
             adjoint=None):
    LinOp.__init__(self)
    ndims = len(imagedims)
    npoints = np.prod(imagedims)
    self.imagedims = imagedims
    self.nchannels = nchannels
    self.x = Variable((npoints, ndims, nchannels))
    self.y = Variable((npoints, nchannels))
    self.imageh = np.ones(ndims) if imageh is None else imageh
    self.weights = np.ones(nchannels) if weights is None else weights
    self._kernels = None
    if adjoint is None:
        self.adjoint = GradientOp(imagedims, nchannels, imageh=self.imageh,
                                  weights=self.weights, adjoint=self)
    else:
        self.adjoint = adjoint
    self.spmat = self.adjoint.spmat.T
def __init__(self, imagedims, nchannels, scheme="centered", imageh=None,
             weights=None, adjoint=None):
    LinOp.__init__(self)
    ndims = len(imagedims)
    npoints = np.prod(imagedims)
    self.imagedims = imagedims
    self.nchannels = nchannels
    self.x = Variable((npoints, nchannels))
    self.y = Variable((npoints, ndims, nchannels))
    self.scheme = scheme
    self.imageh = np.ones(ndims) if imageh is None else imageh
    self.weights = np.ones(nchannels) if weights is None else weights
    self._kernels = None
    self.spmat = diffopn(self.imagedims, components=self.nchannels,
                         steps=self.imageh, weights=self.weights,
                         schemes=self.scheme)
    if adjoint is None:
        self.adjoint = DivergenceOp(imagedims, nchannels, imageh=self.imageh,
                                    weights=self.weights, adjoint=self)
    else:
        self.adjoint = adjoint
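# Usage sketch under stated assumptions: a GradientOp maps the flattened
# variable of shape (npoints, nchannels) to (npoints, ndims, nchannels) via
# the sparse matrix built by diffopn, and the DivergenceOp it constructs as
# its adjoint takes spmat as the transpose.  The raveling order of the
# flattened vectors is an assumption of this example.
import numpy as np
imagedims, nchannels = (16, 16), 3
grad = GradientOp(imagedims, nchannels)
npoints, ndims = np.prod(imagedims), len(imagedims)
u = np.random.rand(npoints, nchannels)
du = grad.spmat.dot(u.ravel()).reshape(npoints, ndims, nchannels)
div = grad.adjoint
assert div.spmat.shape == (grad.spmat.shape[1], grad.spmat.shape[0])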
def __init__(self, N):
    LinOp.__init__(self)
    self.x = Variable(N)
    self.y = Variable(N)
    self.adjoint = self
    self._call_cpu = self._call_gpu = self._call
def __init__(self, M):
    LinOp.__init__(self)
    self.M = M
    self.x = M.y
    self.y = M.x
    self.adjoint = M
def __init__(self, N, fact):
    LinOp.__init__(self)
    self.x = Variable(N)
    self.y = Variable(N)
    self.adjoint = self
    self.fact = fact
def __init__(self, N, keep):
    LinOp.__init__(self)
    self.x = Variable(N)
    self.y = Variable(N)
    self.adjoint = self
    self.keep = keep.astype(bool)
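# A small consistency check one might run against any of the operators above
# that expose spmat (a sketch; check_adjoint is a hypothetical helper, not
# library code): since the adjoint stores the transposed sparse matrix,
# <A x, y> should equal <x, A^T y> for the flattened vectors.
import numpy as np
def check_adjoint(op, rng=np.random.default_rng(0)):
    x = rng.standard_normal(op.spmat.shape[1])
    y = rng.standard_normal(op.spmat.shape[0])
    return np.isclose(op.spmat.dot(x).dot(y), op.adjoint.spmat.dot(y).dot(x))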