Example #1
0
    def nucnorm(self, x):
        """Calculate nuclear norm

        This method returns the nuclear norm error of the deconvolved data in
        matrix form

        Parameters
        ----------
        x : np.ndarray
            Deconvolved data array

        Returns
        -------
        float nuclear norm value

        """

        # Reshape the data cube into 2D matrix form before taking the norm.
        x_prime = cube2matrix(x)

        nuc_norm = nuclear_norm(x_prime)

        if self.print_cost:
            # print() with a single pre-formatted string produces the same
            # output as the original Python 2-only print statement, but also
            # works under Python 3.
            print(' - NUCLEAR NORM: {}'.format(nuc_norm))

        return nuc_norm
Example #2
0
    def nucnorm(self, x):
        """Calculate nuclear norm

        This method returns the nuclear norm of the deconvolved data,
        reshaped into matrix form according to the data format.

        Parameters
        ----------
        x : np.ndarray
            Deconvolved data array

        Returns
        -------
        float nuclear norm value

        """

        # Select the appropriate 2D reshaping for the configured layout:
        # 'map' data is flattened with map2matrix, anything else is treated
        # as a cube.
        if self.data_format == 'map':
            x_prime = map2matrix(x)

        else:
            x_prime = cube2matrix(x)

        nuc_norm = nuclear_norm(x_prime)

        if self.print_cost:
            # Cross-version print (same output as the original Python 2
            # print statement).
            print(' - NUCLEAR NORM: {}'.format(nuc_norm))

        return nuc_norm
Example #3
0
    def descent(self, tolerance=1e-4, max_iter=150):
        """Perform gradient descent

        This method iteratively updates the reconstruction until the cost
        (0.5 * l2norm^2 + threshold * nuclear norm) changes by less than
        the given tolerance, or the iteration limit is reached.

        Parameters
        ----------
        tolerance : float, optional
            Convergence threshold on the absolute change in cost between
            consecutive iterations. Default (1e-4)
        max_iter : int, optional
            Maximum number of iterations. Default (150)

        Returns
        -------
        np.ndarray reconstructed (deconvolved) data

        """

        self.operator.get_spec_rad()
        cost0 = 0.0

        for i in range(max_iter):

            # One gradient step on the current reconstruction.
            grad = self.operator.grad_step(self.algorithm.data_rec, self.data)
            self.algorithm.check_threshold(i, grad)
            self.algorithm.update(grad, self.operator.inv_spec_rad)

            # Residual norms before and after the update; an increase
            # indicates overshoot, so the acceleration is switched off.
            l2norm0 = norm(self.data - pca_convolve(self.algorithm.data_rec_prev, self.operator.psf_pcs, self.operator.psf_coef), 2)
            l2norm1 = norm(self.data - pca_convolve(self.algorithm.data_rec, self.operator.psf_pcs, self.operator.psf_coef), 2)

            if l2norm1 > l2norm0:
                self.algorithm.speed_switch(False)

            # NOTE(review): this recomputes the post-update residual norm;
            # it only equals l2norm1 if speed_switch does not modify
            # data_rec, so the recomputation is kept for safety.
            nuc_norm = nuclear_norm(map2matrix(self.algorithm.data_rec,
                                    self.layout))
            l2norm = norm(self.data - pca_convolve(self.algorithm.data_rec,
                          self.operator.psf_pcs, self.operator.psf_coef), 2)
            cost = (0.5 * l2norm ** 2 + self.algorithm.thresh * nuc_norm)

            # All prints converted from Python 2 print statements to
            # cross-version print() calls with identical output.
            print(' - i: {} {} {} {}'.format(i, cost, l2norm, nuc_norm))

            if np.abs(cost - cost0) < tolerance:
                print(' - Gradient Descent converged after %d iterations!' %
                      (i + 1))
                print(' - Final cost, l2norm, nuc_norm: {} {} {}'.format(
                      cost, l2norm, nuc_norm))
                break

            elif i == max_iter - 1:
                print(' - Gradient Descent did not converge after %d iterations!'
                      % max_iter)
                print(' - Final cost, l2norm, nuc_norm: {} {} {}'.format(
                      cost, l2norm, nuc_norm))

            cost0 = cost

        return self.algorithm.data_rec