Пример #1
0
def sparse_signal():
    """Load the bundled test image, binarize it, and return its 8x8 patches."""
    here = os.path.dirname(os.path.realpath(__file__))
    image = np.load(os.path.join(here, 'test_img1.npy')).astype(float)
    # Normalize to [0, 1], then hard-threshold at 0.4 to sparsify the signal.
    binary = filters.threshold(utils.normalize(image), 0.4)
    return preprocess.Patches(binary, 8).patches
Пример #2
0
def test_threshold_hard():
    """Hard-threshold behavior of filters.threshold on a constant vector."""
    data = np.ones(10)

    # Everything strictly below min_val is zeroed out.
    assert np.allclose(filters.threshold(data.copy(), min_val=1.1), np.zeros(10))

    # Values above max_val are dropped as well.
    assert np.allclose(filters.threshold(data.copy(), min_val=0.5, max_val=0.9), np.zeros(10))

    # Values at or above min_val survive unchanged.
    assert np.allclose(filters.threshold(data.copy(), min_val=0.9), data)

    # Mixed vector: only entries passing the threshold are kept.
    data[:5] = 0.7
    result = filters.threshold(data.copy(), min_val=0.9)
    data[:5] = 0
    assert np.allclose(result, data)
Пример #3
0
def reconstruct(rng):
    """Denoise slices [start, stop) of IMAGE_VOLUME with the shared DICT.

    rng is a (rank, start, stop) tuple; the denoised chunk is written to
    <slice_dir>/slice_<start>_<stop>.npy.
    """
    rank, start, stop = rng
    chunk = IMAGE_VOLUME[start:stop]

    patch_edge = 16   # square patches, (16, 16)
    sigma = 10

    for idx in range(chunk.shape[0]):
        prepared = filters.threshold(utils.normalize(chunk[idx]), 0.4)
        denoiser = dl.Denoise(prepared, patch_edge)
        denoiser.dictionary = DICT
        chunk[idx] = denoiser.denoise(sigma).copy()

    out_name = 'slice_%d_%d.npy' % (start, stop)
    np.save(os.path.join(slice_dir(), out_name), chunk)
Пример #4
0
def reconstruct_adept(data):
    """Denoise slices [start, stop) of IMAGE_VOLUME, each with its own
    per-slice dictionary, and save the result.

    data: (rank, start, stop) tuple describing this worker's slice range.
    Writes the denoised chunk to <slice_dir>/slice_<start>_<stop>.npy.
    """
    rank, start, stop = data
    image = IMAGE_VOLUME[start:stop]
    fn = 'dct_slice_%d.npy'
    slice_path = slice_dir()

    for i in range(image.shape[0]):
        # BUG FIX: index into this worker's chunk, not the whole volume.
        # The original read IMAGE_VOLUME[i], so every worker processed
        # slices 0..len(chunk) regardless of `start` (compare reconstruct()).
        img = image[i]
        img = filters.threshold(utils.normalize(img), 0.4)
        patch_size = (16, 16)
        sigma = 10
        # Per-slice dictionary is keyed by the global slice index start + i.
        # NOTE(review): assumes the dct_slice_%d.npy files were written with
        # global indices -- confirm against the dictionary-training step.
        path = os.path.join(slice_path, fn % (start + i))
        D = np.load(path)
        den = dl.Denoise(img, patch_size[0])
        den.dictionary = D
        denoised = den.denoise(sigma)
        image[i] = denoised.copy()

    slices_dir = slice_dir()
    fn = 'slice_%d_%d.npy' % (start, stop)
    np.save(os.path.join(slices_dir, fn), image)
Пример #5
0
                    help='Visualize the dictionary')

# --- remaining CLI options ---
parser.add_argument('--smooth', type=int,
                    help='Apply gaussian smoothing with kernel size SIZE (odd)')

parser.add_argument('-j', '--n-threads', type=int, default=1,
                    help="Number of threads to use, default 1")

args = parser.parse_args()

# Load the input image and normalize it to [0, 1].
image = misc.imread(args.image)
image = utils.normalize(image)


# Optional hard threshold before denoising.
if args.threshold is not None:
    image = filters.threshold(image, args.threshold)

# Rescale to the 0-255 intensity range before K-SVD denoising.
# NOTE(review): presumably ksvd_denoise expects 8-bit-range input -- confirm.
image *= 255

patch_size = args.patch_size
n_atoms = args.num_atoms
iters = args.iters
sigma = args.sigma


# Time the denoising run (t1 is the start timestamp).
t1 = timeit.default_timer()

denoised = ksvd_denoise(image, patch_size, iters, n_atoms,
                        sigma, verbose=True, retDict=args.vis_dict,
                        n_threads=args.n_threads)
Пример #6
0
from __future__ import print_function
import os
import sys
import multiprocessing

import numpy as np

import dictlearn as dl
from dictlearn import utils, filters


# Shared read-only state for the worker processes: the pre-trained
# dictionary and the image volume, normalized to [0, 1] and
# hard-thresholded at 0.4 once at import time.
DICT = np.load('frame_dict_trained.npy')
IMAGE_VOLUME = np.load('images/image_volume.npy')
IMAGE_VOLUME = utils.normalize(IMAGE_VOLUME)
IMAGE_VOLUME = filters.threshold(IMAGE_VOLUME, 0.4)


def slice_dir():
    """Return the path to ~/slices, creating the directory if needed."""
    target = os.path.join(os.path.expanduser('~'), 'slices')
    try:
        os.mkdir(target)
    except OSError:
        # Directory already exists (or cannot be created); return the
        # path either way, matching best-effort creation semantics.
        pass
    return target


def train(data):
    rank, start, stop = data
    image = IMAGE_VOLUME[start:stop]