Example #1
0
def test_normalize():
    """normalize() should linearly map an array's value range onto [0, 1]."""
    a = np.arange(11)
    aa = utils.normalize(a.copy())

    assert abs(aa.max() - 1) < 1e-12
    assert abs(aa.min()) < 1e-12
    assert np.allclose(aa, a / 10.0)

    # A shifted range must normalize to the same values. Pass a copy for
    # consistency with the first case, in case normalize() works in place.
    ab = np.arange(-5, 6)
    aa = utils.normalize(ab.copy())
    assert abs(aa.max() - 1) < 1e-12
    assert abs(aa.min()) < 1e-12
    # Expected values spelled out in terms of the actual input:
    # (ab - ab.min()) / (ab.max() - ab.min()) == (ab + 5) / 10.
    assert np.allclose(aa, (ab + 5) / 10.0)
Example #2
0
def sparse_signal():
    """Load the bundled test image, normalize and threshold it at 0.4,
    and return its 8x8 patch matrix."""
    here = os.path.dirname(os.path.realpath(__file__))
    raw = np.load(os.path.join(here, 'test_img1.npy')).astype(float)
    thresholded = filters.threshold(utils.normalize(raw), 0.4)
    return preprocess.Patches(thresholded, 8).patches
Example #3
0
def reconstruct(rng):
    """Denoise slices [start, stop) of IMAGE_VOLUME in place using the
    shared DICT dictionary, then save the denoised range to slice_dir().

    rng: (rank, start, stop) tuple; rank is unused here.
    """
    rank, start, stop = rng
    new_img = IMAGE_VOLUME[start:stop]

    for idx in range(new_img.shape[0]):
        # Normalize and threshold each slice before denoising it.
        prepared = filters.threshold(utils.normalize(new_img[idx]), 0.4)

        # 16x16 patches, noise level sigma = 10.
        den = dl.Denoise(prepared, 16)
        den.dictionary = DICT
        new_img[idx] = den.denoise(10).copy()

    out_name = 'slice_%d_%d.npy' % (start, stop)
    np.save(os.path.join(slice_dir(), out_name), new_img)
Example #4
0
def reconstruct_adept(data):
    """Denoise slices [start, stop) of IMAGE_VOLUME, each with its own
    dictionary loaded from slice_dir(), and save the denoised range.

    data: (rank, start, stop) tuple; rank is unused here.
    """
    rank, start, stop = data
    image = IMAGE_VOLUME[start:stop]
    dict_fn = 'dct_slice_%d.npy'
    slice_path = slice_dir()

    for i in range(image.shape[0]):
        # BUG FIX: index into the local [start:stop] slice, not the whole
        # volume. The original read IMAGE_VOLUME[i], which always processed
        # slices 0..(stop-start) regardless of `start`, while writing the
        # results into image[i] (i.e. IMAGE_VOLUME[start + i]).
        img = filters.threshold(utils.normalize(image[i]), 0.4)
        patch_size = (16, 16)
        sigma = 10
        # NOTE(review): dictionary files are looked up by the loop-local i;
        # if they are saved per *global* slice index this should be
        # `start + i` — confirm against the code that writes these files.
        D = np.load(os.path.join(slice_path, dict_fn % i))
        den = dl.Denoise(img, patch_size[0])
        den.dictionary = D
        denoised = den.denoise(sigma)
        image[i] = denoised.copy()

    fn = 'slice_%d_%d.npy' % (start, stop)
    np.save(os.path.join(slice_path, fn), image)
Example #5
0
# Command-line options for the denoising script. `parser` is created
# earlier in the file (outside this excerpt).
parser.add_argument('-f', '--format', default='png', help='Format to save image')
parser.add_argument('-t', '--threshold', type=float, help='Threshold original' + 
                    ' image at this minimum intensity')
parser.add_argument('--vis-dict', action='store_true',
                    help='Visualize the dictionary')

parser.add_argument('--smooth', type=int,
                    help='Apply gaussion smoothing with kernel size SIZE (odd)')

parser.add_argument('-j', '--n-threads', type=int, default=1,
                    help="Number of threads to use, default 1")

args = parser.parse_args()

# NOTE(review): scipy.misc.imread is deprecated and removed in SciPy >= 1.2;
# consider imageio.imread as a drop-in replacement.
image = misc.imread(args.image)
image = utils.normalize(image)


# Optional intensity threshold, applied only when the flag is given.
if args.threshold is not None:
    image = filters.threshold(image, args.threshold)

# Scale the normalized image back to the 0-255 range.
image *= 255

# Dictionary-learning hyperparameters taken from the CLI.
patch_size = args.patch_size
n_atoms = args.num_atoms
iters = args.iters
sigma = args.sigma


# Start timing the training/denoising run that follows this excerpt.
t1 = timeit.default_timer()
Example #6
0
from __future__ import print_function
import os
import sys
import multiprocessing

import numpy as np

import dictlearn as dl
from dictlearn import utils, filters


# Trained dictionary shared (read-only) by every worker process.
DICT = np.load('frame_dict_trained.npy')
# Full image stack, normalized (per test_normalize: min -> 0, max -> 1)
# and then thresholded at 0.4 once up front.
IMAGE_VOLUME = np.load('images/image_volume.npy')
IMAGE_VOLUME = utils.normalize(IMAGE_VOLUME)
IMAGE_VOLUME = filters.threshold(IMAGE_VOLUME, 0.4)


def slice_dir():
    """Return the path to ~/slices, creating the directory if possible."""
    target = os.path.join(os.path.expanduser('~'), 'slices')
    try:
        os.mkdir(target)
    except OSError:
        # Directory already exists (or cannot be created) — best effort,
        # matching the original behavior of ignoring the error.
        pass
    return target


def train(data):
    rank, start, stop = data
    image = IMAGE_VOLUME[start:stop]