Example No. 1
from __future__ import print_function
from builtins import input
from builtins import range

import numpy as np

from sporco.admm import spline
from sporco import util
from sporco import metric
from sporco import plot

"""
Load reference image.
"""

img = util.ExampleImages().image('monarch.png', scaled=True,
                                 idxexp=np.s_[:,160:672], gray=True)


"""
Construct test image corrupted by 20% salt & pepper noise.
"""

np.random.seed(12345)
imgn = util.spnoise(img, 0.2)


"""
Set regularization parameter and options for ℓ1-spline solver. The regularization parameter used here has been manually selected for good performance.
"""

lmbda = 5.0
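"""
A possible continuation (assumed option values): initialise and run the
ℓ1-spline solver on the noisy image.
"""

opt = spline.SplineL1.Options({'Verbose': True, 'MaxMainIter': 200,
                               'RelStopTol': 5e-3})
b = spline.SplineL1(imgn, lmbda, opt)
x = b.solve()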
Example No. 2
from builtins import input
from builtins import range

import pyfftw  # See https://github.com/pyFFTW/pyFFTW/issues/40
import numpy as np

from sporco import util
from sporco import plot
import sporco.metric as sm
from sporco.admm import cbpdn
"""
Load example image.
"""

img = util.ExampleImages().image('kodim23.png',
                                 scaled=True,
                                 idxexp=np.s_[160:416, 60:316])
"""
Highpass filter example image.
"""

npd = 16
fltlmbd = 10
sl, sh = util.tikhonov_filter(img, fltlmbd, npd)
"""
Load colour dictionary and display it.
"""

D = util.convdicts()['RGB:8x8x3x64']
plot.imview(util.tiledict(D), fgsz=(7, 7))
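"""
A possible continuation (assumed regularisation parameter and options): solve
the convolutional sparse coding problem for the highpass component.
"""

lmbda = 1e-2
opt = cbpdn.ConvBPDN.Options({'Verbose': True, 'MaxMainIter': 200,
                              'RelStopTol': 5e-3, 'AuxVarObj': False})
b = cbpdn.ConvBPDN(D, sh, lmbda, opt)
X = b.solve()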
"""
Example No. 3
def pad(x, n=8):

    if x.ndim == 2:
        return np.pad(x, n, mode='symmetric')
    else:
        return np.pad(x, ((n, n), (n, n), (0, 0)), mode='symmetric')


def crop(x, n=8):

    return x[n:-n, n:-n]


"""
Load a reference image and corrupt it with 33% salt and pepper noise. (The call to ``np.random.seed`` ensures that the pseudo-random noise is reproducible.)
"""

img = util.ExampleImages().image('monarch.png',
                                 zoom=0.5,
                                 scaled=True,
                                 idxexp=np.s_[:, 160:672])
np.random.seed(12345)
imgn = util.spnoise(img, 0.33)
"""
We use a colour dictionary. The impulse denoising problem is solved by appending some additional filters to the learned dictionary ``D0``, which is one of those distributed with SPORCO. These additional components consist of a set of three impulse filters, one per colour channel, that will represent the low frequency image components when used together with a gradient penalty on the coefficient maps, as discussed below.
"""

D0 = util.convdicts()['RGB:8x8x3x64']
Di = np.zeros(D0.shape[0:2] + (3, 3), dtype=np.float32)
np.fill_diagonal(Di[0, 0], 1.0)
D = np.concatenate((Di, D0), axis=3)
"""
The problem is solved using class :class:`.admm.cbpdn.ConvL1L1Grd`, which implements a convolutional sparse coding problem with an $\ell_1$ data fidelity term, an $\ell_1$ regularisation term, and an additional gradient regularization term :cite:`wohlberg-2016-convolutional2`, as defined above. The regularization parameters for the $\ell_1$ and gradient terms are ``lmbda`` and ``mu`` respectively. Setting correct weighting arrays for these regularization terms is critical to obtaining good performance. For the $\ell_1$ norm, the weights on the filters that are intended to represent low frequency components are set to zero (we only want them penalised by the gradient term), and the weights of the remaining filters are set to unity. For the gradient penalty, all weights are set to zero except for those corresponding to the filters intended to represent low frequency components, which are set to unity.
"""
Example No. 4
"""Basic tvl2.TVL2Deconv usage example (denoising problem)"""

from __future__ import print_function
from builtins import input
from builtins import range

import numpy as np

from sporco import util
from sporco import plot
from sporco.admm import tvl2


# Load reference image
img = util.ExampleImages().image('lena.grey', scaled=True)


# Construct test image
np.random.seed(12345)
imgn = img + np.random.normal(0.0, 0.04, img.shape)


# Set up TVL2Deconv options
lmbda = 0.04
opt = tvl2.TVL2Deconv.Options({'Verbose' : True, 'MaxMainIter' : 200,
                               'gEvalY' : False})


# Initialise and run TVL2Deconv object
b = tvl2.TVL2Deconv(np.ones((1,1)), imgn, lmbda, opt)
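# A possible continuation (sketch): run the solver and display the result
imgr = b.solve()
plot.imview(imgr, title='Denoised image', fgsz=(6, 6))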
Example No. 5
from builtins import input
from builtins import range

import pyfftw   # See https://github.com/pyFFTW/pyFFTW/issues/40
import numpy as np

from sporco.dictlrn import cbpdndl
from sporco import util
from sporco import plot


"""
Load training images.
"""

exim = util.ExampleImages(scaled=True, zoom=0.5)
img1 = exim.image('barbara.png', idxexp=np.s_[10:522, 100:612])
img2 = exim.image('kodim23.png', idxexp=np.s_[:, 60:572])
img3 = exim.image('monarch.png', idxexp=np.s_[:, 160:672])
S = np.stack((img1, img2, img3), axis=3)


"""
Highpass filter training images.
"""

npd = 16
fltlmbd = 5
sl, sh = util.tikhonov_filter(S, fltlmbd, npd)
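"""
A possible continuation (assumed dictionary size and parameter values):
construct a random initial dictionary and run convolutional dictionary
learning on the highpass components.
"""

np.random.seed(12345)
D0 = np.random.randn(8, 8, 3, 64)
lmbda = 0.2
opt = cbpdndl.ConvBPDNDictLearn.Options({'Verbose': True, 'MaxMainIter': 100})
d = cbpdndl.ConvBPDNDictLearn(D0, sh, lmbda, opt)
D1 = d.solve()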

Example No. 6
"""Usage example: tvl1.TVL1Denoise (colour image)"""

from __future__ import print_function
from builtins import input
from builtins import range

import numpy as np

from sporco import util
from sporco import plot
from sporco.admm import tvl1


# Load reference image
img = util.ExampleImages().image('standard', 'monarch.png',
                                 scaled=True)[:,160:672]


# Construct test image
np.random.seed(12345)
imgn = util.spnoise(img, 0.2)


# Set up TVL1Denoise options
lmbda = 8e-1
opt = tvl1.TVL1Denoise.Options({'Verbose' : True, 'MaxMainIter' : 200,
                                'RelStopTol' : 5e-3, 'gEvalY' : False})


# Initialise and run TVL1Denoise object
b = tvl1.TVL1Denoise(imgn, lmbda, opt, caxis=2)
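# A possible continuation (sketch): run the solver and display the result
imgr = b.solve()
plot.imview(imgr, title='Denoised image', fgsz=(6, 6))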
Example No. 7
"""Usage example: cbpdn.ConvBPDN (colour images, colour dictionary)"""

from __future__ import print_function
from builtins import input
from builtins import range

import numpy as np

from sporco import util
from sporco import plot
from sporco.admm import cbpdn
import sporco.metric as sm

# Load demo image
img = util.ExampleImages().image('standard',
                                 'barbara.png',
                                 scaled=True,
                                 zoom=0.5)[27:283, 55:311]

# Highpass filter test image
npd = 16
fltlmbd = 10
sl, sh = util.tikhonov_filter(img, fltlmbd, npd)

# Load dictionary
D = util.convdicts()['RGB:8x8x3x64']

# Set up ConvBPDN options
lmbda = 1e-2
opt = cbpdn.ConvBPDN.Options({
    'Verbose': True,
    'MaxMainIter': 200,
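    # A possible continuation (assumed remaining options)
    'RelStopTol': 5e-3,
    'AuxVarObj': False})

# Initialise and run the ConvBPDN solver on the highpass component (sketch)
b = cbpdn.ConvBPDN(D, sh, lmbda, opt)
X = b.solve()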
Example No. 8
    crop_op.append(slice(0, l))
crop_op = tuple(crop_op)
Dr0 = cnvrep.getPcn(Dr0.shape, cri.Nv, cri.dimN, cri.dimCd,
                    zm=False)(cnvrep.zpad(Dr0, cri.Nv))[crop_op]
# Dr = normalize(Dr, axis=cri.axisM)

# Xr = l2norm_minimize(cri, Dr0, Shr)
# Dr = mysolve(cri, Dr0, Xr, Shr, 1e-4, maxitr=50, debug_dir='./debug')
# # Dr = nakashizuka_solve(cri, Dr0, Xr, Shr, debug_dir='./debug')
# # Dr = sporcosolve(cri, Dr, Shr)
# # fig = plot.figure(figsize=(7, 7))
# # plot.imview(util.tiledict(Dr.squeeze()), fig=fig)
# # fig.savefig('dict.png')
# # # evaluate_result(cri, Dr0, Dr, Shr, Sr_add=Slr)


exim1 = util.ExampleImages(scaled=True, zoom=0.5, pth='./')
S1_test = exim1.image('couple.tiff')
exim2 = util.ExampleImages(scaled=True, zoom=1, pth='./')
S2_test = exim2.image('LENNA.bmp')
S_test = np.dstack((S1_test, S2_test))
cri_test = cnvrep.CSC_ConvRepIndexing(D, S_test)
Sl_test, Sh_test = util.tikhonov_filter(S_test, 5, 16)
Slr_test = np.asarray(Sl_test.reshape(cri_test.shpS), dtype=S_test.dtype)
Shr_test = np.asarray(Sh_test.reshape(cri_test.shpS), dtype=S_test.dtype)

# evaluate_result(cri, Dr0, Dr, Shr_test, Sr_add=Slr_test, lmbda=5e-3)

outdir = './no_low-pass'


#-------- Specify which methods to run in the experiment --------
Example No. 9
          ax=ax[0])
plot.contour(z,
             x,
             y,
             xlbl='x',
             ylbl='y',
             title='Contour Plot Example',
             fig=fig,
             ax=ax[1])
fig.show()
"""
Load an example colour image and create a corresponding grayscale version.
"""

imgc = util.ExampleImages().image('kodim23.png',
                                  scaled=True,
                                  idxexp=np.s_[150:500, 30:380])
imgg = signal.rgb2gray(imgc)
"""
Display the example colour image.
"""

plot.imview(imgc, title='Image View Example', fgsz=(6, 6))
"""
Display the grayscale image with a false-colour map, together with a colour
bar for the map.
"""

plot.imview(imgg,
            cmap=plot.cm.coolwarm,
            title='Image View Example',
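            # A possible completion of the call (figure size assumed):
            # 'cbar' adds the colour bar described above
            cbar=True,
            fgsz=(6, 6))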
Example No. 10
from __future__ import division
from __future__ import print_function
from builtins import input
from builtins import range

import numpy as np
from scipy.ndimage.interpolation import zoom

from sporco.admm import bpdn
from sporco.admm import cmod
from sporco.admm import dictlrn
from sporco import util
from sporco import plot

# Training images
exim = util.ExampleImages(scaled=True)
img1 = exim.image('lena.grey')
img2 = exim.image('barbara.grey')
img3 = exim.image('kiel.grey')
img4 = util.rgb2gray(exim.image('mandrill'))
img5 = exim.image('man.grey')[100:612, 100:612]

# Reduce images size to speed up demo script
S1 = zoom(img1, 0.5)
S2 = zoom(img2, 0.5)
S3 = zoom(img3, 0.5)
S4 = zoom(img4, 0.5)
S5 = zoom(img5, 0.5)

# Extract all 8x8 image blocks, reshape, and subtract block means
S = util.imageblocks((S1, S2, S3, S4, S5), (8, 8))
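# A possible continuation (assumed, following the comment above): reshape the
# block array so that each column is one block, then subtract per-block means
S = np.reshape(S, (np.prod(S.shape[0:2]), S.shape[2]))
S -= np.mean(S, axis=0)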
Example No. 11
    def test_17(self):
        pth = os.path.join(os.path.dirname(util.__file__), 'data')
        ei = util.ExampleImages(pth=pth)
        im = ei.images()
        assert len(im) > 0
Example No. 12
from __future__ import print_function
from builtins import input
from builtins import range

import numpy as np

from sporco import util
from sporco import plot
from sporco.admm import cbpdn
import sporco.metric as sm

import sporco_cuda.cbpdn as cucbpdn

# Load demo image
img = util.ExampleImages().image('barbara.png', scaled=True, gray=True)

# Highpass filter test image
npd = 16
fltlmbd = 5
sl, sh = util.tikhonov_filter(img, fltlmbd, npd)

# Apply random mask to highpass component
frc = 0.5
np.random.seed(12345)
msk = util.rndmask(img.shape, frc, dtype=np.float32)
shw = msk * sh

# Load dictionary
D = util.convdicts()['G:12x12x72']
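# A possible continuation (assumed parameter values; cucbpdn.cbpdnmsk, the
# GPU mask-decoupled CBPDN entry point, is an assumption here)
lmbda = 2e-2
opt = cbpdn.ConvBPDN.Options({'Verbose': True, 'MaxMainIter': 200,
                              'RelStopTol': 5e-3, 'AuxVarObj': False})
X = cucbpdn.cbpdnmsk(D, shw, msk, lmbda, opt)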
Example No. 13
# Get test image
url = 'http://www.math.purdue.edu/~lucier/PHOTO_CD/D65_GREY_TIFF_IMAGES/'\
      'IMG0023.tif'
dir = os.path.join(tempfile.gettempdir(), 'images')
if not os.path.exists(dir):
    os.mkdir(dir)
pth = os.path.join(dir, 'IMG0023.tif')
if not os.path.isfile(pth):
    img = util.netgetdata(url)
    with open(pth, 'wb') as f:
        f.write(img.read())


# Load demo image
ei = util.ExampleImages(pth=dir)
img = ei.image('IMG0023.tif', scaled=True, zoom=0.5)


# Load dictionary
Db = util.convdicts()['G:12x12x72']
# Append impulse filter for lowpass component representation
di = np.zeros(Db.shape[0:2] + (1,), dtype=np.float32)
di[0, 0] = 1
D = np.concatenate((di, Db), axis=2)
# Weights for l1 norm: no regularization on impulse filter
wl1 = np.ones((1,)*2 + (D.shape[2:]), dtype=np.float32)
wl1[..., 0] = 0.0
# Weights for l2 norm of gradient: regularization only on impulse filter
wgr = np.zeros((D.shape[2]), dtype=np.float32)
wgr[0] = 1.0
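# A possible continuation (assumed parameter values; uses sporco.admm.cbpdn):
# solve the gradient-regularised CBPDN problem with the weight arrays above
lmbda = 1e-2
mu = 1e-1
opt = cbpdn.ConvBPDNGradReg.Options({'Verbose': True, 'MaxMainIter': 200,
                                     'RelStopTol': 5e-3, 'L1Weight': wl1,
                                     'GradWeight': wgr})
b = cbpdn.ConvBPDNGradReg(D, img, lmbda, mu, opt)
X = b.solve()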
Example No. 14
from builtins import input

import pyfftw  # See https://github.com/pyFFTW/pyFFTW/issues/40
import numpy as np

from sporco.admm import tvl2
from sporco.dictlrn import cbpdndl
from sporco.dictlrn import cbpdndlmd
from sporco import util
from sporco import signal
from sporco import plot
"""
Load training images.
"""

exim = util.ExampleImages(scaled=True, zoom=0.25, gray=True)
S1 = exim.image('barbara.png', idxexp=np.s_[10:522, 100:612])
S2 = exim.image('kodim23.png', idxexp=np.s_[:, 60:572])
S = np.dstack((S1, S2))
"""
Construct initial dictionary.
"""

np.random.seed(12345)
D0 = np.random.randn(8, 8, 32)
"""
Create random mask and apply to training images.
"""

frc = 0.5
W = signal.rndmask(S.shape[0:2] + (1, ), frc, dtype=np.float32)
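"""
A possible continuation (the class name and parameter values are
assumptions): apply the mask to the training images and run masked
convolutional dictionary learning via the ``cbpdndlmd`` module imported
above.
"""

Sw = W * S
lmbda = 0.2
opt = cbpdndlmd.ConvBPDNMaskDictLearn.Options({'Verbose': True,
                                               'MaxMainIter': 100})
d = cbpdndlmd.ConvBPDNMaskDictLearn(D0, Sw, lmbda, W, opt)
D1 = d.solve()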
Example No. 15
"""Basic cbpdn.ConvBPDN usage example (colour images, greyscale dictionary)"""

from __future__ import print_function
from builtins import input
from builtins import range

import numpy as np

from sporco import util
from sporco import plot
from sporco.admm import cbpdn
import sporco.linalg as spl

# Load demo image
img = util.ExampleImages().image('lena', scaled=True, zoom=0.5)

# Highpass filter test image
npd = 16
fltlmbd = 10
sl, sh = util.tikhonov_filter(img, fltlmbd, npd)

# Load dictionary
D = util.convdicts()['G:8x8x64']

# Set up ConvBPDN options
lmbda = 1e-2
opt = cbpdn.ConvBPDN.Options({
    'Verbose': True,
    'MaxMainIter': 200,
    'LinSolveCheck': True,
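    # A possible continuation (assumed remaining options)
    'RelStopTol': 5e-3})

# Initialise and run the ConvBPDN solver on the highpass component (sketch)
b = cbpdn.ConvBPDN(D, sh, lmbda, opt)
X = b.solve()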
Example No. 16
from sporco import metric
from sporco import plot
from sporco import cuda
from sporco.admm import cbpdn


# If running in a notebook, try to use wurlitzer so that output from the CUDA
# code will be properly captured in the notebook.
sys_pipes = util.notebook_system_output()


"""
Load example image.
"""

img = util.ExampleImages().image('barbara.png', scaled=True, gray=True,
                                 idxexp=np.s_[10:522, 100:612])


"""
Highpass filter example image.
"""

npd = 16
fltlmbd = 20
sl, sh = signal.tikhonov_filter(img, fltlmbd, npd)


"""
Load dictionary.
"""
Example No. 17
"""Usage example: cbpdn.ConvElasticNet (greyscale images)"""

from __future__ import print_function
from builtins import input
from builtins import range

import numpy as np

from sporco import util
from sporco import plot
from sporco.admm import cbpdn
import sporco.metric as sm


# Load demo image
img = util.ExampleImages().image('standard', 'barbara.grey.png', scaled=True)


# Highpass filter test image
npd = 16
fltlmbd = 5
sl, sh = util.tikhonov_filter(img, fltlmbd, npd)


# Load dictionary
D = util.convdicts()['G:12x12x36']


# Set up ConvBPDN options
lmbda = 1e-2
mu = 1e-3
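# A possible continuation (assumed option values): initialise and run the
# ConvElasticNet solver on the highpass component
opt = cbpdn.ConvElasticNet.Options({'Verbose': True, 'MaxMainIter': 200,
                                    'RelStopTol': 5e-3, 'AuxVarObj': False})
b = cbpdn.ConvElasticNet(D, sh, lmbda, mu, opt)
X = b.solve()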