Example #1
def genexamples(_):

    import os
    import zipfile
    from sporco.util import netgetdata

    url = 'https://codeload.github.com/bwohlberg/sporco-notebooks/zip/master'
    print('Constructing docs from example scripts')
    # Select paths depending on whether the build is running on Read the Docs
    # (on_rtd, rootpath, and confpath are module-level variables defined
    # elsewhere in the Sphinx conf.py)
    if on_rtd:
        epth = '../../examples'
    else:
        epth = os.path.join(rootpath, 'examples')
    spth = os.path.join(epth, 'scripts')
    npth = os.path.join(epth, 'notebooks')
    if on_rtd:
        rpth = 'examples'
    else:
        rpth = os.path.join(confpath, 'examples')

    # Download and unpack the notebooks if they are not already present
    if not os.path.exists(npth):
        print('Notebooks required for examples section not found: '
              'downloading from sporco-notebooks repo on GitHub')
        zipdat = netgetdata(url)
        zipobj = zipfile.ZipFile(zipdat)
        zipobj.extractall(path=epth)
        os.rename(os.path.join(epth, 'sporco-notebooks-master'),
                  os.path.join(epth, 'notebooks'))

    # docntbk is SPORCO's docs helper module, assumed to be imported at
    # module level in conf.py
    docntbk.make_example_scripts_docs(spth, npth, rpth)
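
A brief sketch, not taken verbatim from the SPORCO conf.py, of how a helper like ``genexamples`` is typically wired into the Sphinx build; the ignored argument corresponds to the application object passed by the ``builder-inited`` event.

def setup(app):
    # Assumed wiring: regenerate the example docs before the build starts
    app.connect('builder-inited', genexamples)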
Example #2
import os
import tempfile

from sporco import util
from sporco import plot
import sporco.linalg as spl
from sporco.admm import cbpdn
import sporco_cuda.cbpdn as cucbpdn


# Get test image
url = 'http://www.math.purdue.edu/~lucier/PHOTO_CD/D65_GREY_TIFF_IMAGES/'\
      'IMG0023.tif'
dir = os.path.join(tempfile.gettempdir(), 'images')
if not os.path.exists(dir):
    os.mkdir(dir)
pth = os.path.join(dir, 'IMG0023.tif')
if not os.path.isfile(pth):
    img = util.netgetdata(url)
    with open(pth, 'wb') as f:
        f.write(img.read())


# Load demo image
ei = util.ExampleImages(pth=dir)
img = ei.image('IMG0023.tif', scaled=True, zoom=0.5)


# Highpass filter test image
npd = 16
fltlmbd = 5
sl, sh = util.tikhonov_filter(img, fltlmbd, npd)
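
The excerpt stops after the highpass filtering step. As a rough sketch, not part of the original script, the highpass component ``sh`` would then be passed to the CUDA CBPDN solver imported above as ``cucbpdn``; the dictionary key, ``lmbda`` value, and option settings below are assumptions.

D = util.convdicts()['G:12x12x36']        # assumed dictionary choice
lmbda = 1e-2                              # assumed regularisation parameter
opt = cbpdn.ConvBPDN.Options({'Verbose': True, 'MaxMainIter': 50,
                              'RelStopTol': 5e-3})
X = cucbpdn.cbpdn(D, sh, lmbda, opt)      # solve CBPDN on the GPU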
Example #3
def pad(x, n=8):
    # The def line is missing from this excerpt; the name and signature are
    # assumptions. Symmetrically pad the two spatial axes of x by n pixels.
    return np.pad(x, ((n, n), (n, n), (0, 0)), mode='symmetric')


def crop(x, n=8):
    # Remove the n-pixel padding from the two spatial axes
    return x[n:-n, n:-n]


"""
Load a reference hyperspectral image and corrupt it with 33% salt and pepper noise. (The call to ``np.random.seed`` ensures that the pseudo-random noise is reproducible.)
"""

pth = os.path.join(tempfile.gettempdir(), 'Indian_pines.mat')
if not os.path.isfile(pth):
    url = 'http://www.ehu.eus/ccwintco/uploads/2/22/Indian_pines.mat'
    vid = util.netgetdata(url)
    with open(pth, 'wb') as f:
        f.write(vid.read())

img = sio.loadmat(pth)['indian_pines'].astype(np.float32)
img = img[16:-17, 16:-17, 0:200:2]
img /= img.max()

np.random.seed(12345)
imgn = signal.spnoise(img, 0.33)
"""
We use a product dictionary :cite:`garcia-2018-convolutional2` constructed from a single-channel convolutional dictionary for the spatial axes of the image, and a truncated PCA basis for the spectral axis of the image. The impulse denoising problem is solved by appending an additional filter to the learned dictionary ``D0``, which is one of those distributed with SPORCO. This additional component consists of an impulse filter that will represent the low frequency image components when used together with a gradient penalty on the coefficient maps, as discussed below. The PCA basis is computed from the noise-free ground-truth image since the primary purpose of this script is to serve as a code usage example: in a real application, the PCA basis would be estimated from a relevant noise-free image, or could be estimated from the noisy image via Robust PCA.
"""

D0 = util.convdicts()['G:8x8x32']
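
The excerpt ends just after loading ``D0``. As a minimal sketch, not taken from the original script, the truncated PCA basis for the spectral axis described above might be computed from the ground-truth image as follows; the number of components ``npc`` and the variable names are assumptions.

npc = 8                                   # assumed number of PCA components
S = img.reshape((-1, img.shape[-1])).T    # spectral samples as columns
S = S - S.mean(axis=1, keepdims=True)     # centre the spectra
B = np.linalg.svd(S, full_matrices=False)[0][:, :npc]  # truncated PCA basis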
Example #4
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Get notebooks from sporco-notebooks on GitHub."""

from __future__ import print_function

import os
import sys
import zipfile

sys.path.insert(0, '..')
from sporco.util import netgetdata


if os.path.exists('notebooks'):
    print('Error: notebooks directory already exists')
else:
    url = 'https://codeload.github.com/bwohlberg/sporco-notebooks/zip/master'
    zipdat = netgetdata(url)
    zipobj = zipfile.ZipFile(zipdat)
    zipobj.extractall()
    os.symlink('sporco-notebooks-master', 'notebooks')
Example #5
def pad(x, n=8):
    # The def line is missing from this excerpt; the name and signature are
    # assumptions. Symmetrically pad the two spatial axes of x by n pixels.
    return np.pad(x, ((n, n), (n, n), (0, 0)), mode='symmetric')


def crop(x, n=8):
    # Remove the n-pixel padding from the two spatial axes
    return x[n:-n, n:-n]


"""
Load a reference hyperspectral image and corrupt it with 33% salt and pepper noise. (The call to ``np.random.seed`` ensures that the pseudo-random noise is reproducible.)
"""

pth = os.path.join(tempfile.gettempdir(), 'Indian_pines.mat')
if not os.path.isfile(pth):
    url = 'http://www.ehu.eus/ccwintco/uploads/2/22/Indian_pines.mat'
    vid = util.netgetdata(url)
    with open(pth, 'wb') as f:
        f.write(vid.read())

img = sio.loadmat(pth)['indian_pines'].astype(np.float32)
img = img[16:-17, 16:-17, 0:200:2]
img /= img.max()

np.random.seed(12345)
imgn = util.spnoise(img, 0.33)


"""
We use a product dictionary :cite:`garcia-2018-convolutional2` constructed from a single-channel convolutional dictionary for the spatial axes of the image, and a standard (non-convolutional) dictionary for the spectral axis of the image. The impulse denoising problem is solved by appending an additional filter to the learned dictionary ``D0``, which is one of those distributed with SPORCO. This additional component consists of an impulse filter that will represent the low frequency image components when used together with a gradient penalty on the coefficient maps, as discussed below. The spectral axis dictionary is learned from the noise-free ground-truth image since the primary purpose of this script is to serve as a code usage example: in a real application, this dictionary would be estimated from a relevant noise-free image.
"""
Example #6
def test_25(self):
    with pytest.raises(util.urlerror.URLError):
        dat = util.netgetdata('http://devnull')
Example #7
def test_24(self):
    with pytest.raises(ValueError):
        dat = util.netgetdata('http://devnull', maxtry=0)