def to_kde(grid):
  data = numpy.empty((7, 5), dtype=numpy.float32)
  for y in xrange(7):
    for x in xrange(5):
      data[y,x] = 1.0 if grid[y][x]!=' ' else blank_weight
  
  ret = MeanShift()
  ret.set_data(data, 'bb', 2)
  #ret.set_kernel('triangular')
  #ret.set_spatial('kd_tree')
  ret.scale_loo_nll()
  
  return ret
def resample(kde):
  data = kde.draws(samples)
  
  ret = MeanShift()
  ret.set_data(data, 'df')
  ret.set_kernel('triangular')
  ret.set_spatial('kd_tree')
  ret.scale_loo_nll()
  
  return ret
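# A hypothetical usage sketch (not in the original snippet): `grid` would be a
# 7x5 character glyph and `samples` the draw count from the parameters block...
#kde = to_kde(grid)
#smooth = resample(kde)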
def ms_by_conc(power, code=''):
  ms = MeanShift()
  ms.quality = 0.5
  ms.set_data(numpy.array([1, 0, 0], dtype=numpy.float32), 'f')
  ms.set_kernel('fisher(%.1f%s)' % (2**power, code))
  ms.set_spatial('kd_tree')
  
  return ms
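# A hedged usage sketch, mirroring the bandwidth-selection example later on
# this page: build candidate Fisher concentrations and let
# scale_loo_nll_array pick the best one...
#options = map(ms_by_conc, xrange(8)) + [ms_by_conc(8, 'c'), ms_by_conc(8, 'a')] + map(ms_by_conc, xrange(9, 16))
#best = ms.scale_loo_nll_array(options)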
Example #4
    img = array2cv(img)
    cv.SaveImage('mult_k_%s_probs.png' % kernel, img)

    # Fake multiplication by multiplying the KDE images; visualise...
    img = imgs[0] * imgs[1] * imgs[2] * imgs[3]
    img *= 255.0 / img.max()

    img = array2cv(img)
    cv.SaveImage('mult_k_%s_probs_mult.png' % kernel, img)

    # Multiply them together properly...
    p = ProgBar()
    output = numpy.empty((draw, 2), dtype=numpy.float32)
    for i in xrange(draw):
        p.callback(i, draw)
        MeanShift.mult(ms, output[i, :].reshape((1, -1)), fake=2)
    del p

    mult = to_ms(output)
    mult.copy_scale(ms[0])

    # Visualise the resulting distribution - the actual multiplication...
    img = numpy.zeros((draw_scale * size[0], draw_scale * size[1]),
                      dtype=numpy.float32)

    sweep0 = numpy.linspace(0, size[0], img.shape[0])
    sweep1 = numpy.linspace(0, size[1], img.shape[1])

    for ij, j in enumerate(sweep0):
        points = numpy.append(j * numpy.ones(sweep1.shape[0]).reshape((-1, 1)),
                              sweep1.reshape((-1, 1)),
# Parameters...
samples = 1024 * 16
samples_dir = 1024 * 2
scale = 16.0

dimensions = [1, 2, 3]
dir_dimensions = [2, 3, 4]
dir_conc = [2.0, 16.0, 128.0, 1024.0]



# Do the 'simple' kernels...
for kernel in ['uniform', 'triangular', 'epanechnikov', 'cosine', 'gaussian', 'cauchy', 'logistic']:
  for dim in dimensions:
    # Create a mean shift object with a single sample of the provided kernel type...
    ms = MeanShift()
    ms.set_data(numpy.array([0.0]*dim, dtype=numpy.float32), 'f')
    ms.set_kernel(kernel)
    ms.quality = 1.0
    
    # Draw lots of samples from it...
    sample = ms.draws(samples)
    
    # Get the probability of each...
    p1 = ms.probs(sample)
    
    # Throw away samples where p1 is 0 - they are a result of the range optimisation, and break the below...
    keep = p1>1e-6
    sample = sample[keep,:]
    p1 = p1[keep]
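    
    # The original truncates here; a speculative stand-in for 'the below':
    # report the mean negative log likelihood of the kept draws...
    print '%s %iD: mean nll = %.3f' % (kernel, dim, -numpy.log(p1).mean())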
    
Example #6
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

import random
import numpy

from ms import MeanShift



# Create a dataset - draws from a Gaussian...
data = numpy.array(map(lambda _: random.normalvariate(0.0, 2.0), xrange(1000)))



# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'd')

ms.set_kernel(random.choice(filter(lambda s: s!='fisher', ms.kernels())))
ms.set_spatial(random.choice(ms.spatials()))



# Iterate and calculate the probability at every point...
sam = numpy.arange(-5.0, 5.0, 0.15).reshape((-1,1))
prob = ms.probs(sam)



# Print out basic stats...
print 'kernel = %s; spatial = %s' % (ms.get_kernel(), ms.get_spatial())
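
# A hedged extra, borrowing the ASCII rendering from the discrete-kernel
# example further down this page...
for threshold in numpy.arange(prob.max(), 0.0, -prob.max()/10.0):
  print ''.join(map(lambda p: '|' if p>threshold else ' ', prob))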
Example #7
  
  cos_theta = numpy.cos(theta)
  sin_theta = numpy.sin(theta)
  
  which = [deflection, cos_theta, sin_theta]
  chunk = numpy.concatenate((which[ex_dim].reshape((-1,1)), which[(ex_dim+1)%3].reshape((-1,1)), which[(ex_dim+2)%3].reshape((-1,1))), axis=1)
  
  data.append(chunk)
  
data = numpy.concatenate(data, axis=0)
data /= numpy.sqrt(numpy.square(data).sum(axis=1)).reshape((-1,1))



# Setup mean shift...
ms = MeanShift()
ms.set_data(data, 'df')

ms.set_kernel('fisher(128.0)')
ms.set_spatial('kd_tree')



# Parameters for output images...
scale = 256
height = scale * 2
width = int(2.0 * numpy.pi * scale)



# Visualise the samples on a mercator projection...
Example #8
if len(sys.argv)<2:
  print "Need an image filename"
  sys.exit(1)

fn = sys.argv[1]



# Load the image into a numpy array...
image = cv.LoadImage(fn)
image = cv2array(image)



# Perform mean shift, with full clustering...
ms = MeanShift()
ms.set_data(image, 'bbf')

normal_kernels = ['uniform', 'triangular', 'epanechnikov', 'cosine', 'gaussian', 'cauchy']
ms.set_kernel(random.choice(normal_kernels))
ms.set_spatial('iter_dual')
ms.set_balls('hash')

spatial_scale = 16.0
colour_scale = 32.0
ms.set_scale(numpy.array([1.0/spatial_scale, 1.0/spatial_scale, 1.0/colour_scale, 1.0/colour_scale, 1.0/colour_scale]))

ms.quality = 0.0
ms.ident_dist = 0.3
ms.merge_range = 0.6
ms.merge_check_step = 1
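
# A speculative continuation (the original example truncates here): with the
# merge parameters set, the next step in these scripts is full clustering...
#modes, indices = ms.cluster()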
data = []

for camera in cameras:
  direction = numpy.random.vonmises(camera[3], conc, size=camera[0])
  x = numpy.random.normal(camera[1], sd_x, size=camera[0])
  y = numpy.random.normal(camera[2], sd_y, size=camera[0])
  
  block = numpy.concatenate((x.reshape((-1,1)), y.reshape((-1,1)), numpy.cos(direction).reshape((-1,1)), numpy.sin(direction).reshape((-1,1))), axis=1)
  data.append(block)



# Construct the mean shift object from it, including a composite kernel...
kde = []
for ds in data:
  ms = MeanShift()
  ms.set_data(ds, 'df')
  if len(kde)==0:
    ms.set_kernel('composite(2:gaussian,2:fisher(32.0))')
    ms.set_spatial('kd_tree')
    ms.set_scale(numpy.array([10.0,5.0,1.0,1.0]))
    ms.merge_range = 0.05
  else:
    ms.copy_all(kde[0])
    
  kde.append(ms)



# Visualise the data set...
for ind, ds in enumerate(data):
Example #10
#   http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

import random
import numpy

from ms import MeanShift

# Create a dataset - draws from a Gaussian...
data = numpy.array(map(lambda _: random.normalvariate(0.0, 2.0), xrange(1000)))

# Setup three mean shift objects with the same data set and draw from them to demonstrate that you get the exact same output...
print 'Should all be the same:'
ms = map(lambda _: MeanShift(), xrange(3))
for i in xrange(len(ms)):
    ms[i].set_data(data, 'd')
    ms[i].set_kernel('gaussian')
    ms[i].set_spatial('kd_tree')

    print 'From', i, '|', ms[i].draw()
print

# Link the second to the first and draw again - first two should be different, third the same as the first...
ms[1].link_rng(ms[0])

print '#2 different:'
for i in xrange(len(ms)):
    print 'From', i, '|', ms[i].draw()
print
Example #11
if len(sys.argv)<2:
  print "Needs an image filename. An arbitrary second parameter causes it to store its results in a hsf5 file rather than dump them as images."
  sys.exit(1)

fn = sys.argv[1]



# Load the image into a numpy array...
image = cv.LoadImage(fn)
image = cv2array(image)



# Setup the mean shift object...
ms = MeanShift()
ms.set_data(image, 'bbf')

ms.set_kernel('uniform')
ms.set_spatial('iter_dual')
ms.set_balls('hash')



# Calculate the hierarchy of segments...
low_scale = 8.0
low_colour = 6.0
high_scale = 256.0
high_colour = 96.0

low  = numpy.array([low_scale, low_scale, low_colour, low_colour, low_colour])


# More data - from a line...
samples = 4 * 1024
x = numpy.random.random(samples)*9.0 - 4.5
y = numpy.random.beta(2.0, 2.0, samples) - 0.5

data2 = numpy.concatenate((x.reshape((-1,1)), y.reshape((-1,1))), axis=1)

data = numpy.concatenate((data1, data2), axis=0)
numpy.random.shuffle(data)


# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'df')
ms.set_spatial('kd_tree')
ms.set_scale(numpy.array([1.5, 1.5]))



# Do some visualisation...
dim = 512
image = numpy.zeros((dim, dim, 3), dtype=numpy.float32)

for r in xrange(data.shape[0]):
  loc = data[r,:]
  loc = (loc + 5.0) / 10.0
  loc *= dim
  image[int(loc[1]+0.5), int(loc[0]+0.5), :] = 64.0
Example #13
# Parameters...
samples = 1024 * 1024
dimensions = [1, 2, 3]
dir_dimensions = [2, 3, 4]
dir_area = [numpy.pi * 2.0, numpy.pi * 4.0, 2.0 * numpy.pi**2]
dir_conc = [2.0, 16.0, 128.0, 1024.0]

# Do the 'simple' kernels...
for kernel in [
        'uniform', 'triangular', 'epanechnikov', 'cosine', 'gaussian',
        'cauchy', 'logistic'
]:
    for dim in dimensions:
        # Create a mean shift object with a single sample of the provided kernel type...
        ms = MeanShift()
        ms.set_data(numpy.array([0.0] * dim, dtype=numpy.float32), 'f')
        ms.set_kernel(kernel)
        ms.quality = 1.0

        # Create a uniform sample over a suitably large region (Yes I am assuming I got the uniform kernel right!)...
        uniform = MeanShift()
        uniform.set_data(numpy.array([0.0] * dim, dtype=numpy.float32), 'f')
        uniform.set_kernel('uniform')
        uniform.set_scale(numpy.ones(dim) / ms.get_range())
        sample = uniform.draws(samples)
        sp = uniform.prob(sample[0, :])

        # Evaluate the probabilities of the uniform set...
        p = ms.probs(sample)
Example #14
from ms import MeanShift

# Check an image filename has been provided on the command line...
if len(sys.argv) < 2:
    print "Need an image filename"
    sys.exit(1)

fn = sys.argv[1]

# Load the image into a numpy array...
image = cv.LoadImage(fn)
image = cv2array(image)

# Perform mean shift, with full clustering...
ms = MeanShift()
ms.set_data(image, 'bbf')

ms.set_kernel(random.choice(filter(lambda s: s != 'fisher', ms.kernels())))
ms.set_spatial('iter_dual')
ms.set_balls('hash')

spatial_scale = 16.0
colour_scale = 32.0
ms.set_scale(
    numpy.array([
        1.0 / spatial_scale, 1.0 / spatial_scale, 1.0 / colour_scale,
        1.0 / colour_scale, 1.0 / colour_scale
    ]))

ms.quality = 0.0
Example #15
image = numpy.zeros((dim, dim, 3), dtype=numpy.float32)

for r in xrange(data.shape[0]):
    loc = data[r, :]
    loc = (loc + size) / (2.0 * size)
    loc *= dim
    try:
        image[int(loc[1] + 0.5), int(loc[0] + 0.5), :] = 255.0
    except:
        pass  # Deals with out of range values.

image = array2cv(image)
cv.SaveImage('draw_input.png', image)

# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'df')
normal_kernels = [
    'uniform', 'triangular', 'epanechnikov', 'cosine', 'gaussian', 'cauchy',
    'logistic'
]
ms.set_kernel(random.choice(normal_kernels))
ms.set_spatial('kd_tree')

print 'kernel = %s' % ms.get_kernel()

# Choose a reasonable size...
print 'Selecting size using loo:'
p = ProgBar()
ms.scale_loo_nll(callback=p.callback)
del p
Example #16
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

import random
import numpy

from ms import MeanShift

# Create a dataset - equally spaced samples weighted by a Gaussian, such that it should estimate a Gaussian...
x = numpy.arange(-5.0, 5.0, 0.02)
y = numpy.exp(-0.5 * x**2.0 / 2.0)

data = numpy.concatenate((x.reshape((-1, 1)), y.reshape((-1, 1))), axis=1)

# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'df')

normal_kernels = [
    'uniform', 'triangular', 'epanechnikov', 'cosine', 'gaussian', 'cauchy',
    'logistic'
]
ms.set_kernel(random.choice(normal_kernels))
ms.set_spatial(random.choice(ms.spatials()))
ms.set_scale(numpy.ones(2), 1)

# Iterate and calculate the probability at every point...
sam = numpy.arange(-5.0, 5.0, 0.15)
prob = numpy.array(map(lambda v: ms.prob(numpy.array([v, 1.0])), sam))

# Print out basic stats...
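# (Restored from the identical example #52 further down this page...)
print 'kernel = %s; spatial = %s' % (ms.get_kernel(), ms.get_spatial())
print 'exemplars = %i; features = %i' % (ms.exemplars(), ms.features())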
Example #17
from ms import MeanShift

# Check an image filename has been provided on the command line...
if len(sys.argv) < 2:
    print "Needs an image filename. An arbitrary second parameter causes it to store its results in a hsf5 file rather than dump them as images."
    sys.exit(1)

fn = sys.argv[1]

# Load the image into a numpy array...
image = cv.LoadImage(fn)
image = cv2array(image)

# Setup the mean shift object...
ms = MeanShift()
ms.set_data(image, 'bbf')

ms.set_kernel('uniform')
ms.set_spatial('iter_dual')
ms.set_balls('hash')

# Calculate the hierarchy of segments...
low_scale = 8.0
low_colour = 6.0
high_scale = 256.0
high_colour = 96.0

low = numpy.array([low_scale, low_scale, low_colour, low_colour, low_colour])
high = numpy.array(
    [high_scale, high_scale, high_colour, high_colour, high_colour])
for r in xrange(data.shape[0]):
  loc = data[r,:]
  loc = (loc + size) / (2.0*size)
  loc *= dim
  try:
    image[int(loc[1]+0.5), int(loc[0]+0.5), :] = 255.0
  except: pass # Deals with out of range values.

image = array2cv(image)
cv.SaveImage('bandwidth_samples.png', image)



# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'df')
ms.set_kernel('gaussian')
ms.set_spatial('kd_tree')



# Progress bar version of scale_loo_nll...
def scale_loo_nll():
  p = ProgBar()
  ms.scale_loo_nll(callback = p.callback)
  del p

  

# Iterate and try out a bunch of different algorithms...
Example #19
import numpy

from ms import MeanShift



# Create a dataset - equally spaced samples weighted by a Gaussian, such that it should estimate a Gaussian...
x = numpy.arange(-5.0, 5.0, 0.02)
y = numpy.exp(-0.5 * x**2.0 / 2.0)

data = numpy.concatenate((x.reshape((-1,1)), y.reshape((-1,1))), axis=1)



# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'df')

normal_kernels = ['uniform', 'triangular', 'epanechnikov', 'cosine', 'gaussian', 'cauchy', 'logistic']
ms.set_kernel(random.choice(normal_kernels))
ms.set_spatial(random.choice(ms.spatials()))
ms.set_scale(numpy.ones(2), 1)



# Iterate and calculate the probability at every point...
sam = numpy.arange(-5.0, 5.0, 0.15)
prob = numpy.array(map(lambda v: ms.prob(numpy.array([v,1.0])), sam))


Example #20


# Add weights to it...
weights = numpy.zeros(quantity*4)
weights[0:quantity] = 0.1 / 0.4
weights[quantity:2*quantity] = 0.2 / 0.4
weights[2*quantity:3*quantity] = 0.3 / 0.4
weights[3*quantity:4*quantity] = 0.4 / 0.4

data = numpy.concatenate((data, weights.reshape((-1,1))), axis=1)



# Use mean shift to cluster it...
ms = MeanShift()
ms.set_data(data, 'df', 2)

normal_kernels = ['uniform', 'triangular', 'epanechnikov', 'cosine', 'gaussian', 'cauchy', 'logistic']
ms.set_kernel(random.choice(normal_kernels))
ms.set_spatial(random.choice(ms.spatials()))

modes, indices = ms.cluster()



# Print out basic stats...
print 'kernel = %s; spatial = %s' % (ms.get_kernel(), ms.get_spatial())
print 'exemplars = %i; features = %i' % (ms.exemplars(), ms.features())
print 'quality = %.3f; epsilon = %.3f; iter_cap = %i' % (ms.quality, ms.epsilon, ms.iter_cap)
print 'weight = %.1f' % ms.weight()
Example #21
from ms import MeanShift

# Create a data set, 2D...
means = [[1.0, 1.0], [4.0, 4.0], [4.0, 1.0], [1.0, 4.0]]
quantity = 250

a = numpy.random.multivariate_normal(means[0], 0.1 * numpy.eye(2), quantity)
b = numpy.random.multivariate_normal(means[1], 0.2 * numpy.eye(2), quantity)
c = numpy.random.multivariate_normal(means[2], 0.3 * numpy.eye(2), quantity)
d = numpy.random.multivariate_normal(means[3], 0.4 * numpy.eye(2), quantity)

data = numpy.concatenate((a, b, c, d), axis=0)

# Use mean shift to cluster it...
ms = MeanShift()
ms.set_data(data, 'df')

ms.set_kernel(random.choice(filter(lambda s: s != 'fisher', ms.kernels())))
ms.set_spatial(random.choice(ms.spatials()))

modes, indices = ms.cluster()

# Print out basic stats...
print 'kernel = %s; spatial = %s' % (ms.get_kernel(), ms.get_spatial())
print 'exemplars = %i; features = %i' % (ms.exemplars(), ms.features())
print 'quality = %.3f; epsilon = %.3f; iter_cap = %i' % (
    ms.quality, ms.epsilon, ms.iter_cap)
print

# Print out a grid of cluster assignments...
Example #22
import random

import cv
from utils.cvarray import *
import numpy

from ms import MeanShift

# NOTE: This test assumes you have a directory called nist containing the nist images, with black for background and white for foreground.

# Get a list of files to process...
fn_list = os.listdir('nist')
fn_list = filter(lambda fn: fn[-9:] != '_line.png', fn_list)

# Setup a mean shift object for use...
ms = MeanShift()
ms.set_spatial('iter_dual')

# Process each in turn...
for fn in fn_list:
    print 'Doing %s...' % fn
    fn = os.path.join('nist', fn)

    # Load image and binarise...
    image = cv.LoadImage(fn)
    image = cv2array(image)
    image = image[:, :, 0] > 128

    # Finish setup of meanshift object...
    ms.set_data(image, 'bb', 2)
    ms.set_scale(numpy.array([0.65, 0.65]))
Example #23
import random
import numpy

from ms import MeanShift



# Create some data (!)...
data = ([-2] * 3) + ([0] * 8) + ([1] * 4) + ([2] * 5)
data = numpy.array(data, dtype=numpy.int32)



# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'd')
ms.set_kernel('discrete')
ms.set_spatial('kd_tree')



# Iterate and calculate the probability at a bunch of points, then plot...
sam = numpy.arange(-2.5, 2.5, 0.1)
prob = numpy.array(map(lambda v: ms.prob(numpy.array([v])), sam))

print 'Distribution:'
for threshold in numpy.arange(prob.max(), 0.0, -prob.max()/10.0):
  print ''.join(map(lambda p: '|' if p>threshold else ' ', prob))

Example #24

# Parameters...
samples = 1024 * 1024
dimensions = [1, 2, 3]
dir_dimensions = [2, 3, 4]
dir_area = [numpy.pi * 2.0, numpy.pi * 4.0, 2.0 * numpy.pi**2]
dir_conc = [2.0, 16.0, 128.0, 1024.0]



# Do the 'simple' kernels...
for kernel in ['uniform', 'triangular', 'epanechnikov', 'cosine', 'gaussian', 'cauchy', 'logistic']:
  for dim in dimensions:
    # Create a mean shift object with a single sample of the provided kernel type...
    ms = MeanShift()
    ms.set_data(numpy.array([0.0]*dim, dtype=numpy.float32), 'f')
    ms.set_kernel(kernel)
    ms.quality = 1.0
    
    # Create a uniform sample over a suitably large region (Yes I am assuming I got the uniform kernel right!)...
    uniform = MeanShift()
    uniform.set_data(numpy.array([0.0]*dim, dtype=numpy.float32), 'f')
    uniform.set_kernel('uniform')
    uniform.set_scale(numpy.ones(dim) / ms.get_range())
    sample = uniform.draws(samples)
    sp = uniform.prob(sample[0,:])
    
    # Evaluate the probabilities of the uniform set...
    p = ms.probs(sample)
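    # A hedged addition (not in the original snippet): importance-sample the
    # integral of the kernel's pdf - it should land close to 1.0 if the
    # normalisation is right...
    print '%s %iD: integral ~= %.4f' % (kernel, dim, p.mean() / sp)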
    
Example #25
y = radius * numpy.sin(theta)

data1 = numpy.concatenate((x.reshape((-1, 1)), y.reshape((-1, 1))), axis=1)

# More data - from a line...
samples = 4 * 1024
x = numpy.random.random(samples) * 9.0 - 4.5
y = numpy.random.beta(2.0, 2.0, samples) - 0.5

data2 = numpy.concatenate((x.reshape((-1, 1)), y.reshape((-1, 1))), axis=1)

data = numpy.concatenate((data1, data2), axis=0)
numpy.random.shuffle(data)

# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'df')
ms.set_spatial('kd_tree')
ms.set_scale(numpy.array([1.5, 1.5]))

# Do some visualisation...
dim = 512
image = numpy.zeros((dim, dim, 3), dtype=numpy.float32)

for r in xrange(data.shape[0]):
    loc = data[r, :]
    loc = (loc + 5.0) / 10.0
    loc *= dim
    image[int(loc[1] + 0.5), int(loc[0] + 0.5), :] = 64.0

print 'Projecting samples to line...'
Example #26
from ms import MeanShift



# NOTE: This test assumes you have a directory called nist containing the nist images, with black for background and white for foreground.



# Get a list of files to process...
fn_list = os.listdir('nist')
fn_list = filter(lambda fn: fn[-9:]!='_line.png', fn_list)



# Setup a mean shift object for use...
ms = MeanShift()
ms.set_spatial('iter_dual')



# Process each in turn...
for fn in fn_list:
  print 'Doing %s...' % fn
  fn = os.path.join('nist',fn)
  
  # Load image and binarise...
  image = cv.LoadImage(fn)
  image = cv2array(image)
  image = image[:,:,0] > 128

  # Finish setup of meanshift object...
Example #27
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at

#   http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

import numpy
import numpy.random
from scipy.misc import imsave

from utils.prog_bar import ProgBar

from ms import MeanShift

# Visualises each kernel...
for kernel in MeanShift.kernels():
    if MeanShift.info_config(kernel) != None:
        continue

    a = MeanShift()
    a.set_data(numpy.array([0.0], dtype=numpy.float32), 'f')
    a.set_kernel(kernel)
    a.quality = 1.0

    image = numpy.ones((384, 1024), dtype=numpy.float32)
    x = numpy.linspace(-4, 4, image.shape[1])
    iy, ix = numpy.meshgrid(numpy.arange(image.shape[0]),
                            numpy.arange(image.shape[1]),
                            indexing='ij')

    ya = a.probs(x[:, None])
Example #28
from utils.cvarray import *

from ms import MeanShift

# Sample from a circle + noise model to create some data...
samples = 8192
theta = 2.0 * numpy.pi * numpy.random.random(samples)
radius = 3.0 + (numpy.random.beta(2.0, 2.0, samples) - 0.5)

x = radius * numpy.cos(theta)
y = radius * numpy.sin(theta)

data = numpy.concatenate((x.reshape((-1, 1)), y.reshape((-1, 1))), axis=1)

# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'df')
ms.set_spatial('kd_tree')

# Loop the circle; project points to it...
steps = 16
for i in xrange(steps):
    angle = 2.0 * numpy.pi * float(i) / float(steps)
    rad = 3.0 + (numpy.random.beta(2.0, 2.0) - 0.5)

    x = rad * numpy.cos(angle)
    y = rad * numpy.sin(angle)

    proj = ms.manifold(numpy.array([x, y]), 1)

    print '(%.3f,%.3f) -> (%.3f,%.3f) | rad: %.3f -> %.3f' % (
Example #29
alpha_c1 = 4.0
beta_c1 = 1.0
alpha_c2 = 5.0
beta_c2 = 30.0
count_c = 50

c = numpy.concatenate((numpy.random.beta(alpha_c1, beta_c1, count_c).reshape((-1,1)), numpy.random.beta(alpha_c2, beta_c2, count_c).reshape((-1,1))), axis=1)

data = numpy.concatenate((a,b,c), axis=0)
scale = 6.0
data *= scale



# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'df')

normal_kernels = ['uniform', 'triangular', 'epanechnikov', 'cosine', 'gaussian', 'cauchy', 'logistic']
ms.set_kernel(random.choice(normal_kernels))
ms.set_spatial(random.choice(ms.spatials()))



# Print out basic stats...
print 'kernel = %s; spatial = %s' % (ms.get_kernel(), ms.get_spatial())
print 'exemplars = %i; features = %i' % (ms.exemplars(), ms.features())
print 'quality = %.3f; epsilon = %.3f; iter_cap = %i' % (ms.quality, ms.epsilon, ms.iter_cap)
print

Example #30
from ms import MeanShift

# Check an image filename has been provided on the command line...
if len(sys.argv) < 2:
    print "Need an image filename"
    sys.exit(1)

fn = sys.argv[1]

# Load the image into a numpy array...
image = cv.LoadImage(fn)
image = cv2array(image)

# Perform mean shift, with full clustering...
ms = MeanShift()
ms.set_data(image, 'bbf')

ms.set_spatial('iter_dual')

spatial_scale = 4.0
colour_scale = 32.0
ms.set_scale(
    numpy.array([
        1.0 / spatial_scale, 1.0 / spatial_scale, 1.0 / colour_scale,
        1.0 / colour_scale, 1.0 / colour_scale
    ]))

ms.quality = 0.0

print 'exemplars = %i; features = %i' % (ms.exemplars(), ms.features())
Example #31
from utils.cvarray import *
from utils.prog_bar import ProgBar

from ms import MeanShift

# Test the mirrored version of the von Mises-Fisher distribution, this time in 5D...

# Create a dataset - just a bunch of points in one direction, so we can test the mirroring effect (abusing a MeanShift object to do this)...
print 'Mirrored draws:'

vec = numpy.array([1.0, 0.5, 0.0, -0.5, -1.0])
vec /= numpy.sqrt(numpy.square(vec).sum())

print 'Base dir =', vec

draw = MeanShift()
draw.set_data(vec, 'f')
draw.set_kernel('fisher(256.0)')

data = draw.draws(32)

#print 'Input:'
#print data

# Create a mean shift object from the draws, but this time with a mirror_fisher kernel...
mirror = MeanShift()
mirror.set_data(data, 'df')
mirror.set_kernel('mirror_fisher(64.0)')

resample = mirror.draws(16)
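
# A speculative check (not in the original): with a mirror_fisher kernel
# roughly half of the draws should land on either side of the base direction...
dots = numpy.dot(resample, vec)
print 'aligned = %i; mirrored = %i' % ((dots > 0.0).sum(), (dots < 0.0).sum())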
Example #32
from ms import MeanShift

# Create a data set...
means = [[3.0, 2.0, 4.0, -2.0], [2.0, -1.0, 8.0, 1.0], [5.0, -1.0, 8.0, 2.5],
         [0.0, 0.0, 4.0, 0.5]]
quantity = 250

a = numpy.random.multivariate_normal(means[0], 0.1 * numpy.eye(4), quantity)
b = numpy.random.multivariate_normal(means[1], 0.2 * numpy.eye(4), quantity)
c = numpy.random.multivariate_normal(means[2], 0.3 * numpy.eye(4), quantity)
d = numpy.random.multivariate_normal(means[3], 0.4 * numpy.eye(4), quantity)

data = numpy.concatenate((a, b, c, d), axis=0)

# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'df')

ms.set_kernel('gaussian')
ms.set_spatial('kd_tree')

# Print out basic stats...
print 'kernel = %s; spatial = %s' % (ms.get_kernel(), ms.get_spatial())
print 'exemplars = %i; features = %i' % (ms.exemplars(), ms.features())
print 'quality = %.3f; epsilon = %.3f; iter_cap = %i' % (
    ms.quality, ms.epsilon, ms.iter_cap)
print

# Calculate the modes for all vectors, then print out some randomly selected convergances...
res = ms.modes_data()
Example #33
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

import random
import numpy

from ms import MeanShift



# Create a dataset - draws from a Gaussian...
data = numpy.array(map(lambda _: random.normalvariate(0.0, 2.0), xrange(1000)))



# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'd')

ms.set_kernel(random.choice(filter(lambda s: s!='fisher', ms.kernels())))
ms.set_spatial(random.choice(ms.spatials()))



# Iterate and calculate the probability at every point...
sam = numpy.arange(-5.0, 5.0, 0.15).reshape((-1,1))
prob = ms.probs(sam)



# Print out basic stats...
print 'kernel = %s; spatial = %s' % (ms.get_kernel(), ms.get_spatial())
Example #34
import numpy.random

from ms import MeanShift



# Create a simple data set...
a = numpy.random.normal(3.0, 1.0, 100)
b = numpy.random.normal(5.0, 0.5, 50)

data = numpy.concatenate((a,b))



# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'd')

ms.set_kernel(random.choice(filter(lambda s: s!='fisher', ms.kernels())))
ms.set_spatial(random.choice(ms.spatials()))



# Print out basic stats...
print 'kernel = %s; spatial = %s' % (ms.get_kernel(), ms.get_spatial())
print 'exemplars = %i; features = %i' % (ms.exemplars(), ms.features())
print 'quality = %.3f; epsilon = %.3f; iter_cap = %i' % (ms.quality, ms.epsilon, ms.iter_cap)
print

# Query the mode of various points...
for x in numpy.arange(0.0, 7.0, 0.4):
Example #35
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at

#   http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

import random
import numpy

from ms import MeanShift

# Create a dataset - draws from a Gaussian...
data = numpy.array(map(lambda _: random.normalvariate(0.0, 2.0), xrange(1000)))

# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'd')

normal_kernels = [
    'uniform', 'triangular', 'epanechnikov', 'cosine', 'gaussian', 'cauchy',
    'logistic'
]
ms.set_kernel(random.choice(normal_kernels))
ms.set_spatial(random.choice(ms.spatials()))

# Iterate and calculate the probability at every point...
sam = numpy.arange(-5.0, 5.0, 0.15).reshape((-1, 1))
prob = ms.probs(sam)

# Print out basic stats...
print 'kernel = %s; spatial = %s' % (ms.get_kernel(), ms.get_spatial())
Example #36
for r in xrange(data.shape[0]):
  loc = data[r,:]
  loc = (loc + size) / (2.0*size)
  loc *= dim
  try:
    image[int(loc[1]+0.5), int(loc[0]+0.5), :] = 255.0
  except: pass # Deals with out of range values.

image = array2cv(image)
cv.SaveImage('draw_input.png', image)



# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'df')
normal_kernels = ['uniform', 'triangular', 'epanechnikov', 'cosine', 'gaussian', 'cauchy', 'logistic']
ms.set_kernel(random.choice(normal_kernels))
ms.set_spatial('kd_tree')

print 'kernel = %s' % ms.get_kernel()



# Choose a reasonable size...
print 'Selecting size using loo:'
p = ProgBar()
ms.scale_loo_nll(callback = p.callback)
del p
Example #37
i = 0
for y in xrange(samples):
    for x in xrange(samples):
        data[i, 0] = y / float(samples - 1)
        data[i, 1] = x / float(samples - 1)

        dist = numpy.sqrt((data[i, 0] - 0.5)**2 +
                          (data[i, 1] - 0.5)**2) * numpy.pi * 7.0
        data[i, 2] = (1.0 + numpy.sin(dist)) / (6.0 + numpy.abs(numpy.sqrt(dist) - 3.0))

        i += 1

ms = MeanShift()
ms.set_data(data, 'df', 2)
ms.set_kernel('triangular')
ms.set_spatial('kd_tree')

# Choose a reasonable size...
print 'Selecting size using loo:'
p = ProgBar()
ms.scale_loo_nll(callback=p.callback)
del p

# Plot the pdf, for reference...
image = numpy.zeros((pixels, pixels, 3), dtype=numpy.float32)

print 'Rendering probability map:'
p = ProgBar()
def ms_by_conc(power, code=''):
    ms = MeanShift()
    ms.quality = 0.5
    ms.set_data(numpy.array([1, 0, 0], dtype=numpy.float32), 'f')
    ms.set_kernel('fisher(%.1f%s)' % (2**power, code))
    ms.set_spatial('kd_tree')

    return ms


options = map(ms_by_conc, xrange(8)) + [
    ms_by_conc(8, 'c'), ms_by_conc(8, 'a')
] + map(ms_by_conc, xrange(9, 16))

# Create it and do the bandwidth estimation...
ms = MeanShift()
ms.set_data(data, 'df')

p = ProgBar()
best = ms.scale_loo_nll_array(options, p.callback)
del p

print 'Selected kernel =', ms.get_kernel()
print 'LOO score =', best

# Visualise the best option...
visualise('bandwidth_fisher.png', ms)

# Also visualise correct vs approximate, for sanity checking...
for option in [ms_by_conc(8, 'c'), ms_by_conc(8, 'a')]:  #options:
    ms.copy_all(option)
Example #39
data = []

for camera in cameras:
    direction = numpy.random.vonmises(camera[3], conc, size=camera[0])
    x = numpy.random.normal(camera[1], sd_x, size=camera[0])
    y = numpy.random.normal(camera[2], sd_y, size=camera[0])

    block = numpy.concatenate((x.reshape((-1, 1)), y.reshape((-1, 1)), numpy.cos(direction).reshape((-1, 1)), numpy.sin(direction).reshape((-1, 1))), axis=1)
    data.append(block)
data = numpy.concatenate(data, axis=0)

# Construct the mean shift object from it, including a composite kernel...
ms = MeanShift()
ms.set_data(data, 'df')
ms.set_kernel('composite(2:gaussian,2:fisher(32.0))')
ms.set_spatial('kd_tree')
ms.set_scale(numpy.array([10.0, 5.0, 1.0, 1.0]))
ms.merge_range = 0.05

# Print out information in a convoluted way to test some convoluted features!..
ms2 = MeanShift()
ms2.copy_kernel(ms)
print 'Kernel:', ms2.get_kernel()
del ms2

# For our first trick visualise the data set...
img = numpy.zeros((size, size, 3), dtype=numpy.float32)
Example #40
  
  cos_theta = numpy.cos(theta)
  sin_theta = numpy.sin(theta)
  
  which = [deflection, cos_theta, sin_theta]
  chunk = numpy.concatenate((which[ex_dim].reshape((-1,1)), which[(ex_dim+1)%3].reshape((-1,1)), which[(ex_dim+2)%3].reshape((-1,1))), axis=1)
  
  data.append(chunk)
  
data = numpy.concatenate(data, axis=0)
data /= numpy.sqrt(numpy.square(data).sum(axis=1)).reshape((-1,1))



# Setup mean shift...
ms = MeanShift()
ms.set_data(data, 'df')

ms.set_kernel('fisher(32.0)')
ms.set_spatial(random.choice(ms.spatials()))



# Make a mercator projection probability map, save it out...
## Parameters...
scale = 128
height = scale * 2
width = int(2.0 * numpy.pi * scale)

## Locations to sample...
x_to_nx = numpy.cos(numpy.linspace(0.0, 2.0 * numpy.pi, width, False))
Example #41
# Check an image filename has been provided on the command line...
if len(sys.argv) < 2:
    print "Need an image filename"
    sys.exit(1)

fn = sys.argv[1]


# Load the image into a numpy array...
image = cv.LoadImage(fn)
image = cv2array(image)


# Perform mean shift, with full clustering...
ms = MeanShift()
ms.set_data(image, "bbf")

ms.set_spatial("iter_dual")

spatial_scale = 4.0
colour_scale = 32.0
ms.set_scale(
    numpy.array([1.0 / spatial_scale, 1.0 / spatial_scale, 1.0 / colour_scale, 1.0 / colour_scale, 1.0 / colour_scale])
)

ms.quality = 0.0

print "exemplars = %i; features = %i" % (ms.exemplars(), ms.features())

Example #42
image = numpy.zeros((dim, dim, 3), dtype=numpy.float32)

for r in xrange(data.shape[0]):
    loc = data[r, :]
    loc = (loc + size) / (2.0 * size)
    loc *= dim
    try:
        image[int(loc[1] + 0.5), int(loc[0] + 0.5), :] = 255.0
    except:
        pass  # Deals with out of range values.

image = array2cv(image)
cv.SaveImage('bandwidth_samples.png', image)

# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'df')
ms.set_kernel('gaussian')
ms.set_spatial('kd_tree')


# Progress bar version of scale_loo_nll...
def scale_loo_nll():
    p = ProgBar()
    ms.scale_loo_nll(callback=p.callback)
    del p


# Iterate and try out a bunch of different algorithms...
for name, alg in [('human_picked',
                   lambda: ms.set_scale(numpy.array([5.0, 5.0]))),
Example #43
# Draw a data set...
data = []

for camera in cameras:
  direction = numpy.random.vonmises(camera[3], conc, size=camera[0])
  x = numpy.random.normal(camera[1], sd_x, size=camera[0])
  y = numpy.random.normal(camera[2], sd_y, size=camera[0])
  
  block = numpy.concatenate((x.reshape((-1,1)), y.reshape((-1,1)), numpy.cos(direction).reshape((-1,1)), numpy.sin(direction).reshape((-1,1))), axis=1)
  data.append(block)
data = numpy.concatenate(data, axis=0)



# Construct the mean shift object from it, including a composite kernel...
ms = MeanShift()
ms.set_data(data, 'df')
ms.set_kernel('composite(2:gaussian,2:fisher(32.0))')
ms.set_spatial('kd_tree')
ms.set_scale(numpy.array([10.0,5.0,1.0,1.0]))
ms.merge_range = 0.05



# Print out information in a convoluted way to test some convoluted features!..
ms2 = MeanShift()
ms2.copy_kernel(ms)
print 'Kernel:', ms2.get_kernel()
del ms2



# Create a mean shift object with some data and a composite kernel - data is a position using radial coordinates followed by two angles; also using this for testing the conversion system...
print 'a:'

direction = numpy.concatenate((numpy.random.normal(0.0, 0.1, samples//2), numpy.random.normal(numpy.pi, 1.0, samples//2)))
radius = numpy.concatenate((numpy.random.normal(2.0, 0.5, samples//2), numpy.random.normal(2.0, 0.5, samples//4), numpy.random.normal(3.0, 0.5, samples//4)))
ang_a = numpy.concatenate((numpy.random.normal(0.0, 0.3, samples//4), numpy.random.normal(numpy.pi, 0.3, samples//4), numpy.random.normal(0.5*numpy.pi, 0.3, samples//4), numpy.random.normal(1.5*numpy.pi, 0.3, samples//4)))
ang_b = numpy.concatenate((numpy.random.normal(0.0, 0.6, samples//4), numpy.random.normal(0.5*numpy.pi, 0.5, samples//2), numpy.random.normal(0.0, 0.6, samples//4)))

data = numpy.concatenate((direction[:,numpy.newaxis], radius[:,numpy.newaxis], ang_a[:,numpy.newaxis], ang_b[:,numpy.newaxis]), axis=1)

kernel = 'composite(2:composite(1:gaussian, 1:gaussian), 2:fisher(%(ca)s), 2:mirror_fisher(%(cb)s))' # Don't ever do this: Just wanted to check a composite kernel within a composite kernel doesn't break things!

ms = MeanShift()
ms.set_data(data, 'df', None, 'rAA')
ms.set_kernel(kernel % {'ca' : 64.0, 'cb' : 64.0})



# Use the MeanShiftCompositeScale object to optimise...
optimise_scale = MeanShiftCompositeScale(kernel)
optimise_scale.add_param_scale(0)
optimise_scale.add_param_kernel('ca')
optimise_scale.add_param_kernel('cb')

steps = optimise_scale(ms)

print 'Optimisation of "a" took %i steps' % steps
print 'kernel = %s' % ms.get_kernel()
Example #45
for camera in cameras:
    direction = numpy.random.vonmises(camera[3], conc, size=camera[0])
    x = numpy.random.normal(camera[1], sd_x, size=camera[0])
    y = numpy.random.normal(camera[2], sd_y, size=camera[0])

    block = numpy.concatenate((x.reshape((-1, 1)), y.reshape((-1, 1)), numpy.cos(direction).reshape((-1, 1)), numpy.sin(direction).reshape((-1, 1))), axis=1)
    data.append(block)

# Construct the mean shift object from it, including a composite kernel...
kde = []
for ds in data:
    ms = MeanShift()
    ms.set_data(ds, 'df')
    if len(kde) == 0:
        ms.set_kernel('composite(2:gaussian,2:fisher(32.0))')
        ms.set_spatial('kd_tree')
        ms.set_scale(numpy.array([10.0, 5.0, 1.0, 1.0]))
        ms.merge_range = 0.05
    else:
        ms.copy_all(kde[0])

    kde.append(ms)

# Visualise the data set...
for ind, ds in enumerate(data):
    img = numpy.zeros((size, size, 3), dtype=numpy.float32)

# Sample from a circle + noise model to create some data...
samples = 8192
theta = 2.0 * numpy.pi * numpy.random.random(samples)
radius = 3.0 + (numpy.random.beta(2.0, 2.0, samples)-0.5)

x = radius * numpy.cos(theta)
y = radius * numpy.sin(theta)

data = numpy.concatenate((x.reshape((-1,1)), y.reshape((-1,1))), axis=1)



# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'df')
ms.set_spatial('kd_tree')



# Loop the circle; project points to it...
steps = 16
for i in xrange(steps):
  angle = 2.0 * numpy.pi * float(i) / float(steps)
  rad = 3.0 + (numpy.random.beta(2.0, 2.0)-0.5)
  
  x = rad * numpy.cos(angle)
  y = rad * numpy.sin(angle)
  
  proj = ms.manifold(numpy.array([x,y]),1)
Example #47
# Copyright 2014 Tom SF Haines

# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at

#   http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

import numpy.random

from ms import MeanShift

# Create the most perverse data matrix array I can imagine...
data = numpy.random.uniform(size=(3, 2, 2, 2))

print 'Insane input array:'
print data
print

# Create a MeanShift object with the data...
ms = MeanShift()
ms.set_data(data, 'fdbf')  # This should make your head hurt. If it doesn't, seek help!

# Loop and print the feature vectors it sees...
print 'What MeanShift sees:'
for i in xrange(len(ms)):
    print i, '|', ms[i]
print
Example #48
for i in xrange(samples):
  data_b[i,3:] = to_origin(data_b[i,:3])



# Add a bit of noise...
data_a += 0.1 * numpy.random.normal(size=(samples,6))
data_b += 0.1 * numpy.random.normal(size=(samples,6))



# Create two distributions...
spatial_scale = 8.0
scale = numpy.array([spatial_scale, spatial_scale, spatial_scale, 1.0, 1.0, 1.0, 1.0])

mult_a = MeanShift()
mult_a.set_data(data_a, 'df', None, '...V')
mult_a.set_kernel('composite(3:gaussian,4:mirror_fisher(512.0))')
mult_a.set_spatial('kd_tree')
mult_a.set_scale(scale)

mult_b = MeanShift()
mult_b.set_data(data_b, 'df', None, '...V')
mult_b.copy_all(mult_a)
mult_b.set_scale(scale)



# A function for converting a distribution into a ply file...
def to_ply(fn, samples):
  # Open and header...
Example #49
  for y in xrange(img.shape[0]):
    t = float(y) / float(img.shape[0]-1)
    x = row[0] * t + row[1] * (1.0-t)
    if x<-numpy.pi: x += numpy.pi
    if x>numpy.pi: x -= numpy.pi
    x = int(img.shape[1] * (x + numpy.pi) / (numpy.pi * 2.0))
    
    img[y, x] += 1.0

img *= 255.0 / img.max()
cv.SaveImage('angle_input.png', array2cv(img))



# Try using mean shift to cluster...
ms = MeanShift()
ms.set_data(data, 'df', None, 'AA')
ms.set_kernel('composite(2:fisher(16.0),2:fisher(16.0))')

modes, indices = ms.cluster()
print 'Found %i modes' % modes.shape[0]



# Another crazy visualisation, this time the modes...
img = numpy.zeros((256, 1024), dtype=numpy.float32)
for row in modes:
  for y in xrange(img.shape[0]):
    t = float(y) / float(img.shape[0]-1)
    x = row[0] * t + row[1] * (1.0-t)
    if x<-numpy.pi: x += numpy.pi

# Simply creates two identical Fisher distributions, one using the approximation, the other the correct value, and then runs some tests to verify that they are the same.



# Parameters...
dims = 4
kernel = 'fisher'
conc = 256.0
samples = 1024



# Create the two distributions - don't need to be complicated...
correct = MeanShift()
correct.set_data(numpy.array([1.0] + [0.0]*(dims-1), dtype=numpy.float32), 'f')
correct.set_kernel('%s(%.1fc)' % (kernel, conc))
correct.quality = 1.0

approximate = MeanShift()
approximate.set_data(numpy.array([1.0] + [0.0]*(dims-1), dtype=numpy.float32), 'f')
approximate.set_kernel('%s(%.1fa)' % (kernel, conc))
approximate.quality = 1.0



# Draw a bunch of samples and compare the probabilities in both to check they are basically the same...
sample = correct.draws(samples)

cp = correct.probs(sample)
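
# A hedged continuation (the original truncates here): the approximate
# version should give near-identical probabilities...
ap = approximate.probs(sample)
print 'max |correct - approximate| = %.6f' % numpy.abs(cp - ap).max()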
Example #51
    sin_theta = numpy.sin(theta)

    which = [deflection, cos_theta, sin_theta]
    chunk = numpy.concatenate((which[ex_dim].reshape((-1, 1)), which[(ex_dim + 1) % 3].reshape((-1, 1)), which[(ex_dim + 2) % 3].reshape((-1, 1))), axis=1)

    data.append(chunk)

data = numpy.concatenate(data, axis=0)
data /= numpy.sqrt(numpy.square(data).sum(axis=1)).reshape((-1, 1))

# Setup mean shift...
print 'Fisher:'
ms = MeanShift()
ms.set_data(data, 'df')

ms.set_kernel('fisher(4096.0)')
ms.set_spatial('kd_tree')

# Visualise the samples on a mercator projection...
print '  Samples...'

image = numpy.zeros((height, width, 3), dtype=numpy.float32)
for vec in data:
    x = numpy.arctan2(vec[1], vec[0])
    if x < 0.0: x += 2.0 * numpy.pi
    x = x * width / (2.0 * numpy.pi)
    if x >= width: x -= width
Example #52
import random
import numpy

from ms import MeanShift


# Create a dataset - equally spaced samples weighted by a Gaussian, such that it should estimate a Gaussian...
x = numpy.arange(-5.0, 5.0, 0.02)
y = numpy.exp(-0.5 * x ** 2.0 / 2.0)

data = numpy.concatenate((x.reshape((-1, 1)), y.reshape((-1, 1))), axis=1)


# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, "df")

ms.set_kernel(random.choice(filter(lambda s: s != "fisher", ms.kernels())))
ms.set_spatial(random.choice(ms.spatials()))
ms.set_scale(numpy.ones(2), 1)


# Iterate and calculate the probability at every point...
sam = numpy.arange(-5.0, 5.0, 0.15)
prob = numpy.array(map(lambda v: ms.prob(numpy.array([v, 1.0])), sam))


# Print out basic stats...
print "kernel = %s; spatial = %s" % (ms.get_kernel(), ms.get_spatial())
print "exemplars = %i; features = %i" % (ms.exemplars(), ms.features())
Example #53
for r in xrange(data.shape[0]):
  loc = data[r,:]
  loc = (loc + size) / (2.0*size)
  loc *= dim
  try:
    image[int(loc[1]+0.5), int(loc[0]+0.5), :] = 255.0
  except: pass # Deals with out of range values.

image = array2cv(image)
cv.SaveImage('draw_input.png', image)



# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'df')
ms.set_kernel(random.choice(filter(lambda s: s!='fisher', ms.kernels())))
ms.set_spatial('kd_tree')

print 'kernel = %s' % ms.get_kernel()



# Choose a reasonable size...
print 'Selecting size using loo:'
p = ProgBar()
ms.scale_loo_nll(callback = p.callback)
del p

Example #54
#   http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

import numpy
import numpy.random
from scipy.misc import imsave

from utils.prog_bar import ProgBar

from ms import MeanShift



# Visualises each kernel...
for kernel in MeanShift.kernels():
  if MeanShift.info_config(kernel)!=None:
    continue
  
  a = MeanShift()
  a.set_data(numpy.array([0.0], dtype=numpy.float32), 'f')
  a.set_kernel(kernel)
  a.quality = 1.0
  
  image = numpy.ones((384, 1024), dtype=numpy.float32)
  x = numpy.linspace(-4, 4, image.shape[1])
  iy, ix = numpy.meshgrid(numpy.arange(image.shape[0]), numpy.arange(image.shape[1]), indexing='ij')
  
  ya = a.probs(x[:,None])
  ya = (image.shape[0]-1 - (image.shape[0]-1) * (ya / ya.max())).astype(numpy.int32)
  
Example #55
import numpy.random
from scipy.misc import imsave

from utils.prog_bar import ProgBar

from ms import MeanShift

# Multiplies the three great bells (Gaussian, Cauchy and Logistic - not their actual name, but it should be:-P) such that only their tails overlap, drawing a lot of samples then visualising a density estimate of the resulting shape - a rather roundabout demonstration of how they are different...

great_bells = [('gaussian', 4.0), ('cauchy', 6.0), ('logistic', 8.0)]

for bell, gap in great_bells:
    print '%s:' % bell

    # Setup two single sample models...
    a = MeanShift()
    a.set_data(numpy.array([-0.5 * gap], dtype=numpy.float32), 'f')
    a.set_kernel(bell)
    a.quality = 1.0

    b = MeanShift()
    b.set_data(numpy.array([0.5 * gap], dtype=numpy.float32), 'f')
    b.set_kernel(bell)
    b.quality = 1.0

    # Multiply them and generate new distribution...
    draw = numpy.empty((1024, 1), dtype=numpy.float32)
    MeanShift.mult([a, b], draw)

    ab = MeanShift()
    ab.set_data(draw, 'df')
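
    # A hedged completion (the original truncates here), following the KDE
    # setup used by the other examples on this page...
    ab.set_kernel('triangular')
    ab.set_spatial('kd_tree')
    ab.scale_loo_nll()
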
# Create a data set, 2D...
means = [[1.0,1.0], [4.0,4.0], [4.0,1.0], [1.0,4.0]]
quantity = 250

a = numpy.random.multivariate_normal(means[0], 0.1*numpy.eye(2), quantity)
b = numpy.random.multivariate_normal(means[1], 0.2*numpy.eye(2), quantity)
c = numpy.random.multivariate_normal(means[2], 0.3*numpy.eye(2), quantity)
d = numpy.random.multivariate_normal(means[3], 0.4*numpy.eye(2), quantity)

data = numpy.concatenate((a,b,c,d), axis=0)



# Use mean shift to cluster it...
ms = MeanShift()
ms.set_data(data, 'df')

ms.set_kernel(random.choice(filter(lambda s: s!='fisher', ms.kernels())))
ms.set_spatial(random.choice(ms.spatials()))

modes, indices = ms.cluster()



# Print out basic stats...
print 'kernel = %s; spatial = %s' % (ms.get_kernel(), ms.get_spatial())
print 'exemplars = %i; features = %i' % (ms.exemplars(), ms.features())
print 'quality = %.3f; epsilon = %.3f; iter_cap = %i' % (ms.quality, ms.epsilon, ms.iter_cap)
print
Example #57
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

import random
import numpy
import numpy.random

from ms import MeanShift

# Create a simple data set...
a = numpy.random.normal(3.0, 1.0, 100)
b = numpy.random.normal(5.0, 0.5, 50)

data = numpy.concatenate((a, b))

# Setup the mean shift object...
ms = MeanShift()
ms.set_data(data, 'd')

normal_kernels = [
    'uniform', 'triangular', 'epanechnikov', 'cosine', 'gaussian', 'cauchy',
    'logistic'
]
ms.set_kernel(random.choice(normal_kernels))
ms.set_spatial(random.choice(ms.spatials()))

# Print out basic stats...
print 'kernel = %s; spatial = %s' % (ms.get_kernel(), ms.get_spatial())
print 'exemplars = %i; features = %i' % (ms.exemplars(), ms.features())
print 'quality = %.3f; epsilon = %.3f; iter_cap = %i' % (
    ms.quality, ms.epsilon, ms.iter_cap)
print