Example #1
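This excerpt opens part-way through building the data list. A minimal sketch of the context it assumes (the import path and all constant values are assumptions; only the [y, x, cos(dir), sin(dir)] column layout is implied by the drawing code further down):

import numpy
from ms import MeanShift  # import path is an assumption

size = 512        # resolution of the visualisation images (assumed value)
scale = 10.0      # spatial extent of the sample positions (assumed value)
angle_len = 0.5   # length of each direction tick when drawing (assumed value)
angle_step = 8    # pixels drawn along each direction tick (assumed value)

data = []
# Each block is assumed to be an (n, 4) array with rows of
# [y, x, cos(direction), sin(direction)]; the generation loop ends with...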
  data.append(block)



# Construct the mean shift object from it, including a composite kernel...
kde = []
for ds in data:
  ms = MeanShift()
  ms.set_data(ds, 'df')
  if len(kde)==0:
    ms.set_kernel('composite(2:gaussian,2:fisher(32.0))')
    ms.set_spatial('kd_tree')
    ms.set_scale(numpy.array([10.0,5.0,1.0,1.0]))
    ms.merge_range = 0.05
  else:
    ms.copy_all(kde[0])
    
  kde.append(ms)



# Visualise the data set...
for ind, ds in enumerate(data):
  img = numpy.zeros((size, size, 3), dtype=numpy.float32)

  for sample in ds:
    s_x = (size-1) * sample[1] / scale
    s_y = (size-1) * sample[0] / scale
    e_x = (size-1) * (sample[1] + angle_len * sample[3]) / scale
    e_y = (size-1) * (sample[0] + angle_len * sample[2]) / scale
  
Example #2


# Create two distributions...
spatial_scale = 8.0
scale = numpy.array([spatial_scale, spatial_scale, spatial_scale, 1.0, 1.0, 1.0, 1.0])

mult_a = MeanShift()
mult_a.set_data(data_a, 'df', None, '...V')
mult_a.set_kernel('composite(3:gaussian,4:mirror_fisher(512.0))')
mult_a.set_spatial('kd_tree')
mult_a.set_scale(scale)

mult_b = MeanShift()
mult_b.set_data(data_b, 'df', None, '...V')
mult_b.copy_all(mult_a)
mult_b.set_scale(scale)



# A function for converting a distribution into a ply file...
def to_ply(fn, samples):
  # Open and header...
  f = open(fn, 'w')
  f.write('ply\n')
  f.write('format ascii 1.0\n')
  
  f.write('element vertex %i\n' % (samples.shape[0]*5))
  f.write('property float x\n')
  f.write('property float y\n')
  f.write('property float z\n')
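
The listing cuts off inside to_ply. A hedged sketch of how these pieces could be tied together - the sample count, the output filename, and the assumption that the product samples keep the 3+4 column layout are all mine:

count = 1024  # number of samples to draw from the product (assumed)
draws = numpy.empty((count, 7), dtype=numpy.float32)
MeanShift.mult((mult_a, mult_b), draws)  # resample the product of the two KDEs

to_ply('mult_a_b.ply', draws)  # filename is an assumption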
Example #3
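The excerpt opens inside a small helper; its head is presumably along these lines (the signature and the default for code are assumptions, inferred from how ms_by_conc is called below):

def ms_by_conc(power, code=''):
    # Kernel density estimate with a Fisher kernel of concentration 2**power;
    # code presumably selects the correct ('c') vs approximate ('a')
    # normalisation, matching the sanity check at the end...
    ms = MeanShift()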
    ms.quality = 0.5
    ms.set_data(numpy.array([1, 0, 0], dtype=numpy.float32), 'f')
    ms.set_kernel('fisher(%.1f%s)' % (2**power, code))
    ms.set_spatial('kd_tree')

    return ms


options = map(ms_by_conc, xrange(8)) + [
    ms_by_conc(8, 'c'), ms_by_conc(8, 'a')
] + map(ms_by_conc, xrange(9, 16))

# Create it and do the bandwidth estimation...
ms = MeanShift()
ms.set_data(data, 'df')

p = ProgBar()
best = ms.scale_loo_nll_array(options, p.callback)
del p

print 'Selected kernel =', ms.get_kernel()
print 'LOO score =', best

# Visualise the best option...
visualise('bandwidth_fisher.png', ms)

# Also visualise correct vs approximate, for sanity checking...
for option in [ms_by_conc(8, 'c'), ms_by_conc(8, 'a')]:  #options:
    ms.copy_all(option)
    visualise('bandwidth_fisher_%s.png' % option.get_kernel(), ms)
Example #4
mult_a.set_data(data_a, 'd', None, 'A')
mult_a.set_kernel('fisher(128.0)')

mult_b = MeanShift()
mult_b.set_data(data_b, 'd', None, 'A')
mult_b.set_kernel('fisher(512.0)')



# Do multiplication...
data_ab = numpy.empty((128,1), dtype=numpy.float32)
MeanShift.mult((mult_a, mult_b), data_ab)

mult_ab = MeanShift()
mult_ab.set_data(data_ab, 'df', None, 'A')
mult_ab.copy_all(mult_b)



# Visualise all angles...
img = numpy.zeros((64, 1024,3), dtype=numpy.float32)

for i in xrange(img.shape[1]):
  ang = 2.0 * numpy.pi * i / float(img.shape[1])
  img[:,i,0] = mult_a.prob(numpy.array([ang]))
  img[:,i,1] = mult_ab.prob(numpy.array([ang]))
  img[:,i,2] = mult_b.prob(numpy.array([ang]))



img *= 255.0 / img.max()
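
The excerpt stops after normalising img, which is presumably then written out; a sketch using PIL (both the library choice and the filename are assumptions):

from PIL import Image
Image.fromarray(img.astype(numpy.uint8)).save('mult_angles.png')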
Example #5
            (-1, 1)), numpy.sin(direction).reshape((-1, 1))),
                              axis=1)
    data.append(block)

# Construct the mean shift object from it, including a composite kernel...
kde = []
for ds in data:
    ms = MeanShift()
    ms.set_data(ds, 'df')
    if len(kde) == 0:
        ms.set_kernel('composite(2:gaussian,2:fisher(32.0))')
        ms.set_spatial('kd_tree')
        ms.set_scale(numpy.array([10.0, 5.0, 1.0, 1.0]))
        ms.merge_range = 0.05
    else:
        ms.copy_all(kde[0])

    kde.append(ms)

# Visualise the data set...
for ind, ds in enumerate(data):
    img = numpy.zeros((size, size, 3), dtype=numpy.float32)

    for sample in ds:
        s_x = (size - 1) * sample[1] / scale
        s_y = (size - 1) * sample[0] / scale
        e_x = (size - 1) * (sample[1] + angle_len * sample[3]) / scale
        e_y = (size - 1) * (sample[0] + angle_len * sample[2]) / scale

        for i in xrange(angle_step):
            t = float(i) / (angle_step - 1)
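            # (Sketch of the elided loop body - the interpolation and the
            # colour written are assumptions, not the original code...)
            x = int(round((1.0 - t) * s_x + t * e_x))
            y = int(round((1.0 - t) * s_y + t * e_y))
            if 0 <= x < size and 0 <= y < size:
                img[y, x, :] = 1.0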
Example #6
MeanShift.mult([mult_a, mult_b], draws)

prod_a_b = MeanShift()
prod_a_b.set_data(draws, 'df')
prod_a_b.set_kernel('mirror_fisher(512.0)')

visualise('mirror_fisher_prod_a_b.png', prod_a_b)
print 'Prepared and visualised product of a and b'

## Multiply b and c distributions and visualise...
draws = numpy.empty((count, 2))
MeanShift.mult((mult_b, mult_c), draws)

prod_b_c = MeanShift()
prod_b_c.set_data(draws, 'df')
prod_b_c.copy_all(prod_a_b)

visualise('mirror_fisher_prod_b_c.png', prod_b_c)
print 'Prepared and visualised product of b and c'

## Multiply c and a distributions and visualise...
## This doesn't work - the initial states are sampled with equal probability, but the product forms two islands, one of which is ultimately far more probable than the other.
## Transferring between the islands could take millions of sampling steps, so the output ends up giving them equal weight when it really shouldn't...
draws = numpy.empty((count, 2))
MeanShift.mult((mult_c, mult_a), draws)

prod_c_a = MeanShift()
prod_c_a.set_data(draws, 'df')
prod_c_a.copy_all(prod_a_b)

visualise('mirror_fisher_prod_c_a_wrong.png', prod_c_a)
print 'Prepared and visualised product of c and a'
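
Given the comment above about the two islands coming out equally weighted, one rough check is to compare the resampled product against the (unnormalised) pointwise product of the factors at the two modes - the probe directions below are hypothetical stand-ins, not values from the original script:

probes = [numpy.array([0.6, 0.8]), numpy.array([-0.6, 0.8])]  # assumed mode locations (unit vectors)
for p in probes:
  print p, prod_c_a.prob(p), mult_c.prob(p) * mult_a.prob(p)

If the product were weighted correctly, the ratio between the two printed densities would be roughly the same at both probes.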