b = numpy.random.multivariate_normal(means[1], 0.2*numpy.eye(2), quantity)
c = numpy.random.multivariate_normal(means[2], 0.3*numpy.eye(2), quantity)
d = numpy.random.multivariate_normal(means[3], 0.4*numpy.eye(2), quantity)

data = numpy.concatenate((a,b,c,d), axis=0)
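
# This snippet starts mid-example: it assumes numpy, random and the MeanShift
# class were already imported, and that `means`, `quantity` and the first
# cluster `a` were defined earlier. A minimal sketch of that missing setup
# (values and the import path are assumptions, for illustration only):
#
#   import random
#   import numpy
#   from ms import MeanShift
#
#   means = [numpy.array([0.0, 0.0]), numpy.array([2.0, 0.0]),
#            numpy.array([0.0, 2.0]), numpy.array([2.0, 2.0])]
#   quantity = 256
#   a = numpy.random.multivariate_normal(means[0], 0.1*numpy.eye(2), quantity)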



# Use mean shift to cluster it...
ms = MeanShift()
ms.set_data(data, 'df')

# Pick a random kernel (excluding 'fisher', which is intended for directional data) and a random spatial indexing structure...
ms.set_kernel(random.choice([k for k in ms.kernels() if k != 'fisher']))
ms.set_spatial(random.choice(ms.spatials()))

modes, indices = ms.cluster()
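
# A quick sanity check on what cluster() returned: modes has one row per
# cluster centre and indices gives each exemplar's cluster. Assuming the
# indices are all non-negative integers, the cluster sizes follow directly...
print 'clusters found = %i' % modes.shape[0]
print 'cluster sizes = %s' % str(numpy.bincount(numpy.asarray(indices, dtype=numpy.int64).flatten()))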



# Print out basic stats...
print 'kernel = %s; spatial = %s' % (ms.get_kernel(), ms.get_spatial())
print 'exemplars = %i; features = %i' % (ms.exemplars(), ms.features())
print 'quality = %.3f; epsilon = %.3f; iter_cap = %i' % (ms.quality, ms.epsilon, ms.iter_cap)
print



# Print out a grid of cluster assignments...
for j in xrange(20):
  for i in xrange(20):
    fv = numpy.array([0.25*j, 0.25*i])
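
# The grid loop above is cut off by the example boundary. A sketch of one way
# it could be finished, labelling each grid point with its nearest mode using
# plain numpy (an illustrative simplification, not the original code):
#
#   for j in xrange(20):
#     line = ''
#     for i in xrange(20):
#       fv = numpy.array([0.25*j, 0.25*i])
#       dist = numpy.sqrt(((modes - fv[numpy.newaxis,:])**2).sum(axis=1))
#       line += '%i ' % numpy.argmin(dist)
#     print line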
Example #2
  
  for i in xrange(angle_step):
    try:
      # Interpolate along the line from the end point to the start point...
      t = float(i) / (angle_step-1)
      t_x = int(t * s_x + (1-t) * e_x)
      t_y = int(t * s_y + (1-t) * e_y)
      try:
        # Set the pixel to t*0.666 if it is currently darker than that...
        if img[t_y,t_x,0] < t*0.666:
          img[t_y,t_x,:] = t*0.666
      except IndexError:
        pass # Pixel falls outside the image.
    except ValueError:
      pass # Coordinate was NaN, so int() failed.

modes, _ = ms.cluster()

for ii, sample in enumerate(modes):
  print 'mode %i: position = (%.3f, %.3f), direction = (%.3f,%.3f)' % (ii, sample[0], sample[1], sample[2], sample[3])
  
  # Convert the mode's position and direction into start/end pixel coordinates for drawing its direction line...
  s_x = (size-1) * sample[1] / scale
  s_y = (size-1) * sample[0] / scale
  e_x = (size-1) * (sample[1] + angle_len * sample[3]) / scale
  e_y = (size-1) * (sample[0] + angle_len * sample[2]) / scale
  
  
  for i in xrange(angle_step):
    try:
      t = float(i) / (angle_step-1)
      t_x = int(t * s_x + (1-t) * e_x)
      t_y = int(t * s_y + (1-t) * e_y)
      try:
        if img[t_y,t_x,0] < t*0.666:
          img[t_y,t_x,:] = t*0.666
      except IndexError:
        pass
    except ValueError:
      pass
Example #3
        1.0 / colour_scale, 1.0 / colour_scale
    ]))

# Adjust the convergence and mode-merging parameters...
ms.quality = 0.0
ms.ident_dist = 0.3
ms.merge_range = 0.6
ms.merge_check_step = 1

# Print out basic stats...
print 'kernel = %s; spatial = %s' % (ms.get_kernel(), ms.get_spatial())
print 'exemplars = %i; features = %i' % (ms.exemplars(), ms.features())
print 'quality = %.3f; epsilon = %.3f; iter_cap = %i' % (
    ms.quality, ms.epsilon, ms.iter_cap)
print 'ident_dist = %.3f; merge_range = %.3f; merge_check_step = %i' % (
    ms.ident_dist, ms.merge_range, ms.merge_check_step)
print

# Generate a segmentation image...
modes, indices = ms.cluster()
# Colour each pixel with the feature values (channels 2 onwards) of the mode it was assigned to...
image = modes[indices.flatten(), 2:].reshape(image.shape)

print 'Found %i modes' % modes.shape[0]

#image = ms.modes_data()[:,:,2:] # Gets to the same result (Ignoring floating point variations), but crazy slow.

# Save the segmentation image...
root, ext = os.path.splitext(fn)
ofn = root + '_seg' + ext
image = array2cv(image)
cv.SaveImage(ofn, image)
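
# Note: array2cv and cv.SaveImage belong to the legacy OpenCV 1.x Python
# bindings. If only the modern cv2 bindings are available, an equivalent save
# of the numpy array (before the array2cv conversion, assuming values in the
# 0-255 range) might look like:
#
#   import cv2
#   cv2.imwrite(ofn, image.astype(numpy.uint8))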