import scipy.stats


class LaneHypothesis(object):
	def __init__(self, position, momentum, weight=1.0):
		# History of estimated lane positions; the newest entry is the current one.
		self.positions = [position]
		self.weight = weight
		self.prev_data = None
		self.momentum = momentum
		self.dirfilter = None

	def _feat_probability(self, feat):
		# Gaussian likelihood that the normalised feature response lies close to 1.0.
		std = 0.1
		return scipy.stats.norm.pdf(1.0 - feat, 0.0, scale=std)
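	# For example, _feat_probability(0.95) evaluates norm.pdf(0.05, 0.0, scale=0.1),
	# about 3.5, while _feat_probability(0.5) drops to roughly 1e-5, so the
	# likelihood strongly favours responses near 1.0.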

	@property
	def position(self):
		return self.positions[-1]

	def evidence(self, data):
		# Lazily build the oriented-edge filter to match the patch shape.
		if self.dirfilter is None:
			self.dirfilter = AngleFilter(data.shape)

		# Normalised strength of the strongest directional response in the patch.
		c = self.dirfilter.get_component_values(data, mode='valid')[0][0]
		feat = self.dirfilter.basis.max_response_value(c)/115.0
		#feat = pearsonr(data.flatten(), self.prev_data.flatten())[0]
		#prob = self._feat_probability(lightness*sobeldiff)
		return feat

	def prior(self, prev):
		# Motion prior: blend the observed displacement with the previous momentum,
		# then score the displacement under a Gaussian centred on that momentum.
		disp = self.position - prev.position
		self.momentum = 0.5*disp + 0.5*prev.momentum
		prob = scipy.stats.norm.pdf(disp, loc=self.momentum, scale=20.0)
		return prob
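

# Hypothetical helper, not part of the original source: a minimal sketch of how
# prior() and evidence() might combine into a particle-style weight update,
# assuming weights multiply as motion prior times image evidence and that
# `patch` is any image block compatible with AngleFilter.
def update_weight(prev, curr, patch):
	# Posterior-style weight for the current hypothesis.
	curr.weight = prev.weight * curr.prior(prev) * curr.evidence(patch)
	return curr.weight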
Example #3
import numpy as np
import scipy.cluster.hierarchy


def detwalk(image, prev_start):
	blocksize = (9, 9)

	filt = AngleFilter(blocksize)

	# Half-overlapping horizontal strips; // keeps the range() step an integer.
	rows = range(0, image.shape[0] - blocksize[1], blocksize[1] // 2)
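	# For a 100-row image this gives range(0, 91, 4), i.e. strip tops at
	# 0, 4, 8, ..., 88, so consecutive 9-row strips overlap by roughly half.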
	
	tracker = HypothesisTracker()
	particles = []
	
	for i, row in enumerate(rows):
		candidates = []
		# Nine-row strip of the image and its per-column directional responses.
		d = image[row:row+blocksize[1]]
		comp = filt.get_component_values(d, mode='valid')[0]
		angles = filt.basis.max_response_angle(comp.T)
		values = filt.basis.max_response_value(comp.T)

		# Keep only the five strongest filter responses in this strip.
		#valid = values > 1.5
		valid = np.argsort(values)[::-1][:5]
		indices = np.arange(len(angles), dtype=float)

		angles = np.array(angles[valid])
		feats = np.array(indices[valid])
		weights = np.array(values[valid])
		if len(feats) < 2: continue
		feats = feats.reshape((len(feats), 1))
		clusters = scipy.cluster.hierarchy.fclusterdata(feats, 3, criterion='distance')
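		# For example, column indices [3., 4., 11.] fall into two flat clusters
		# ({3, 4} and {11}) under this distance-3 cutoff, so neighbouring strong
		# responses merge into a single lane candidate.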
		for c in np.unique(clusters):
			# Candidate: (centre point, median angle, mean response strength).
			x = np.mean(feats[clusters == c]) + blocksize[0]/2.0
			y = row + blocksize[1]/2.0
			#pylab.scatter(x, y)
			candidates.append(((x, y),
					np.median(angles[clusters == c]),
					np.mean(values[clusters == c])))
		# Rank candidates by mean response strength (ascending) before spawning
		# new hypotheses from them.
		candidates.sort(key=lambda x: x[-1])
		particles = tracker.spawn_hypotheses(particles, candidates)

	# Return hypotheses ordered by mean weight, strongest first.
	particles.sort(key=lambda p: p.mean_weight(), reverse=True)
	return particles
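

# Hypothetical usage sketch, not from the original source: run detwalk on a
# single grayscale frame. The image path, loading via matplotlib, and passing
# None for prev_start (which detwalk does not currently read) are assumptions;
# AngleFilter and HypothesisTracker come from the surrounding project.
if __name__ == '__main__':
	import matplotlib.image as mpimg

	frame = mpimg.imread('road.png')      # assumed grayscale test frame
	if frame.ndim == 3:
		frame = frame.mean(axis=2)        # collapse RGB to one channel
	hypotheses = detwalk(frame, prev_start=None)
	for h in hypotheses[:3]:
		print(h.mean_weight())            # strongest hypotheses first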