import numpy as np
from sklearn.metrics.pairwise import euclidean_distances

# compute_boundary_desc is assumed to live in descriptors,
# alongside get_interest_points.
from descriptors import get_interest_points, compute_boundary_desc


def get_visual_words(im, mask, vocabulary):
    """Assigns each boundary descriptor of the image to its nearest visual word."""
    interest_points = get_interest_points(mask)
    descriptors, _ = compute_boundary_desc(im, mask, interest_points)
    desc_count = len(descriptors)
    words = np.zeros(desc_count)
    if desc_count > 0:
        # Distance from every descriptor to every vocabulary entry,
        # keeping the index of the closest word for each descriptor.
        dists = euclidean_distances(descriptors, vocabulary)
        words = dists.argmin(axis=1)
    return words
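# Usage sketch (illustrative, not part of the module): turn the word indices
# returned by get_visual_words into a bag-of-words histogram. `im`, `mask` and
# a k-means `vocabulary` of shape (n_words, descriptor_dim) are assumed to be
# already loaded by the caller.
#
#     words = get_visual_words(im, mask, vocabulary)
#     bow, _ = np.histogram(words, bins=np.arange(len(vocabulary) + 1))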
def score_results(coords, desc, search_results, names, voc, verbose=False):
    """
    Rescores the 200 best search results.

    params
    ------
    coords: ndarray, interest-point coordinates of the query image
    desc: ndarray, descriptors of the query image
    search_results: ndarray, rows of (image index, score) for the candidates
    names: ndarray, image database
    voc: ndarray, vocabulary
    verbose: boolean, optional, default: False
        Make output more verbose

    returns
    -------
    search_results: ndarray, rows of (image index, score) sorted by
        decreasing score
    """
    for j, (result, score) in enumerate(search_results):
        if verbose:
            print "Scoring %d / %d" % (j, len(search_results))
        # Recompute the candidate's boundary descriptors and add the refined
        # match score to its first-pass score.
        im2, mask2 = get_image(names[result, 0])
        interest_points = get_interest_points(mask2)
        desc2, coords2 = compute_boundary_desc(im2, mask2, interest_points)
        if desc2 is not None and len(desc2):
            search_results[j, 1] += score_(desc, desc2, coords, coords2)
    # Sort by decreasing score.
    idxs = search_results[:, 1].argsort()[::-1]
    return search_results[idxs, :]
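# Usage sketch (illustrative): assumes a first-pass retrieval produced
# `search_results` as an (n, 2) float ndarray of (image index, score) rows,
# and that `coords`/`desc` describe the query image as above.
#
#     reranked = score_results(coords, desc, search_results[:200], names, voc)
#     best_match = int(reranked[0, 0])  # index of the best-scoring image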
import load
import matplotlib.pyplot as plt

from descriptors import get_interest_points

gen = load.load_data()
# Skip the first image and work on the second one.
_, _ = gen.next()
im, mask = gen.next()

points = get_interest_points(mask, min_dist=40)

# Overlay the detected interest points on the image;
# points are (row, col) while scatter expects (x, y).
plt.figure()
plt.imshow(im)
plt.scatter(points[:, 1], points[:, 0])
plt.show()