Example #1
0
def similar_memes(name):
    """Handle a similar-meme lookup for the image at the request's `url` query param.

    Fetches the image, computes its feature vector, hashes it, and finds the
    50 nearest stored image hashes.

    Parameters:
        name: requested resource name; its file extension selects the response
            format ('js' -> dict for JSON serialization, anything else -> HTML).

    Returns:
        dict with 'similar' (list of (distance, image_uri, kym_uri) tuples) and
        'meme_counts' (list of (kym_uri, count) sorted by count, descending)
        when the extension is 'js'; otherwise an HTML string of <img> tags.
    """
    # Take the LAST dot-separated segment as the extension so names like
    # 'result.min.js' resolve to 'js'. The original split('.')[1] took the
    # segment after the FIRST dot and raised IndexError on dot-less names.
    ext = name.rsplit('.', 1)[-1]
    url = bottle.request.query['url']
    print(url)
    # Download the image bytes, decode them to a color (BGR) array, and
    # compute the feature vector for it.
    f = FEATURE(cv2.imdecode(np.frombuffer(requests.get(url).content, dtype=np.uint8), 1))
    f = normalize_features(f.reshape((1, f.size)))
    # Hash the single feature row and look up its 50 nearest stored hashes.
    h = image_search.hash_samples(f, HASH_DATA['proj'], HASH_DATA['sample_mean'])[0]
    similar = [(int(dist), IMAGE_URIS[ind], IMAGE_KYM_URIS[ind])
               for dist, ind in HAMMING.knn(HASHES, h, 50)]
    # Count how often each KYM page appears among the neighbors.
    meme_counts = {}
    for _, _, kym_uri in similar:
        meme_counts[kym_uri] = meme_counts.get(kym_uri, 0) + 1
    meme_counts = sorted(meme_counts.items(), key=lambda kv: kv[1], reverse=True)
    out = {'similar': similar, 'meme_counts': meme_counts}
    if ext == 'js':
        return out
    return ''.join('<img src="%s"/>' % uri for _, uri, _ in out['similar'])
Example #2
0
def main():
    """Build and persist a hash index from pickled image features.

    Reads a stream of (image_uri, feature) records from 'features.pkl',
    normalizes the features, trains/evaluates a 128-bit random-rotation
    ('RR') hashing method, hashes all features, and writes the tuple
    (image_uris, hashes, out) to 'hashes.pkl'.
    """
    image_uris, features = [], []
    # Pickle data is binary: the original opened the file in text mode,
    # which fails on Python 3. Read records until the stream is exhausted.
    with open('features.pkl', 'rb') as fp:
        while True:
            try:
                uri, feat = pickle.load(fp)
            except EOFError:
                # End of the concatenated pickle stream — the original bare
                # `except:` would also have hidden real corruption errors.
                break
            image_uris.append(uri)
            features.append(feat)
    print(len(image_uris))
    features = np.asfarray(features)
    features = normalize_features(features)
    train_samples, test_samples = image_search.samples_split_train_test(features)
    # 128-bit hashes via the random-rotation ('RR') method.
    out = image_search.evaluate_hashing_method(train_samples, test_samples, 128, 'RR')
    print(image_search.print_pr_ret(out))
    print(features.shape)
    print(out['proj'].shape)
    print(out['sample_mean'].shape)
    hashes = image_search.hash_samples(features, out['proj'], out['sample_mean'])
    # Highest pickle protocol (-1) emits binary data, so the handle must be
    # opened 'wb' (the original's text-mode 'w' breaks the dump) and is now
    # closed deterministically via the context manager.
    with open('hashes.pkl', 'wb') as fp_out:
        pickle.dump((image_uris, hashes, out), fp_out, -1)