def _syntheticProc(dkf, dataset, mask, dataset_eval, mask_eval):
    """Collect posterior statistics on the synthetic dataset.

    Runs the inference network 10 times on each split, averages the
    posterior means and covariances across the runs, and extracts the
    learned generative-model parameters.

    Args:
        dkf: trained DKF model (provides `tWeights`, `params`, `params_synthetic`).
        dataset, mask: training observations and their mask.
        dataset_eval, mask_eval: validation observations and their mask.

    Returns:
        tuple: (mu_train, cov_train, mu_valid, cov_valid, learned_params)
    """
    allmus, alllogcov = [], []
    for _ in range(10):
        _, mus, logcov = DKF_evaluate.infer(dkf, dataset, mask)
        allmus.append(np.copy(mus))
        alllogcov.append(np.copy(logcov))

    allmus_v, alllogcov_v = [], []
    for _ in range(10):
        # BUG FIX: the evaluation split must be paired with its own mask;
        # the original passed the training `mask` here and never used
        # `mask_eval` at all.
        _, mus, logcov = DKF_evaluate.infer(dkf, dataset_eval, mask_eval)
        allmus_v.append(np.copy(mus))
        alllogcov_v.append(np.copy(logcov))

    # Average the per-run posteriors along the stacking axis (axis=2).
    mu_train = np.concatenate(allmus, axis=2).mean(2, keepdims=True)
    cov_train = np.exp(np.concatenate(alllogcov, axis=2)).mean(2, keepdims=True)
    mu_valid = np.concatenate(allmus_v, axis=2).mean(2, keepdims=True)
    cov_valid = np.exp(np.concatenate(alllogcov_v, axis=2)).mean(2, keepdims=True)

    # Extract the learned parameters within the generative model.
    learned_params = {}
    for k in dkf.params_synthetic[dkf.params['dataset']]['params']:
        learned_params[k] = dkf.tWeights[k + '_W'].get_value()
    return mu_train, cov_train, mu_valid, cov_valid, learned_params
def knn(query, samples, neighbor_by_class=3):
    """Classify a query with a per-class K-nearest-neighbours vote.

    Args:
        query (dict): { 'img': <path_to_img>, 'cls': <img class>, 'hist' <img histogram> }
        samples (list): list of { 'img': <path_to_img>, 'cls': <img class>, 'hist' <img histogram> }
        neighbor_by_class (int, optional): nb nearsest neighbors by class. Defaults to 3.

    Returns:
        str: class whose kept neighbours have the smallest mean distance
    """
    _, matches = infer(query, samples)

    # Bucket distances by class; iterating in class order keeps tie-breaking
    # identical to a stable sort over class names.
    distances_by_class = {}
    for match in sorted(matches, key=lambda m: m['cls']):
        distances_by_class.setdefault(match['cls'], []).append(match['dis'])

    # For each class keep only the closest `neighbor_by_class` distances and
    # average them.
    score_by_class = {
        cls: mean(sorted(dists)[:neighbor_by_class])
        for cls, dists in distances_by_class.items()
    }

    # The winning class is the one with the lowest average distance.
    return min(score_by_class.items(), key=lambda item: item[1])[0]
def get_results():
    """Render retrieval results for an image uploaded via POST.

    Builds a histogram for the uploaded file, runs retrieval with the
    module-level `depth` / `d_type` settings, and renders results.html.
    Non-POST requests fall through and implicitly return None, exactly as
    in the original handler.
    """
    if request.method == 'POST':
        uploaded = request.files['image']
        print(uploaded)
        embedder = Embeddings()
        query_hist = embedder.make_hist(uploaded)
        matches = infer(query_hist, depth=depth, d_type=d_type)
        return render_template('results.html', results=matches)
def _syntheticProc(dkf, dataset, dataset_eval):
    """Collect statistics on the synthetic dataset.

    Runs the inference network 100 times on the train and validation splits
    and averages the posterior means/covariances across the runs.

    Returns:
        tuple: (mu_train, cov_train, mu_valid, cov_valid)
    """
    n_runs = 100

    def _averaged_posterior(data, copy_input=False):
        # Repeat inference and average along the stacking axis (axis=2).
        mus_all, logcov_all = [], []
        for _ in range(n_runs):
            batch = np.copy(data) if copy_input else data
            _, mus, logcov = DKF_evaluate.infer(dkf, batch)
            mus_all.append(np.copy(mus))
            logcov_all.append(np.copy(logcov))
        mu = np.concatenate(mus_all, axis=2).mean(2, keepdims=True)
        cov = np.exp(np.concatenate(logcov_all, axis=2)).mean(2, keepdims=True)
        return mu, cov

    mu_train, cov_train = _averaged_posterior(dataset)
    # The original handed a fresh copy of the eval data to every inference
    # call; keep that behaviour in case infer() mutates its input.
    mu_valid, cov_valid = _averaged_posterior(dataset_eval, copy_input=True)
    return mu_train, cov_train, mu_valid, cov_valid
# The commented-out sections in the original exercised the Gabor, HOG and
# VGGNetFeat extractors with the same make_samples / infer / print pattern;
# only the ResNet retrieval below is active.

# retrieve by resnet
method = ResNetFeat()
samples = method.make_samples(db)
query_idx = 1569
query = samples[query_idx]
_, result = infer(query, samples=samples, depth=depth, d_type=d_type)
print(result)
from evaluate import infer
from create_hist import Embeddings
import cv2
import time

# Retrieval configuration: number of results to return, distance type,
# and the query image path.
depth = 15
d_type = 'd1'
q_img = "database/images/3159.jpg"

if __name__ == '__main__':
    # Build a histogram embedding for the query image and display it.
    obj = Embeddings()
    q_hist = obj.make_hist(q_img)
    i_img = cv2.imread(q_img)
    cv2.imshow('Query', i_img)
    # Retrieve the `depth` nearest images to the query histogram.
    results = infer(q_hist, depth=depth, d_type=d_type)
    # NOTE(review): source was collapsed to one line, so loop nesting is
    # reconstructed; waitKey is assumed inside the loop (one keypress per
    # result) — confirm against the original file.
    for index, result in enumerate(results):
        print(index, result['img'], result['dis'])
        r = cv2.imread(result['img'])
        cv2.imshow("Results", r)
        cv2.waitKey(0)
        # NOTE(review): this fragment's enclosing def/loop begins before this
        # chunk; the indentation here is reconstructed and should be confirmed.
        samples.append({'img': d_img, 'cls': d_cls, 'hist': d_hist})
    # Cache the extracted samples so later runs can skip feature extraction.
    cPickle.dump(samples, open(os.path.join(cache_dir, sample_cache), "wb"))
    return samples


if __name__ == "__main__":
    d = Daisy()
    # Create my samples
    db = Database("database\\train")
    print("Train databse created.")  # NOTE(review): "databse" typo in the output string
    samples = d.make_samples(db, sample_name="train")
    print("Train samples created.")
    # NOTE(review): "database\dev" contains the invalid escape "\d" (currently a
    # literal backslash, but deprecated) — confirm and prefer "database\\dev".
    test = Database("database\dev")
    print("Test databse created.")
    sample_test = d.make_samples(test, sample_name="dev")
    print("Test samples created.")

    # Find class for each image of my test DB and verify the result
    nb_good_classification = 0
    for img_test in sample_test:
        # Retrieve nearest training samples, then vote on the class with KNN.
        _, resultes = infer(img_test, samples)
        real_cls = KNN(resultes, db.get_class())
        # Booleans add as 0/1, so this accumulates the correct-classification count.
        nb_good_classification += get_cls(img_test['cls']) == get_cls(real_cls)

    print("\n{}/{}".format(nb_good_classification, len(sample_test)))
def test(db, query_idx):
    """Classify a query image by majority vote across feature extractors.

    Each enabled feature method retrieves neighbours for the query image and
    the class of its top result gets one vote; the class with the most votes
    wins. The first few database images of the winning class are displayed,
    followed by the query image itself.

    Args:
        db: image Database used to build feature samples.
        query_idx (int): index of the query image within each sample list.
    """
    # Imports were buried mid-function in the original; hoisted to the top.
    import os
    from PIL import Image

    results = {}
    img = None  # query image pixels, loaded once for the final display

    # One vote per feature method (Daisy and Gabor voters were disabled in
    # the original; re-add them to this tuple to enable them).
    for method in (Color(), Edge(), HOG(), VGGNetFeat(), ResNetFeat()):
        samples = method.make_samples(db)
        query = samples[query_idx]
        if img is None:
            # NOTE(review): scipy.misc.imread was removed in modern SciPy;
            # switch to imageio.imread when upgrading.
            img = scipy.misc.imread(query['img'])
        _, result = infer(query, samples=samples, depth=depth, d_type=d_type)
        inc(results, result[0]['cls'])

    print(results)
    # Class with the highest vote count.
    finalresult = max(results.items(), key=operator.itemgetter(1))[0]

    #string=".../database/"+finalresult+"/"
    string = "./database/" + finalresult + "/"
    print(string)
    # Show up to the first nine images of the winning class (the original
    # counter stopped after nine displays).
    shown = 0
    for fname in os.listdir(string):
        Image.open(string + fname).show()
        print(string + fname)
        shown += 1
        if shown == 9:
            break

    print("Final result is: ", finalresult)
    scipy.misc.imshow(img)