def gen_colormap(max_alpha, max_beta, storage, NNU_X, NNU_D, nbrs,
                 base_path='/home/brad/11.15/', random=False, verbose=False):
    """Sweep (alpha, beta) and record the fraction of exact nearest-neighbor
    matches recovered, either by an NNU index or by a random-candidate baseline.

    nbrs holds the exact (brute-force) neighbor index for each sample in NNU_X.
    """
    if random:
        iden = 'random'
    else:
        nnu = NNU(max_alpha, max_beta, storage)
        nnu.build_index(NNU_D)
        iden = nnu.name

    count = np.zeros((max_alpha, max_beta))
    for alpha in range(1, max_alpha + 1):
        for beta in range(1, max_beta + 1):
            if not random:
                nnu_nbrs = nnu.index(NNU_X, alpha=alpha, beta=beta)
            else:
                # Random baseline: draw alpha*beta candidate atoms and keep
                # the one with the largest |dot product| against the sample.
                nnu_nbrs = []
                for x in NNU_X:
                    idxs = np.random.permutation(len(NNU_D))[:alpha * beta]
                    nnu_nbrs.append(idxs[np.argmax(np.abs(np.dot(NNU_D[idxs], x)))])

            matches = np.sum(np.asarray(nnu_nbrs) == nbrs)
            count[alpha - 1, beta - 1] = matches / float(len(NNU_X))
            if verbose:
                print alpha, beta, count[alpha - 1, beta - 1]

    # plt.imshow(count, interpolation='nearest', vmin=0, vmax=1)
    # plt.colorbar()
    # fig = plt.gcf()
    # fig.set_size_inches(18.5, 10.5)
    # plt.savefig(base_path + iden + 'color.png')
    # plt.clf()
    np.save(base_path + iden + '_count', count)
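
# --- Usage sketch (illustrative only) -----------------------------------------
# A minimal, hypothetical driver for gen_colormap above. The data shapes, the
# Storage_Scheme.mini choice, and the output directory are assumptions made for
# illustration; only the gen_colormap signature comes from the function itself.
def _example_colormap_run():
    NNU_D = np.random.randn(500, 64)   # candidate dictionary atoms, one per row
    NNU_X = np.random.randn(200, 64)   # query samples, one per row
    # Brute-force neighbors, used as ground truth inside gen_colormap.
    exact_nbrs = np.argmax(np.abs(np.dot(NNU_D, NNU_X.T)), axis=0)
    gen_colormap(10, 10, Storage_Scheme.mini, NNU_X, NNU_D, exact_nbrs,
                 '/tmp/colormaps/', verbose=True)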
# Center the dictionary before indexing.
D_mean = np.mean(D, axis=0)
D = D - D_mean

# (alpha, beta) candidate-budget pairs to sweep.
alphas = [1, 2, 3, 4, 5, 5, 5, 10, 10, 15, 25, 30]
betas = [1, 1, 1, 1, 1, 2, 4, 5, 10, 10, 25, 25]

storages = [Storage_Scheme.mini, Storage_Scheme.two_mini, Storage_Scheme.half]
storages = [Storage_Scheme.mini]  # restrict this run to the mini scheme

nnu_dists = {}
nnu_runtimes = {}
ABs = {}

# NNU
for storage in storages:
    nnu = NNU(5, 5, storage)  # index built with max alpha=5, beta=5
    print nnu.name
    nnu.build_index(D)
    nnu_dists[nnu.name] = []
    nnu_runtimes[nnu.name] = []
    ABs[nnu.name] = []
    for alpha, beta in zip(alphas, betas):
        runtime_total = 0.0
        avg_abs = []
        svm_xs_tr, svm_xs_t = [], []
        total_matches = 0
        total_samples = 0
        # Encoder for this (alpha, beta) setting: map a sample to a
        # bag-of-words vector over the N dictionary atoms.
        enc_func = partial(nnu_to_bow, alpha=alpha, beta=beta, nnu=nnu, N=N)
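
# --- Hypothetical sketch of the encoder used above ----------------------------
# nnu_to_bow is defined elsewhere in the repo; based on how it is called here
# (and on the util.bow / nnu.index usage in these scripts), it presumably looks
# roughly like the commented sketch below. This is an assumption, not the repo's
# actual implementation.
#
# def nnu_to_bow(x, alpha, beta, nnu, N):
#     nbrs = nnu.index(x, alpha=alpha, beta=beta)   # approximate atom matches
#     return util.bow(nbrs, N)                      # histogram over N atoms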
D = D - D_mean
alphas = [1, 2, 3, 4, 5, 5, 5, 10, 10, 15, 25, 30]
betas = [1, 1, 1, 1, 1, 2, 4, 5, 10, 10, 25, 25]

storages = [Storage_Scheme.mini, Storage_Scheme.two_mini, Storage_Scheme.half]
storages = [Storage_Scheme.two_mini]  # restrict this run to the two_mini scheme

nnu_dists = {}
nnu_runtimes = {}
ABs = {}
pool = Pool(processes=4)  # worker pool for parallel encoding

# NNU
for storage in storages:
    nnu = NNU(30, 25, storage)
    nnu.build_index(D)
    print nnu.name
    nnu_dists[nnu.name] = []
    nnu_runtimes[nnu.name] = []
    ABs[nnu.name] = []
    for alpha, beta in zip(alphas, betas):
        runtime_total = 0.0
        avg_abs = []
        svm_xs_tr, svm_xs_t = [], []
        total_matches = 0
        total_samples = 0
        # enc_func mirrors the serial variant above: encode each sample as a
        # bag-of-words over the dictionary at this (alpha, beta) setting.
        enc_func = partial(nnu_to_bow, alpha=alpha, beta=beta, nnu=nnu, N=N)
        svm_xs_tr = pool.map(enc_func, X_tr)
for x in X:  # per-sample feature loop; the original header is truncated above (iterable name assumed)
    x = x - D_mean
    # Exact nearest dictionary atom by largest |dot product|.
    nbrs = np.argmax(np.abs(np.dot(D, x.T)), axis=0)
    svm_X.append(util.bow(nbrs, args['D_atoms']))

# Train a linear SVM on the bag-of-words features.
clf = SVC(kernel='linear')
clf.fit(svm_X, Y)

# Flatten the trained classifier into a JSON-serializable dict.
svm_dict = {}
svm_dict['num_classes'] = len(set(Y))
svm_dict['num_clfs'] = len(clf.intercept_)
svm_dict['num_features'] = len(clf.coef_[0])
svm_dict['coefs'] = list(clf.coef_.flatten())
svm_dict['intercepts'] = list(clf.intercept_)

# Build the NNU index and bundle it with the SVM for export.
nnu = NNU(args['alpha'], args['beta'], storage)
nnu.build_index(D)
json_dict = {}
json_dict['nnu'] = nnu.to_dict()
json_dict['svm'] = svm_dict

chunk_size = args.get('chunk_size')
json_str = chunk_json(json_dict, chunk_size=chunk_size)
with open(args['output_path'], 'w+') as fp:
    fp.write(json_str)
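
# --- Hedged sketch: consuming the exported SVM parameters ---------------------
# Illustrates how the flattened 'coefs' / 'intercepts' written above map back
# onto per-classifier weight vectors. scikit-learn's SVC(kernel='linear') trains
# one-vs-one classifiers, so num_clfs == num_classes * (num_classes - 1) / 2.
# The helper name and the bow argument are illustrative and not part of the repo.
def _decision_values(svm_dict, bow):
    W = np.array(svm_dict['coefs']).reshape(svm_dict['num_clfs'],
                                            svm_dict['num_features'])
    b = np.array(svm_dict['intercepts'])
    return np.dot(W, bow) + b   # one decision value per pairwise classifier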