def test_quadtreeOpt(data, queryShape, all_queries):
    """Run the quadtree variants over every (epsilon, seed) pair and save the
    averaged relative errors to disk.

    NOTE(review): a different, one-argument test_quadtreeOpt is defined later
    in this file; the later definition shadows this one at import time.

    Args:
        data: dataset array handed to GKExp.
        queryShape: 2-element query extent; only used to build the output
            file-name suffix.
        all_queries: per-seed query lists, indexed in step with seed_list.

    Fix applied: replaced the Python-2-only backtick repr syntax with str(),
    consistent with the str() calls used elsewhere in this file (backticks
    were removed in Python 3; for ints repr() and str() are identical).
    """
    global method_list, exp_name
    exp_name = 'quadtreeOpt'
    method_list = ['quad-geo']
    # method_list = ['quad-baseline', 'quad-geo', 'quad-baseline-localness', 'quad-geo-localness']
    # Result cubes: (epsilon, seed, method) -> error.
    res_cube_abs = np.zeros((len(eps_list), len(seed_list), len(method_list)))
    res_cube_rel = np.zeros((len(eps_list), len(seed_list), len(method_list)))
    for j in range(len(seed_list)):
        queryList = all_queries[j]
        kexp = GKExp(data, queryList)
        p = Params(seed_list[j])
        for i in range(len(eps_list)):
            p.Eps = eps_list[i]
            for k in range(len(method_list)):
                if method_list[k] == 'quad-baseline':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_Quad_baseline(p)
                elif method_list[k] == 'quad-baseline-localness':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_Quad_baseline_localness(p)
                elif method_list[k] == 'quad-geo':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_Quad_geo(p)
                elif method_list[k] == 'quad-geo-localness':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_Quad_geo_localness(p)
                else:
                    logging.error('No such index structure!')
                    sys.exit(1)
    # Average across seeds (axis 1) -> (epsilon, method) summaries.
    res_abs_summary = np.average(res_cube_abs, axis=1)
    res_rel_summary = np.average(res_cube_rel, axis=1)
    # np.savetxt(Params.resdir + exp_name + '_abs_' + str(int(queryShape[0] * 10)) + '_' + str(int(queryShape[1] * 10)), res_abs_summary, fmt='%.4f\t')
    np.savetxt(Params.resdir + exp_name + '_rel_' + str(int(queryShape[0] * 10)) + '_' + str(int(queryShape[1] * 10)),
               res_rel_summary, fmt='%.4f\t')
def test_grids(data, queryShape, all_queries):
    """Run the grid-based methods over every (epsilon, seed) pair and save the
    averaged relative errors to disk.

    Args:
        data: dataset array handed to GKExp.
        queryShape: 2-element query extent; only used for the output
            file-name suffix.
        all_queries: per-seed query lists, indexed in step with seed_list.

    Fix applied: replaced the Python-2-only backtick repr syntax with str(),
    consistent with the str() calls used elsewhere in this file (backticks
    were removed in Python 3; for ints repr() and str() are identical).
    """
    global method_list, exp_name
    exp_name = 'grids'
    method_list = ['grid-uniform', 'grid-adaptive']
    # 'grid-pure','grid-uniform','grid-adaptive','grid-adaptive-localness'
    # Result cubes: (epsilon, seed, method) -> error.
    res_cube_abs = np.zeros((len(eps_list), len(seed_list), len(method_list)))
    res_cube_rel = np.zeros((len(eps_list), len(seed_list), len(method_list)))
    for j in range(len(seed_list)):
        queryList = all_queries[j]
        kexp = GKExp(data, queryList)
        p = Params(seed_list[j])
        for i in range(len(eps_list)):
            p.Eps = eps_list[i]
            for k in range(len(method_list)):
                if method_list[k] == 'grid-pure':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_Grid_pure(p)
                elif method_list[k] == 'grid-uniform':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_Grid_uniform(p)
                elif method_list[k] == 'grid-adaptive':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_Grid_adaptive(p)
                elif method_list[k] == 'grid-adaptive-localness':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_Grid_adaptive_localness(p)
                else:
                    logging.error('No such index structure!')
                    sys.exit(1)
    # Average across seeds (axis 1) -> (epsilon, method) summaries.
    res_abs_summary = np.average(res_cube_abs, axis=1)
    res_rel_summary = np.average(res_cube_rel, axis=1)
    # np.savetxt(Params.resdir + exp_name + '_abs_' + str(int(queryShape[0] * 10)) + '_' + str(int(queryShape[1] * 10)), res_abs_summary, fmt='%.4f\t')
    np.savetxt(Params.resdir + exp_name + '_rel_' + str(int(queryShape[0] * 10)) + '_' + str(int(queryShape[1] * 10)),
               res_rel_summary, fmt='%.4f\t')
def test_kdTrees(queryShape):
    """Benchmark the kd-tree family over every (epsilon, seed, method) triple
    and write the seed-averaged absolute/relative error tables to disk.

    Args:
        queryShape: 2-element query extent; fed to queryGen and used for the
            output file-name suffix.
    """
    global methodList, exp_name
    exp_name = 'kdTrees'
    methodList = ['pure', 'true', 'standard', 'hybrid', 'cell', 'noisymean']
    # Params.maxHeight = 8
    epsList = [0.1, 0.5, 1.0]
    data = data_readin()
    cube_shape = (len(epsList), len(seedList), len(methodList))
    res_cube_abs = np.zeros(cube_shape)
    res_cube_rel = np.zeros(cube_shape)
    for j, seed in enumerate(seedList):
        kexp = KExp(data, queryGen(queryShape, seed))
        # Bind each method tag to its experiment runner once per KExp instance.
        dispatch = {
            'pure': kexp.run_Kd_pure,
            'true': kexp.run_Kd_true,
            'standard': kexp.run_Kd_standard,
            'hybrid': kexp.run_Kd_hybrid,
            'noisymean': kexp.run_Kd_noisymean,
            'cell': kexp.run_Kd_cell,
        }
        for i, eps in enumerate(epsList):
            for k, tag in enumerate(methodList):
                p = Params(seed)  # fresh Params object for every single run
                p.Eps = eps
                if tag not in dispatch:
                    logging.error('No such index structure!')
                    sys.exit(1)
                res_cube_abs[i, j, k], res_cube_rel[i, j, k] = dispatch[tag](p)
    # Collapse the seed axis -> (epsilon, method) summaries.
    res_abs_summary = np.average(res_cube_abs, axis=1)
    res_rel_summary = np.average(res_cube_rel, axis=1)
    suffix = str(int(queryShape[0] * 10)) + '_' + str(int(queryShape[1] * 10))
    np.savetxt(Params.resdir + exp_name + '_abs_' + suffix, res_abs_summary, fmt='%.4f')
    np.savetxt(Params.resdir + exp_name + '_rel_' + suffix, res_rel_summary, fmt='%.4f')
def test_htrees(data, queryShape, all_queries):
    """Run the h-tree variants over every (epsilon, seed) pair and save the
    averaged relative errors to disk.

    NOTE(review): test_htrees is defined twice in this file; in Python the
    later definition shadows the earlier one at import time.

    Args:
        data: dataset array handed to GKExp.
        queryShape: 2-element query extent; only used for the output
            file-name suffix.
        all_queries: per-seed query lists, indexed in step with seed_list.

    Fix applied: replaced the Python-2-only backtick repr syntax with str(),
    consistent with the str() calls used elsewhere in this file (backticks
    were removed in Python 3; for ints repr() and str() are identical).
    """
    global method_list, exp_name
    exp_name = "htrees"
    method_list = ["ht-standard"]
    # method_list = ['ht-standard','ht-composite']
    # 'ht-pure','ht-true','ht-standard','ht-composite','ht-hybrid','ht-hybrid-skew','ht-composite-localness','ht-hybrid-localness'
    # Result cubes: (epsilon, seed, method) -> error.
    res_cube_abs = np.zeros((len(eps_list), len(seed_list), len(method_list)))
    res_cube_rel = np.zeros((len(eps_list), len(seed_list), len(method_list)))
    for j in range(len(seed_list)):
        queryList = all_queries[j]
        kexp = GKExp(data, queryList)
        p = Params(seed_list[j])
        for i in range(len(eps_list)):
            p.Eps = eps_list[i]
            for k in range(len(method_list)):
                if method_list[k] == "ht-pure":
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_pure(p)
                elif method_list[k] == "ht-true":
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_true(p)
                elif method_list[k] == "ht-standard":
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_standard(p)
                elif method_list[k] == "ht-composite":
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_composite(p)
                elif method_list[k] == "ht-composite-localness":
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_composite_localness(p)
                elif method_list[k] == "ht-hybrid":
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_hybrid(p)
                elif method_list[k] == "ht-standard-skew":
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_standard_skew(p)
                elif method_list[k] == "ht-hybrid-skew":
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_hybrid_skew(p)
                elif method_list[k] == "ht-standard-adaptive":
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_standard_adaptive(p)
                elif method_list[k] == "ht-hybrid-localness":
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_hybrid_localness(p)
                else:
                    logging.error("No such index structure!")
                    sys.exit(1)
    # Average across seeds (axis 1) -> (epsilon, method) summaries.
    res_abs_summary = np.average(res_cube_abs, axis=1)
    res_rel_summary = np.average(res_cube_rel, axis=1)
    # np.savetxt(Params.resdir + exp_name + '_abs_' + str(int(queryShape[0] * 10)) + '_' + str(int(queryShape[1] * 10)), res_abs_summary, fmt='%.4f\t')
    np.savetxt(Params.resdir + exp_name + "_rel_" + str(int(queryShape[0] * 10)) + "_" + str(int(queryShape[1] * 10)),
               res_rel_summary, fmt="%.4f\t")
def test_quadtreeOpt(queryShape):
    """Benchmark the quadtree variants (baseline/geo/post/opt) over every
    (epsilon, seed, method) triple and write the seed-averaged absolute and
    relative error tables to disk.

    Args:
        queryShape: 2-element query extent; fed to queryGen and used for the
            output file-name suffix.
    """
    global methodList, exp_name
    exp_name = 'quadtreeOpt'
    methodList = ['Quad-baseline', 'Quad-geo', 'Quad-post', 'Quad-opt']
    # Params.maxHeight = 10
    epsList = [0.1, 0.5, 1.0]
    data = data_readin()
    cube_shape = (len(epsList), len(seedList), len(methodList))
    res_cube_abs = np.zeros(cube_shape)
    res_cube_rel = np.zeros(cube_shape)
    for j, seed in enumerate(seedList):
        kexp = KExp(data, queryGen(queryShape, seed))
        # Bind each method tag to its experiment runner once per KExp instance.
        dispatch = {
            'Quad-baseline': kexp.run_Quad_baseline,
            'Quad-geo': kexp.run_Quad_geo,
            'Quad-post': kexp.run_Quad_post,
            'Quad-opt': kexp.run_Quad_opt,
        }
        for i, eps in enumerate(epsList):
            for k, tag in enumerate(methodList):
                p = Params(seed)  # fresh Params object for every single run
                p.Eps = eps
                if tag not in dispatch:
                    logging.error('No such index structure!')
                    sys.exit(1)
                res_cube_abs[i, j, k], res_cube_rel[i, j, k] = dispatch[tag](p)
    # Collapse the seed axis -> (epsilon, method) summaries.
    res_abs_summary = np.average(res_cube_abs, axis=1)
    res_rel_summary = np.average(res_cube_rel, axis=1)
    suffix = str(int(queryShape[0] * 10)) + '_' + str(int(queryShape[1] * 10))
    np.savetxt(Params.resdir + exp_name + '_abs_' + suffix, res_abs_summary, fmt='%.4f')
    np.savetxt(Params.resdir + exp_name + '_rel_' + suffix, res_rel_summary, fmt='%.4f')
def test_htrees(data, queryShape, all_queries):
    """Run the h-tree variants over every (epsilon, seed) pair and save the
    averaged relative errors to disk.

    NOTE(review): this is a duplicate definition of test_htrees (an earlier
    one exists in this file); in Python the later definition wins, so this
    one shadows the earlier copy.

    Args:
        data: dataset array handed to GKExp.
        queryShape: 2-element query extent; only used for the output
            file-name suffix.
        all_queries: per-seed query lists, indexed in step with seed_list.

    Fix applied: replaced the Python-2-only backtick repr syntax with str()
    in the savetxt filename (backticks were removed in Python 3; for ints
    repr() and str() are identical).
    """
    global method_list, exp_name
    exp_name = 'htrees'
    method_list = ['ht-standard']
    # method_list = ['ht-standard','ht-composite']
    # 'ht-pure','ht-true','ht-standard','ht-composite','ht-hybrid','ht-hybrid-skew','ht-composite-localness','ht-hybrid-localness'
    # Result cubes: (epsilon, seed, method) -> error.
    res_cube_abs = np.zeros((len(eps_list), len(seed_list), len(method_list)))
    res_cube_rel = np.zeros((len(eps_list), len(seed_list), len(method_list)))
    for j in range(len(seed_list)):
        queryList = all_queries[j]
        kexp = GKExp(data, queryList)
        p = Params(seed_list[j])
        for i in range(len(eps_list)):
            p.Eps = eps_list[i]
            for k in range(len(method_list)):
                if method_list[k] == 'ht-pure':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_pure(p)
                elif method_list[k] == 'ht-true':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_true(p)
                elif method_list[k] == 'ht-standard':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_standard(p)
                elif method_list[k] == 'ht-composite':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_composite(p)
                elif method_list[k] == 'ht-composite-localness':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_composite_localness(p)
                elif method_list[k] == 'ht-hybrid':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_hybrid(p)
                elif method_list[k] == 'ht-standard-skew':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_standard_skew(p)
                elif method_list[k] == 'ht-hybrid-skew':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_hybrid_skew(p)
                elif method_list[k] == 'ht-standard-adaptive':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_standard_adaptive(p)
                elif method_list[k] == 'ht-hybrid-localness':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_HT_hybrid_localness(p)
                else:
                    logging.error('No such index structure!')
                    sys.exit(1)
    # Average across seeds (axis 1) -> (epsilon, method) summaries.
    res_abs_summary = np.average(res_cube_abs, axis=1)
    res_rel_summary = np.average(res_cube_rel, axis=1)
    # np.savetxt(Params.resdir + exp_name + '_abs_' + str(int(queryShape[0] * 10)) + '_' + str(int(queryShape[1] * 10)), res_abs_summary, fmt='%.4f\t')
    np.savetxt(Params.resdir + exp_name + '_rel_' + str(int(queryShape[0] * 10)) + '_' + str(int(queryShape[1] * 10)),
               res_rel_summary, fmt='%.4f\t')
def test_quadtreeOpt(queryShape):
    # NOTE(review): duplicate definition — test_quadtreeOpt also appears
    # earlier in this file (once with a different, 3-argument signature and
    # once identical to this one). In Python the last definition wins, so
    # this copy shadows the earlier ones; the duplicates should probably be
    # consolidated.
    """Benchmark the quadtree variants (baseline/geo/post/opt) over every
    (epsilon, seed, method) triple and write the seed-averaged absolute and
    relative error tables to disk.
    """
    global methodList, exp_name
    exp_name = 'quadtreeOpt'
    methodList = ['Quad-baseline', 'Quad-geo', 'Quad-post', 'Quad-opt']
    # Params.maxHeight = 10
    epsList = [0.1, 0.5, 1.0]
    data = data_readin()
    # Result cubes: (epsilon, seed, method) -> error.
    res_cube_abs = np.zeros((len(epsList), len(seedList), len(methodList)))
    res_cube_rel = np.zeros((len(epsList), len(seedList), len(methodList)))
    for j in range(len(seedList)):
        queryList = queryGen(queryShape, seedList[j])
        kexp = KExp(data, queryList)
        for i in range(len(epsList)):
            for k in range(len(methodList)):
                # A fresh Params object is built for every single run.
                p = Params(seedList[j])
                p.Eps = epsList[i]
                if methodList[k] == 'Quad-baseline':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_Quad_baseline(p)
                elif methodList[k] == 'Quad-geo':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_Quad_geo(p)
                elif methodList[k] == 'Quad-post':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_Quad_post(p)
                elif methodList[k] == 'Quad-opt':
                    res_cube_abs[i, j, k], res_cube_rel[i, j, k] = kexp.run_Quad_opt(p)
                else:
                    logging.error('No such index structure!')
                    sys.exit(1)
    # Average across seeds (axis 1) -> (epsilon, method) summaries.
    res_abs_summary = np.average(res_cube_abs, axis=1)
    res_rel_summary = np.average(res_cube_rel, axis=1)
    np.savetxt(Params.resdir + exp_name + '_abs_' + str(int(queryShape[0] * 10)) + '_' + str(int(queryShape[1] * 10)),
               res_abs_summary, fmt='%.4f')
    np.savetxt(Params.resdir + exp_name + '_rel_' + str(int(queryShape[0] * 10)) + '_' + str(int(queryShape[1] * 10)),
               res_rel_summary, fmt='%.4f')
error = 0 for i in range(len(publish)): if orig[i] < 0: error += distance(publish[i], orig[i]) elif orig[i] == 0: error += distance(publish[i], 1) else: error += distance(max(publish[i], 0), orig[i]) return error / len(publish) for q in q_list: for i in range(len(eps_list)): with open("../log/foursquare/true_count_KF_" + str(eps_list[i]) + ".log") as f: rel_errs = [] for line in f.readlines(): orig = map(float, line.strip().split("\t")) p = Params(1000) p.Eps = eps_list[i] kf = KalmanFilterPID(p) kf.setQ(q) budgetKF = budgetKF = eps_list[i] / 2 # filter = kf.kalmanFilter(seq, budgetKF, p.samplingRate) publish = kf.kalmanFilter(orig, budgetKF) rel_err = getRelError(publish, orig) rel_errs.append(rel_err) print q, "\t", eps_list[i], "\t", sum(rel_errs) / len(rel_errs)
# eps_list = [0.001, 0.004, 0.007, 0.01] # dataset_list = ['yelp', 'foursquare', 'gowallasf', 'gowallala'] eps_list = [0.05, 0.45] dataset_list = ['gowallasf'] for dataset in dataset_list: for eps in eps_list: param = Params(1000) all_workers = data_readin(param) param.NDIM, param.NDATA = all_workers.shape[0], all_workers.shape[1] param.LOW, param.HIGH = np.amin(all_workers, axis=1), np.amax(all_workers, axis=1) param.DATASET = dataset param.select_dataset() param.Eps = eps param.debug() path_data = getPathData(all_workers, param) # max_count = 0 # for data in path_data: # if data[1] > max_count: # max_count = data[1] fig, ax = plt.subplots() # img = imread("background.png") for data in path_data: path = data[0] codes, verts = zip(*path) path = mpath.Path(verts, codes)
# NOTE(review): near-duplicate of the preceding plotting script (same
# eps_list/dataset_list setup and loop); likely a copy-paste artifact that
# should be consolidated. Also ends mid-loop — the drawing code for 'path'
# presumably continues beyond this view.
eps_list = [0.05, 0.45]
dataset_list = ['gowallasf']
for dataset in dataset_list:
    for eps in eps_list:
        param = Params(1000)
        all_workers = data_readin(param)
        # NDIM from axis 0 and NDATA from axis 1 — the array appears to be
        # laid out dimensions-by-points; TODO confirm against data_readin.
        param.NDIM, param.NDATA = all_workers.shape[0], all_workers.shape[1]
        # Per-dimension bounding box of the data.
        param.LOW, param.HIGH = np.amin(all_workers, axis=1), np.amax(all_workers, axis=1)
        param.DATASET = dataset
        param.select_dataset()
        param.Eps = eps
        param.debug()
        path_data = getPathData(all_workers, param)
        # max_count = 0
        # for data in path_data:
        # if data[1] > max_count:
        # max_count = data[1]
        fig, ax = plt.subplots()
        # img = imread("background.png")
        for data in path_data:
            # data[0] is a list of (code, vertex) pairs describing one path.
            path = data[0]
            codes, verts = zip(*path)
            path = mpath.Path(verts, codes)