Example 1
	parser.add_argument('--log_dir', type=str, default='./output')
	parser.add_argument('--connection_radius', type=float, default=5.0)
	parser.add_argument('--map_type', type=str, default="intel")
	parser.add_argument('--gamma_prm', type=float, default=30.0)
	# PRM* parameters; adjust as needed
	args = parser.parse_args()
	args.log_dir = (f"./output/max_nodes-{args.map_type}{args.max_nodes}"
					f"-obs-thres{args.obstacle_threshold}"
					f"-k_nearest-{args.k_nearest}"
					f"-connection_radius-{args.connection_radius}"
					f"-date-{datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')}/")

	os.makedirs(args.log_dir, exist_ok=True)
	tic = time.time()
	# load map
	map_data, resolution = load_hilbert_map(map_type=args.map_type)
	#resolution = 0.2
	#with open("freiburg_ground_map_q_resolution_final.pickle", 'rb') as tf:
	#	map_data = pickle.load(tf)
	map_array = convert_map_dict_to_array(map_data, resolution)
	map_data["yq"] = 1.0 * (map_data["yq"] > args.obstacle_threshold)
	# get samples from hilbert maps
	sample_list = hilbert_samples(map_data.copy(), args.exp_factor, args.obstacle_threshold, num_samples=args.number_of_samples)
	# take unique samples
	sample_list = [list(t) for t in set(tuple(element) for element in sample_list)]
	# truncate to at most max_nodes samples
	sample_list = sample_list[:args.max_nodes]
	# find the k nearest neighbors of each sample
	nbrs = NearestNeighbors(n_neighbors=args.k_nearest, algorithm='ball_tree').fit(sample_list)
	distances, indices = nbrs.kneighbors(sample_list)
	# create graph
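The fragment ends just as the roadmap graph is created from the k-nearest-neighbor results. A minimal sketch of that step, assuming networkx as the graph library (build_prm_graph and is_collision_free are hypothetical names, not part of the original code):

import networkx as nx

def build_prm_graph(sample_list, distances, indices, connection_radius,
                    is_collision_free=lambda a, b: True):
    # Nodes are sample indices; positions are stored as node attributes.
    graph = nx.Graph()
    for i, pos in enumerate(sample_list):
        graph.add_node(i, pos=pos)
    # Connect each sample to its k nearest neighbors, keeping only edges
    # within the connection radius that pass the collision check.
    for i, (dist_row, idx_row) in enumerate(zip(distances, indices)):
        for dist, j in zip(dist_row, idx_row):
            if i == j or dist > connection_radius:
                continue
            if is_collision_free(sample_list[i], sample_list[j]):
                graph.add_edge(i, j, weight=dist)
    return graph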
Example 2
"""
Author: Manish Saroya
Contact: [email protected]
"""
from prm import hilbert_samples
import pickle
from persistence.utils import load_hilbert_map
import matplotlib.pyplot as plt
if __name__ == "__main__":
    # load map
    # with open("freiburg_ground_map_q_resolution_final.pickle", 'rb') as tf:
    #     map_data = pickle.load(tf)
    # resolution = 0.3
    obstacle_threshold = 0.25  # defined for reference; unused in this snippet
    map_data, resolution = load_hilbert_map("intel")
    #map_data["yq"] = 1.0 * (map_data["yq"] > 0.45)
    fig = plt.figure(figsize=(40 / 4, 35 / 4))
    plt.axis("equal")
    #plt.style.use('seaborn-dark')
    plt.scatter(map_data['Xq'][:, 0],
                map_data['Xq'][:, 1],
                c=map_data['yq'],
                cmap="jet",
                s=(70 / 0.3) * resolution * 0.2,
                vmin=0,
                vmax=1,
                edgecolors='none')
    #plt.scatter(map_data['Xq'][:, 0], map_data['Xq'][:, 1], c=map_data['yq'], s=10, vmin=0, vmax=1, edgecolors='')
    plt.colorbar(fraction=0.047, pad=0.02)
    plt.show()
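The load_hilbert_map helper from persistence.utils is used throughout these examples but never shown. A plausible minimal version, inferred from the commented-out loading code above; the file name, resolution, and map-type handling are assumptions, not the actual implementation:

import pickle

def load_hilbert_map_sketch(map_type="freiburg"):
    # Hypothetical file lookup; the real loader in persistence.utils may
    # resolve paths and per-map resolutions differently.
    path = "freiburg_ground_map_q_resolution_final.pickle"
    resolution = 0.3  # value used in the commented-out code above
    with open(path, 'rb') as tf:
        map_data = pickle.load(tf)  # dict with arrays 'Xq' (N, 2) and 'yq' (N,)
    return map_data, resolution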
Example 3
	#map_data['yq'] = np.exp(exp_factor * map_data['yq'])

	for index in range(len(map_data["yq"])):
		map_data["yq"][index] *= rv.pdf(map_data["Xq"][index])

	# normalize the probabilities
	map_data['yq'] /= np.linalg.norm(map_data['yq'], ord=1)

	samples_list = map_data['Xq'][np.random.choice(len(map_data['Xq']), size=num_samples, p=map_data['yq'])]
	return samples_list


if __name__ == "__main__":
	map_data_, resolution = load_hilbert_map(map_type="drive")
	samples = get_samples(map_data_, [0, 0], scale=15, num_samples=6000)
	samples_plot(samples)

	# pose = [[0, 40], [0, 0]]
	# rvlist = []
	# for p in pose:
	# 	rvlist.append(multivariate_normal(mean=p, cov=[[225.0, 0.], [0., 225.0]]))
	# #rv = multivariate_normal([.0, 40.0], [[225.0, 0.], [0., 225.0]])
	# map_data['yq'] = np.ones(len(map_data['yq'])) - map_data['yq']
	#
	# map_data['yq'] = np.exp(20 * map_data['yq'])
	# for index in range(len(map_data["yq"])):
	# 	map_data["yq"][index] *= rv.pdf(map_data["Xq"][index])
	# # normalize the probabilities
	# map_data['yq'] /= np.linalg.norm(map_data['yq'], ord=1)
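samples_plot is called in the main block above but not defined in this fragment. A minimal sketch of such a helper, assumed to be a simple scatter of the sampled 2D points (not the original implementation):

import numpy as np
import matplotlib.pyplot as plt

def samples_plot(samples):
    # Scatter the sampled (x, y) locations with equal axis scaling.
    samples = np.asarray(samples)
    plt.figure()
    plt.axis("equal")
    plt.scatter(samples[:, 0], samples[:, 1], s=2)
    plt.show()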
Example 4
# 			if local_distance > distance:
# 				local_graph.add_node(indx, pos=pose)
#
# 		for node1, node2 in gng_.edges:
# 			if (node1 in local_graph.nodes) and (node2 in local_graph.nodes):
# 				local_graph.add_edge(node1, node2)
#
# 		is_connected, num_components = count_components(local_graph)
#
# 		print(f_indx, is_connected, num_components)
# 		topological_accuracy_0hom.append(is_connected)
# 	return topological_accuracy_0hom

if __name__ == "__main__":
    path = "output/exp_factor-freiburg20-is-bias-sampling-True-bias_ratio-0.75-max_epoch-400-max_edge_age-50-date-2020-10-07-13-12-23/gng300.pickle"
    data, resolution = load_hilbert_map(map_type="freiburg")
    map_array = convert_map_dict_to_array(data, resolution)
    gng_ = convert_gng_to_nxgng(load_graph(path), map_array, 0.6, resolution)
    feature, persistence_1hom_weights = get_top_n_persistence_node_location(
        5, "freiburg", location_type="death", feature_type=0)
    # with open(path, 'rb') as tf:
    # 	g = pickle.load(tf)

    # create the graph based on the feature nearby area
    # topological_accuracy_0hom = []
    # position = nx.get_node_attributes(gng_, 'pos')
    # local_distance = 1.1
    # ######### write code for inside detection
    # for f_indx, f in enumerate(feature):
    # 	local_graph = nx.Graph()
    # 	for indx, node in enumerate(gng_.nodes):
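The commented-out evaluation code above unpacks the result of a count_components helper that is not shown. A plausible minimal version with networkx, matching the (is_connected, num_components) unpacking used above:

import networkx as nx

def count_components(graph):
    # A non-empty graph is connected exactly when it has one component.
    num_components = nx.number_connected_components(graph)
    return num_components == 1, num_components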
Example 5
def get_top_n_persistence_node_location(n,
                                        map_type,
                                        obs_threshold,
                                        location_type="death",
                                        feature_type=0):
    """
    :param feature_type: 0 for connected components, 1 for loops
    :param n: top number of persistence
    :param map_type: intel or drive
    :param location_type: string representing birth or death
    :return: returns the birth or death persistence node
    """
    if location_type == "death":
        location_type_index = 1
    elif location_type == "birth":
        location_type_index = 0
    else:
        raise ValueError("Invalid location type")

    map_data, resolution = load_hilbert_map(map_type=map_type)
    map_array = convert_map_dict_to_array(map_data, resolution)

    fc = FreudenthalComplex(map_array)
    st = fc.init_freudenthal_2d()
    print_complex_attributes(st)

    if st.make_filtration_non_decreasing():
        print("modified filtration value")
    st.initialize_filtration()
    persistence_pairs = st.persistence()
    if len(persistence_pairs) <= 10:
        for i in persistence_pairs:
            print(i)

    first_persistence = st.persistence_intervals_in_dimension(feature_type)
    if feature_type == 1:
        # drop loops that are born after the obstacle threshold
        remove_indices = []
        for i in range(len(first_persistence)):
            if first_persistence[i][0] > obs_threshold:
                remove_indices.append(i)
        first_persistence = np.delete(first_persistence, remove_indices, 0)
    if feature_type == 0:
        # drop connected components that die after the obstacle threshold
        remove_indices = []
        for i in range(len(first_persistence)):
            if first_persistence[i][1] > obs_threshold:
                remove_indices.append(i)
        first_persistence = np.delete(first_persistence, remove_indices, 0)
    # rank features by life span (death minus birth) and keep the n longest-lived
    life_span = first_persistence[:, 1] - first_persistence[:, 0]
    winner_index = life_span.argsort()[-n:][::-1]
    print("len winner index ", len(winner_index))
    #print(life_span)
    winner_persistence = first_persistence[winner_index]
    print(winner_persistence, "winner_persistence")
    top_persistence_node = []
    # match each winning birth/death value back to the map locations whose
    # occupancy intensity is numerically identical to it
    for indx, intensity in enumerate(map_data['yq']):
        for j in range(n):
            p = winner_persistence[j]
            # if np.isclose(intensity, p[1]):
            #     top_persistence_node.append(map_data["Xq"][indx])
            if np.isclose(intensity,
                          p[location_type_index],
                          rtol=1e-10,
                          atol=1e-13):
                top_persistence_node.append(map_data["Xq"][indx])
                print(j, intensity)
    #return winner_persistence, life_span[winner_index]
    return top_persistence_node, life_span[winner_index]
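A minimal usage sketch for the function above; the argument values are illustrative, mirroring the map type and obstacle threshold used elsewhere in these examples:

if __name__ == "__main__":
    nodes, spans = get_top_n_persistence_node_location(
        5, "intel", obs_threshold=0.25, location_type="death", feature_type=0)
    # Several map locations can match one persistence value, so there may
    # be more returned nodes than features.
    for node in nodes:
        print(node)
    print("life spans:", spans)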