def check_all_half_lp(graph: nx.Graph, old_lpopt):
    """Verify that no single-vertex deletion drops the half-integral LP
    optimum by a full unit.

    :param graph: graph whose one-vertex-deleted subgraphs are tested
    :param old_lpopt: LP optimum value of the full graph
    :return: False as soon as deleting some vertex lowers the LP value by
        at least 1, True if no vertex does
    """
    all_nodes = set(graph.nodes_iter())
    for vertex in graph.nodes_iter():
        # solve the half-integral LP on the graph without this vertex
        solution = utils.halfint_lpvc(graph.subgraph(all_nodes - {vertex}))
        if sum(solution.values()) <= old_lpopt - 1:
            return False
    return True
def degreeone(graph: nx.Graph, k: int = -1):
    """
    Degree 1 vertices (RR2)

    Every pendant (degree-1) vertex can be covered by taking its unique
    neighbor into the vertex cover instead.

    :param graph: graph to reduce
    :param k: parameter k
    :return: 4-tuple (flag, changed, new_graph, vc) where flag=false denotes
        that this is a NO-instance and vice-versa, changed indicates if the
        reduction rule caused any change, new_graph is the reduced subgraph,
        vc is the partial vc constructed while applying reduction rules
    """
    pendants = {v for v, deg in graph.degree_iter() if deg == 1}
    if not pendants:
        # no degree-1 vertices: nothing to reduce
        return True, False, graph, set()
    vc = set()
    removed = set()
    while pendants:
        leaf = pendants.pop()
        if leaf in vc:
            # this leaf was already picked as some other leaf's neighbor
            continue
        # take the unique neighbor into the cover, drop the leaf itself
        vc.add(utils.first(graph.neighbors_iter(leaf)))
        removed.add(leaf)
    if len(vc) > k:
        # partial cover already exceeds the budget: NO-instance
        return False, False, graph, set()
    surviving = set(graph.nodes_iter()) - vc - removed
    return True, True, graph.subgraph(surviving), vc
def dfs(g: networkx.Graph, v: int = None):
    """
    traverses in a depth-first-search manner the graph

    :param g: the graph to traverse (Note: the graph is assumed to have a
        single component)
    :param v: the node to start traversal from. if not given, the first in
        g.nodes_iter() is used
    :return: the farthest node in the traversal, and the path length from that
    """
    # NOTE(review): `stack` is a module-level mutable shared by every call —
    # this makes dfs non-reentrant and not thread-safe; confirm no caller
    # relies on inspecting `stack` before refactoring to a local list.
    stack.clear()
    max_height = 0       # deepest stack size seen so far (path length)
    furthest_node = None  # node on top of the stack when max_height was set
    if v is None:
        v = next(g.nodes_iter())
    visited = {v}
    stack.append(v)
    while not len(stack) == 0:
        u = stack[-1]
        u_has_unvisited_neighbors = False
        # descend into the first unvisited neighbor, if any
        for w in g.neighbors(u):
            if w not in visited:
                visited.add(w)
                stack.append(w)
                u_has_unvisited_neighbors = True
                break
        # the current stack is a simple path from the root; track the longest
        if len(stack) > max_height:
            max_height = len(stack)
            furthest_node = stack[-1]
        if not u_has_unvisited_neighbors:
            # dead end: backtrack
            stack.pop()
    return furthest_node, max_height
def clustering_and_average_clustering(graph: nx.Graph) -> (List[float], float):
    """
    Returns the clustering coefficient of each node of network and the
    average clustering coefficient.

    Parameters
    ----------
    graph: a NetworkX graph object

    Returns
    -------
    clustering: list
        clustering coefficient of each network node
    average_clustering: floating point number
        average clustering coefficient (0.0 for an empty graph)
    """
    clustering = []
    for node in graph.nodes_iter():
        neighbors = nx.neighbors(G=graph, n=node)
        clustering_coef = 0
        n_links = 0
        if len(neighbors) > 1:
            # count ordered neighbor pairs that are connected; each edge is
            # counted twice, which matches the k*(k-1) denominator below
            for neighbor1 in neighbors:
                for neighbor2 in neighbors:
                    if graph.has_edge(u=neighbor1, v=neighbor2):
                        n_links += 1
            clustering_coef = n_links / (len(neighbors) * (len(neighbors) - 1))
        clustering.append(clustering_coef)
    if not clustering:
        # empty graph: avoid ZeroDivisionError, define the average as 0.0
        return clustering, 0.0
    average_clustering = sum(clustering) / float(len(clustering))
    return clustering, average_clustering
def planar_independent_set(
        graph: Graph,
        black_list: List = None,
        degree_lim=INDEPENDENT_SET_LEMMA_DEGREE) -> Set[Point]:
    """
    Assuming the graph is planar, then computes an independent set of size at
    least n/18 in which every node has degree at most 8. O(n) time.
    (Note: assume the graph is planar because it is computationally expensive
    to check)

    :param graph: networkx Graph (assumed planar)
    :param black_list: list of nodes that should not be in the independent
        set (None, the default, means no nodes are excluded)
    :param degree_lim: nodes in independent set have degree at most this
    :return: set of graph nodes
    """
    # BUG FIX: the default used to be the mutable `list()`, which is shared
    # across calls; use None as the sentinel instead.
    if black_list is None:
        black_list = []
    blocked = set(black_list)  # set for O(1) membership tests below
    unmarked_nodes = {
        node
        for node in graph.nodes_iter()
        if graph.degree(node) <= degree_lim and node not in blocked
    }
    independent_set = set()
    while len(unmarked_nodes) > 0:
        node = unmarked_nodes.pop()
        independent_set.add(node)
        # neighbors of a chosen node can never join the independent set
        unmarked_nodes -= set(graph.neighbors(node))
    return independent_set
def vertex_branching(graph: nx.Graph, budget: int):
    """Branch-and-reduce vertex cover search on the highest-degree vertex.

    Applies the reduction rules first, then branches on a maximum-degree
    vertex: either that vertex joins the cover (budget -1) or all of its
    neighbors do (budget -len(neighbors)). Branch order is randomized.

    :param graph: graph to cover
    :param budget: maximum number of cover vertices still allowed
    :return: (flag, vc) where flag=False means NO-instance and vc is then
        empty; otherwise vc is a vertex cover of size at most the original
        budget (folded vertices unfolded back in)
    """
    #print("in branching start:", graph.number_of_nodes(), graph.number_of_edges(), "budget", budget, file=sys.stderr)
    flag, graph, vc, folded_verts = reduction.rrhandler(graph, budget)
    #print("in branching vc:", vc, "fold_verts:", folded_verts, "new graph", graph.number_of_nodes(), file=sys.stderr)
    if not flag:
        # NO instance
        return False, set()
    # reductions already spent part of the budget
    budget -= len(vc) + len(folded_verts)
    num_edges = graph.number_of_edges()
    if budget < 0:
        return False, set()
    if budget <= 0 and num_edges > 0:
        # no budget left but edges remain uncovered
        return False, set()
    elif num_edges == 0:
        # everything covered: reverse the folding and report success
        return True, utils.unfold(vc, folded_verts)
    else:
        # budget > 0 and num_edges > 0
        cur_node, max_degree = max(graph.degree_iter(), key=itemgetter(1))
        # print("branching on vertex ", cur_node, "of degree", max_degree, file=sys.stderr)
        nbrs = set(graph.neighbors_iter(cur_node))
        flip = random.random() < 0.5
        # flip coin to decide branch order
        if flip:
            branches = [(nbrs, len(nbrs)), (set(), 1)]
        else:
            branches = [(set(), 1), (nbrs, len(nbrs))]
        nodes = set(graph.nodes_iter())
        for branch, budget_change in branches:
            if budget_change > budget:
                # this branch alone would overshoot the budget
                continue
            res, vc_new = vertex_branching(
                graph.subgraph(nodes - {cur_node} - branch),
                budget - budget_change)
            if res:
                # empty `branch` means cur_node itself was taken into the cover
                final_vc = vc | vc_new | (branch or {cur_node})
                return True, utils.unfold(final_vc, folded_verts)
        return False, set()
def get_centrality_measures(network: nx.Graph, tol: float):
    """
    Calculates five centrality measures (degree, betweenness, closeness,
    eigenvector centrality, and k-shell) for the nodes of the given network.

    The per-node values are ordered as network.nodes_iter() yields them.

    Parameters
    ----------
    network: networkx.Graph()
    tol: tolerance parameter to calculate eigenvector centrality, float

    Returns
    -------
    [degree, betweenness, closeness, eigenvector_centrality, kshell]:
        list of lists
    """
    betweenness_by_node = nx.betweenness_centrality(network)
    closeness_by_node = nx.closeness_centrality(network)
    eigen_by_node = nx.eigenvector_centrality(network, tol=tol)
    kshell_by_node = nx.core_number(network)
    ordered_nodes = list(network.nodes_iter())
    degree = [network.degree(n) for n in ordered_nodes]
    betweenness = [betweenness_by_node[n] for n in ordered_nodes]
    closeness = [closeness_by_node[n] for n in ordered_nodes]
    eigenvector_centrality = [eigen_by_node[n] for n in ordered_nodes]
    kshell = [kshell_by_node[n] for n in ordered_nodes]
    return [degree, betweenness, closeness, eigenvector_centrality, kshell]
def get_true_top_20pct_minority_grp(g: nx.Graph, ) -> float:
    """Fraction of minority-group nodes (attribute group == 'b') among the
    top 20% of nodes ranked by degree."""
    records = [
        {'degree': g.degree(node), 'group': data['group'] == 'b'}
        for node, data in g.nodes_iter(data=True)
    ]
    ranked = pd.DataFrame(records).sort_values('degree', ascending=False)
    top_count = math.floor(ranked.shape[0] / 5)  # top fifth of the nodes
    return ranked.head(top_count)['group'].mean()
def get_top_20pct_true(g: nx.Graph) -> pd.DataFrame:
    """Build a DataFrame of node attributes (plus degree and node id) and
    delegate to get_top_20pct.

    Note: intentionally mutates each node's attribute dict in place by
    adding 'degree' and 'idx' keys, matching the original behavior.
    """
    degree_by_node = g.degree()
    rows = []
    for node, attrs in g.nodes_iter(data=True):
        attrs['degree'] = degree_by_node[node]
        attrs['idx'] = node
        rows.append(attrs)
    return get_top_20pct(pd.DataFrame(rows))
def halfint_lpvc2(graph: nx.Graph):
    """ Deprecated half integral LP solver

    Solves the half-integral vertex-cover LP via the doubled bipartite
    graph: a matching-based vertex cover of the doubled graph assigns each
    original node 0, 0.5 or 1 depending on how many of its two copies are
    covered.
    """
    n = graph.number_of_nodes()
    doubled = make_double(graph)
    print("double graph created")
    mates = bipartite.maximum_matching(doubled)
    print("matching found")
    vc = bipartite.to_vertex_cover(doubled, mates)
    print("vc found")
    lp_values = {}
    for node in graph.nodes_iter():
        # each copy of the node that landed in the cover contributes 0.5
        lp_values[node] = 0.5 * ((node in vc) + (node + n in vc))
    return lp_values
def brute_ind_subsets(graph: nx.Graph):
    """Yield every independent subset of the graph's nodes by brute force.

    Enumerates the full powerset and keeps only subsets with no internal
    edge. Exponential in the number of nodes; intended for testing.
    """
    for subset in utils.powerset(graph.nodes_iter()):
        members = list(subset)
        independent = True
        # check every unordered pair exactly once (adjacency is symmetric
        # in an undirected nx.Graph)
        for i, u in enumerate(members):
            for v in members[i + 1:]:
                if v in graph[u]:
                    independent = False
                    break
            if not independent:
                break
        if independent:
            yield subset
def gen_ind_subsets(graph: nx.Graph):
    """Yield every independent subset of the graph's nodes.

    Uses an explicit stack to branch on one edge (u, v) at a time: either
    include u (discarding its neighbors), include v (discarding its
    neighbors), or exclude both. When the residual graph is edgeless, every
    subset of its nodes extends the current inclusion set.

    :param graph: graph whose independent subsets are enumerated
    :return: generator of node sets
    """
    m = graph.number_of_edges()
    if m == 0:
        # given graph is already ind. so all subsets are ind.
        for ss in utils.powerset(graph.nodes_iter()):
            yield set(ss)
        # BUG FIX: must stop here — the original fell through and called
        # utils.first on an empty edge iterator.
        return

    # at least one edge in graph
    u, v = utils.first(graph.edges_iter())
    # stack entries: (edge to branch on, branch state 0/1/2,
    #                 discarded nodes, included nodes)
    stack = [((u, v), 0, set(), set())]
    while stack:
        cur_edge, state, discard, include = stack.pop()
        if state > 2:
            # all three branches of this edge explored
            continue
        u, v = cur_edge
        if state == 0:
            # branch 1: include u, discard its neighborhood
            new_discard = discard | set(graph.neighbors_iter(u))
            new_include = include | {u}
        elif state == 1:
            # branch 2: include v, discard its neighborhood
            new_discard = discard | set(graph.neighbors_iter(v))
            new_include = include | {v}
        else:
            # branch 3: exclude both endpoints
            new_discard = discard | {u, v}
            new_include = include
        nodes = set(graph.nodes_iter())
        new_graph = graph.subgraph(nodes - new_discard - new_include)
        # re-push this edge with the next branch state
        stack.append((cur_edge, state + 1, discard, include))
        if new_graph.number_of_edges() == 0:
            # residual graph is edgeless: every subset of it is independent
            for subset in utils.powerset(new_graph.nodes_iter()):
                yield new_include | set(subset)
        else:
            # keep branching on some remaining edge
            new_edge = utils.first(new_graph.edges_iter())
            stack.append((new_edge, 0, new_discard, new_include))
def degreedel(graph: nx.Graph, k: int):
    """
    Degree 0 and degree greater than k vertices (RR1)

    Isolated vertices can never help a cover and are deleted; vertices of
    degree > k must be in any cover of size at most k.

    :param graph: graph to reduce
    :param k: parameter k
    :return: 4-tuple (flag, changed, new_graph, vc) where flag=false denotes
        that this is a NO-instance and vice-versa, changed indicates if the
        reduction rule caused any change, new_graph is the reduced subgraph,
        vc is the partial vc constructed while applying reduction rules
    """
    isolated = set()   # degree-0 vertices, simply deleted
    vc = set()         # degree > k vertices, forced into the cover
    for vertex, degree in graph.degree_iter():
        if degree == 0:
            isolated.add(vertex)
        if degree > k:
            vc.add(vertex)
    if len(vc) > k:
        # more than k forced vertices: NO-instance
        return False, False, graph, set()
    if not vc and not isolated:
        # rule did not fire at all
        return True, False, graph, vc
    survivors = set(graph.nodes_iter()) - vc - isolated
    return True, True, graph.subgraph(survivors), vc
def edge_branching(graph: nx.Graph, budget: int):
    """Classic 2-way vertex-cover branching: pick an arbitrary edge and
    recurse with either endpoint added to the cover.

    :param graph: graph to cover
    :param budget: maximum cover size still allowed
    :return: (flag, vc) — flag=False means no cover within budget exists,
        otherwise vc is such a cover
    """
    if graph.number_of_edges() == 0:
        # nothing left to cover
        return True, set()
    if budget <= 0:
        # edges remain but no budget
        return False, set()
    remaining = set(graph.nodes_iter())
    endpoint_a, endpoint_b = utils.first(graph.edges_iter())
    # some endpoint of every edge must be in the cover — try both
    for chosen in (endpoint_a, endpoint_b):
        ok, cover = edge_branching(graph.subgraph(remaining - {chosen}),
                                   budget - 1)
        if ok:
            return True, cover | {chosen}
    return False, set()
def get_degrees(graph: nx.Graph) -> List[int]:
    """
    Returns the degree of each node of network.

    Parameters
    ----------
    graph: a NetworkX graph object

    Returns
    -------
    degrees: list
        degrees of all network node
    """
    return [len(graph.neighbors(n=node)) for node in graph.nodes_iter()]
def halfint_lpvc(graph: nx.Graph, debug=False):
    """Half-integral LP vertex cover via matching on the doubled graph.

    Computes a maximum matching on the bipartite doubled graph, then assigns
    each original node an LP value in {0, 0.5, 1}: phase 1 grows the cover
    from unsaturated vertices (alternating-path style), phase 2 handles the
    remaining perfectly matched part by taking the left half.

    :param graph: input graph; graph.graph['max_id'] holds the node-id offset
        used to map doubled-graph ids back to original ids
    :param debug: when True, print progress via the local log() helper
    :return: dict node -> LP value (sums of 0.5 contributions)
    """
    def log(*args):
        # debug-only print wrapper
        if debug:
            print(*args)

    # offset separates the two copies of each node in the doubled graph
    offset = graph.graph['max_id']
    log("highest node", max(graph.nodes()), "offset", offset)
    doubled = make_double(graph)
    log("highest node", max(doubled.nodes()), "offset", offset)
    log("doubled graph created")
    mates = nx.algorithms.bipartite.maximum_matching(doubled)
    log("matching found")
    # vertices not saturated by the matching
    unsat = set(doubled.nodes_iter()) - mates.keys()
    vc = {i: 0 for i in graph.nodes_iter()}
    if unsat:
        log('phase1')
    while unsat:
        v = unsat.pop()
        # neigh = list(doubled.neighbors(v))
        neigh = doubled.neighbors(v)
        # intentionally kept as list not iterator
        # vc.update(neigh)
        # unsat.update(mates[nv] for nv in neigh)
        # mark mates of neighbors as unsat
        for nv in neigh:
            # map the doubled-graph id back to the original node id
            vc[(nv - 1) % offset + 1] += 0.5
            unsat.add(mates[nv])  # mark mates of neighbors as unsat
        doubled.remove_nodes_from(neigh)
        doubled.remove_node(v)
    # print("stopped at vc:", vc, "nodes:", doubled.nodes())
    # now no unsaturated vertices left
    # graph not empty => remaining perfect matching
    if doubled.number_of_nodes() > 0:
        # if left half chosen
        log('phase2')
        for v in doubled.nodes_iter():
            if v <= offset:
                vc[(v - 1) % offset + 1] += 0.5
    log("final vc:", set(vc.values()))
    log("sum:", sum(vc.values()))
    return vc
class ClusterNetwork(object):
    """Graph of representation objects with similarity-based clustering.

    Nodes are the sorted keys of ``reps`` mapped to integer indices; each
    node stores its representation under the 'rep' attribute. ``cluster``
    builds a similarity matrix, clusters it, and computes a 2-D MDS+PCA
    layout stored per node under 'pos'. Various sklearn metrics compare the
    cluster labels against each representation's ``_true_label``.
    """

    def __init__(self, reps):
        # reps: mapping key -> representation object (must expose
        # .attributes and ._true_label)
        self.g = Graph()
        self.N = len(reps.keys())
        nodes = []
        self.lookup = {}       # original key -> integer node index
        self.attributes = None
        for i, r in enumerate(sorted(reps.keys())):
            self.lookup[r] = i
            if self.attributes is None:
                # attribute names taken from the first representation
                self.attributes = list(reps[r].attributes.keys())
            nodes.append((i, {'rep': reps[r]}))
        self.g.add_nodes_from(nodes)
        self.clusters = None

    def __iter__(self):
        # iterate over the node attribute dicts
        for i, d in self.g.nodes_iter(data=True):
            yield d

    def __len__(self):
        return self.N

    def __getitem__(self, key):
        # str key -> node data via lookup; tuple -> similarity matrix entry;
        # anything else is treated as an integer node index
        if isinstance(key, str):
            return self.g.node[self.lookup[key]]
        elif isinstance(key, tuple):
            return self.simMat[key]
        return self.g.node[key]

    def cluster(self, scores, cluster_method, oneCluster):
        """Cluster the nodes from pairwise scores and lay them out in 2-D.

        :param scores: mapping (key1, key2) -> similarity, or None to reuse
            the existing self.simMat
        :param cluster_method: 'affinity', 'complete', or None (no-op)
        :param oneCluster: forwarded to affinity_cluster
        """
        #Clear any edges
        self.g.remove_edges_from(list(self.g.edges_iter(data=False)))
        if cluster_method is None:
            return
        if scores is not None:
            # build a symmetric similarity matrix, then negate it so it acts
            # as a distance/dissimilarity matrix downstream
            self.simMat = zeros((self.N, self.N))
            for k, v in scores.items():
                indOne = self.lookup[k[0]]
                indTwo = self.lookup[k[1]]
                self.simMat[indOne, indTwo] = v
                self.simMat[indTwo, indOne] = v
            self.simMat = -1 * self.simMat
        # NOTE(review): if cluster_method is neither 'affinity' nor
        # 'complete', `edges` is unbound below — confirm callers only pass
        # these two values (or None).
        if cluster_method == 'affinity':
            true_labels = array(
                [self[i]['rep']._true_label for i in range(self.N)])
            self.clusters = affinity_cluster(self.simMat, true_labels,
                                             oneCluster)
            edges = []
            # connect each cluster exemplar to its members (skip self-loop)
            for k, v in self.clusters.items():
                for v2 in v:
                    if v2[0] == k:
                        continue
                    edges.append((k, v2[0], v2[1]))
        elif cluster_method == 'complete':
            # fully connected graph weighted by the similarity matrix
            edges = []
            for i in range(self.N):
                for j in range(i + 1, self.N):
                    edges.append((i, j, self.simMat[i, j]))
        self.g.add_weighted_edges_from(edges)
        # fixed seed keeps the MDS embedding reproducible
        seed = RandomState(seed=3)
        mds = manifold.MDS(n_components=2,
                           max_iter=3000,
                           eps=1e-9,
                           random_state=seed,
                           dissimilarity="precomputed",
                           n_jobs=4)
        pos = mds.fit(-1 * self.simMat).embedding_
        clf = PCA(n_components=2)
        pos = clf.fit_transform(pos)
        for i, p in enumerate(pos):
            self.g.node[i]['pos'] = p

    def calc_reduction(self):
        """Compute a signed 'HyperHypoMeasure' per node from its distance to
        its cluster center, normalized by the cluster's mean weight; the
        sign encodes which representation is longer. No-op until cluster()
        has populated self.clusters."""
        if self.clusters is None:
            return
        means = {}            # cluster center -> mean member weight
        reverse_mapping = {}  # node -> its cluster center
        for k, v in self.clusters.items():
            s = 0
            for ind in v:
                reverse_mapping[ind[0]] = k
                s += ind[1]
            means[k] = s / len(v)
        for i in self.g.nodes_iter():
            clust_center = reverse_mapping[i]
            if i == clust_center:
                # a center has zero distance to itself by definition
                self.g.node[i]['HyperHypoMeasure'] = 0
                continue
            dist = self.g[i][clust_center]['weight']
            norm_dist = abs(dist - means[clust_center])
            # NOTE(review): reads a 'representation' key on node data, while
            # __init__ only sets 'rep' — confirm where 'representation' is
            # attached before relying on this method.
            len_diff = self[clust_center]['representation'].shape[0] - self[i][
                'representation'].shape[0]
            if len_diff < 0:
                # shorter center than member => negative measure
                norm_dist *= -1
            self.g.node[i]['HyperHypoMeasure'] = norm_dist
        if 'HyperHypoMeasure' not in self.attributes:
            self.attributes.append('HyperHypoMeasure')

    def get_edges(self):
        # edge list as an array of node-index pairs
        return array(self.g.edges(data=False))

    def labels(self):
        """Return (cluster labels, integer-encoded true labels) aligned by
        node index."""
        labels = list(range(len(self.g)))
        for k, v in self.clusters.items():
            for v2 in v:
                labels[v2[0]] = k
        true_labels = list()
        for i in range(len(labels)):
            true_labels.append(self[i]['rep']._true_label)
        # encode the true labels as consecutive integers
        levels = {x: i for i, x in enumerate(set(true_labels))}
        for i in range(len(true_labels)):
            true_labels[i] = levels[true_labels[i]]
        return array(labels), array(true_labels)

    def silhouette_coefficient(self):
        labels, true_labels = self.labels()
        return metrics.silhouette_score(self.simMat,
                                        labels,
                                        metric='precomputed')

    def homogeneity(self):
        labels, true_labels = self.labels()
        return metrics.homogeneity_score(true_labels, labels)

    def completeness(self):
        labels, true_labels = self.labels()
        return metrics.completeness_score(true_labels, labels)

    def v_score(self):
        labels, true_labels = self.labels()
        return metrics.v_measure_score(true_labels, labels)

    def adjusted_mutual_information(self):
        labels, true_labels = self.labels()
        return metrics.adjusted_mutual_info_score(true_labels, labels)

    def adjusted_rand_score(self):
        labels, true_labels = self.labels()
        return metrics.adjusted_rand_score(true_labels, labels)
class InteractiveWaypointServer(object):
    """ROS interactive-marker server for editing a waypoint graph.

    Waypoints are interactive markers; edges between them are LINE_LIST
    markers published on /interactive_waypoint_server/edges. Every mutation
    is mirrored into the ROSPlan knowledge base (instances, 'connected'
    facts and 'distance' functions) and a message-store database, and the
    graph itself is kept in a networkx Graph. Waypoint sets can be saved
    to / loaded from YAML files.
    """

    def __init__(self):
        self.server = InteractiveMarkerServer("interactive_waypoint_server")
        self.waypoint_graph = Graph()
        self.next_waypoint_id = 0
        self.next_edge_id = 0
        self.state = STATE_REGULAR       # UI mode (regular / connect / none)
        self.connect_first_marker = ""   # first endpoint while connecting
        self.edge_line_publisher = rospy.Publisher(
            "/interactive_waypoint_server/edges", MarkerArray, queue_size=10)
        self.knowledge_service = rospy.ServiceProxy(
            "/kcl_rosplan/update_knowledge_base_array",
            KnowledgeUpdateServiceArray)
        self.message_store = message_store.MessageStoreProxy()
        # clear the database
        entries = self.message_store.query(PoseStamped._type, single=False)
        for entry in entries:
            id = entry[1]["_id"]
            self.message_store.delete(str(id))

    def insertMarkerCallback(self, pos):
        # callback for externally published insert requests
        rospy.logdebug(
            "(Interactive_Waypoint_Server) Inserting new waypoint at position ({0},{1},{2})."
            .format(pos.point.x, pos.point.y, pos.point.z))
        self.insertMarker(pos.point)

    def clearAllMarkers(self):
        """Delete every edge marker and purge all waypoints from the
        message store and the knowledge base."""
        edges = MarkerArray()
        marker = Marker()
        marker.header.stamp = rospy.Time.now()
        marker.header.frame_id = "map"
        marker.ns = "waypoint_edges"
        marker.id = 0
        marker.action = Marker.DELETEALL
        edges.markers.append(marker)
        self.edge_line_publisher.publish(edges)
        # clear databases
        for node, data in self.waypoint_graph.nodes_iter(data=True):
            self.message_store.delete(data["id"])
            self.knowledge_service(
                update_type=self.knowledge_service.request_class.
                REMOVE_KNOWLEDGE,
                knowledge=[
                    KnowledgeItem(knowledge_type=KnowledgeItem.INSTANCE,
                                  instance_type="waypoint",
                                  instance_name=node)
                ])

    def _makeMarker(self, msg):
        # green sphere sized relative to the interactive marker's scale
        marker = Marker()
        marker.type = Marker.SPHERE
        marker.scale.x = msg.scale * 0.45
        marker.scale.y = msg.scale * 0.45
        marker.scale.z = msg.scale * 0.45
        marker.color.r = 0
        marker.color.g = 1
        marker.color.b = 0
        marker.color.a = 1.0
        return marker

    def _makeEdge(self, scale, begin, end):
        # blue line-list marker between two points; consumes an edge id
        marker = Marker()
        marker.header.frame_id = "map"
        marker.header.stamp = rospy.Time.now()
        marker.ns = "waypoint_edges"
        marker.id = self.next_edge_id
        self.next_edge_id += 1
        marker.type = Marker.LINE_LIST
        marker.action = Marker.ADD
        marker.scale.x = scale * 0.45
        marker.scale.y = scale * 0.45
        marker.scale.z = scale * 0.45
        marker.color.r = 0
        marker.color.g = 0
        marker.color.b = 1
        marker.color.a = 1.0
        marker.points.append(begin)
        marker.points.append(end)
        return marker

    def _removeMarker(self, name):
        """Remove one waypoint: knowledge base first, then edges, message
        store entry, graph node and the interactive marker itself."""
        if self.knowledge_service(
                update_type=self.knowledge_service.request_class.
                REMOVE_KNOWLEDGE,
                knowledge=[
                    KnowledgeItem(knowledge_type=KnowledgeItem.INSTANCE,
                                  instance_type="waypoint",
                                  instance_name=name)
                ]):
            self._clearMarker(name)
            # id = self.message_store.insert_named(int_marker.name, pstamped)
            worked = self.message_store.delete(
                self.waypoint_graph.node[name]["id"])
            self.waypoint_graph.remove_node(name)
            self.server.erase(name)
            self.server.applyChanges()
        else:
            rospy.logerr(
                "(Interactive_Waypoint_Server) Failed to remove waypoint {0}. Knowledge database service call returned error."
                .format(name))

    def _clearMarker(self, name):
        # remove all edges to a waypoint
        edges = MarkerArray()
        to_remove = []
        for u, v, data in self.waypoint_graph.edges(name, data=True):
            marker = data["marker"]
            marker.action = Marker.DELETE
            to_remove.append((u, v, marker))
        # update knowledge database to remove all edges
        knowledgelist = []
        for u, v, marker in to_remove:
            knowledgelist.extend(
                self._removeEdgefromKnowledgeBase(u, v,
                                                  ret_knowledge_only=True))
        if self.knowledge_service(update_type=self.knowledge_service.
                                  request_class.REMOVE_KNOWLEDGE,
                                  knowledge=knowledgelist):
            for u, v, marker in to_remove:
                edges.markers.append(marker)
                self.waypoint_graph.remove_edge(u, v)
            self.edge_line_publisher.publish(edges)  # publish deletion

    def _addEdgeToKnowledgeBase(self, first, second):
        """Add bidirectional 'connected' facts and 'distance' functions for
        an edge to the knowledge base."""
        firstpos = self.server.get(first).pose.position
        secondpos = self.server.get(second).pose.position
        dist = euclidean_distance(firstpos, secondpos)
        return self.knowledge_service(
            update_type=self.knowledge_service.request_class.ADD_KNOWLEDGE,
            knowledge=[
                KnowledgeItem(
                    knowledge_type=KnowledgeItem.FACT,  # first -> second
                    attribute_name="connected",
                    values=[
                        KeyValue(key="from", value=first),
                        KeyValue(key="to", value=second)
                    ]),
                KnowledgeItem(
                    knowledge_type=KnowledgeItem.FACT,  # second -> first
                    attribute_name="connected",
                    values=[
                        KeyValue(key="from", value=second),
                        KeyValue(key="to", value=first)
                    ]),
                KnowledgeItem(
                    knowledge_type=KnowledgeItem.FUNCTION,  # second -> first
                    attribute_name="distance",
                    values=[
                        KeyValue(key="from", value=first),
                        KeyValue(key="to", value=second)
                    ],
                    function_value=dist),
                KnowledgeItem(
                    knowledge_type=KnowledgeItem.FUNCTION,  # second -> first
                    attribute_name="distance",
                    values=[
                        KeyValue(key="from", value=second),
                        KeyValue(key="to", value=first)
                    ],
                    function_value=dist)
            ])

    def _removeEdgefromKnowledgeBase(self, first, second,
                                     ret_knowledge_only=False):
        """Build (and optionally submit) the knowledge items that undo
        _addEdgeToKnowledgeBase for one edge.

        :param ret_knowledge_only: if True, return the item list instead of
            calling the service (used for batched removals)
        """
        knowledge = [
            KnowledgeItem(
                knowledge_type=KnowledgeItem.FACT,  # first -> second
                attribute_name="connected",
                values=[
                    KeyValue(key="from", value=first),
                    KeyValue(key="to", value=second)
                ]),
            KnowledgeItem(
                knowledge_type=KnowledgeItem.FACT,  # second -> first
                attribute_name="connected",
                values=[
                    KeyValue(key="from", value=second),
                    KeyValue(key="to", value=first)
                ]),
            KnowledgeItem(
                knowledge_type=KnowledgeItem.FUNCTION,  # second -> first
                attribute_name="distance",
                values=[
                    KeyValue(key="from", value=first),
                    KeyValue(key="to", value=second)
                ],
                function_value=0),
            KnowledgeItem(
                knowledge_type=KnowledgeItem.FUNCTION,  # second -> first
                attribute_name="distance",
                values=[
                    KeyValue(key="from", value=second),
                    KeyValue(key="to", value=first)
                ],
                function_value=0)
        ]
        if ret_knowledge_only:
            return knowledge
        else:
            return self.knowledge_service(update_type=self.knowledge_service.
                                          request_class.REMOVE_KNOWLEDGE,
                                          knowledge=knowledge)

    def removeConnectionServiceCall(self, request):
        # service endpoint: _connectMarkers toggles, so calling it on an
        # existing edge removes that edge
        first = request.first
        second = request.second
        if self.waypoint_graph.has_edge(first, second):
            self._connectMarkers(first, second)
        return RemoveConnectionResponse()

    def _connectMarkers(self, first, second):
        """Toggle the edge between two waypoints (remove if present,
        otherwise create it), keeping the knowledge base in sync."""
        if self.waypoint_graph.has_edge(first, second):
            # update knowledge base to remove edge
            if self._removeEdgefromKnowledgeBase(first, second):
                # delete edge
                edges = MarkerArray()
                marker = self.waypoint_graph.get_edge_data(first,
                                                           second)["marker"]
                marker.action = Marker.DELETE
                edges.markers.append(marker)
                self.waypoint_graph.remove_edge(first, second)
                self.edge_line_publisher.publish(edges)  # publish deletion
            else:
                rospy.logerr(
                    "(Interactive_Waypoint_Server) Failed to remove edge between {0} and {1}. Knowledge database service call returned error."
                    .format(first, second))
        else:
            firstpos = self.server.get(first).pose.position
            secondpos = self.server.get(second).pose.position
            # update knowledge base to insert edge
            if self._addEdgeToKnowledgeBase(first, second):
                marker = self._makeEdge(0.2, firstpos, secondpos)
                # insert edge into graph
                self.waypoint_graph.add_edge(first, second, {
                    "first": first,
                    "marker": marker
                })
                self.updateEdges()
            else:
                rospy.logerr(
                    "(Interactive_Waypoint_Server) Failed to insert edge between {0} and {1}. Knowledge database service call returned error."
                    .format(first, second))

    def updateEdges(self):
        # republish every edge marker currently in the graph
        edges = MarkerArray()
        for u, v, data in self.waypoint_graph.edges_iter(data=True):
            edges.markers.append(data["marker"])
        self.edge_line_publisher.publish(edges)

    def processFeedback(self, feedback):
        """Main interactive-marker callback: menu actions, connect mode and
        pose persistence on mouse-up."""
        if feedback.event_type == InteractiveMarkerFeedback.MENU_SELECT:
            handle = feedback.menu_entry_id
            if handle == MENU_CONNECT:
                # next MOUSE_UP on another marker completes the connection
                self.state = STATE_CONNECT
                self.connect_first_marker = feedback.marker_name
            elif handle == MENU_CLEAR:
                self._clearMarker(feedback.marker_name)
            elif handle == MENU_REMOVE:
                self._removeMarker(feedback.marker_name)
        elif feedback.event_type == InteractiveMarkerFeedback.MOUSE_UP:
            if self.state == STATE_CONNECT:
                self.state = STATE_NONE
                self._connectMarkers(self.connect_first_marker,
                                     feedback.marker_name)
            elif self.state == STATE_NONE:
                pass  #ignore
            else:
                # marker was dragged: persist its new pose
                pos = feedback.pose.position
                rospy.logdebug(
                    "(Interactive_Waypoint_Server) Updateing pose of marker {3} to ({0},{1},{2})"
                    .format(pos.x, pos.y, pos.z, feedback.marker_name))
                # update database
                # push to scene database
                pstamped = PoseStamped()
                pstamped.header.frame_id = "map"
                pstamped.pose = feedback.pose
                self.message_store.update_named(feedback.marker_name,
                                                pstamped)
        elif feedback.event_type == InteractiveMarkerFeedback.MOUSE_DOWN:
            if self.state == STATE_NONE:
                self.state = STATE_REGULAR
        self.server.applyChanges()

    def moveFeedback(self, feedback):
        # POSE_UPDATE callback: move the marker and drag its edges along
        pose = feedback.pose
        self.server.setPose(feedback.marker_name, pose)
        # correct all edges
        for u, v, data in self.waypoint_graph.edges([feedback.marker_name],
                                                    data=True):
            # "first" records which endpoint owns points[0] of the marker
            if feedback.marker_name == data["first"]:
                data["marker"].points[0] = pose.position
            else:
                data["marker"].points[1] = pose.position
        self.updateEdges()
        self.server.applyChanges()

    def insertMarker(self, position, name=None, frame_id="map"):
        """Create a new waypoint marker (auto-named wpN when name is None),
        register it in the knowledge base, marker server, message store and
        the graph."""
        if name is None:
            name = "wp{0}".format(self.next_waypoint_id)
            self.next_waypoint_id += 1
        # update rosplan knowledge base and insert waypoint if successful
        if self.knowledge_service(
                update_type=self.knowledge_service.request_class.
                ADD_KNOWLEDGE,
                knowledge=[
                    KnowledgeItem(knowledge_type=KnowledgeItem.INSTANCE,
                                  instance_type="waypoint",
                                  instance_name=name)
                ]):
            int_marker = InteractiveMarker()
            int_marker.header.frame_id = frame_id
            int_marker.pose.position = position
            int_marker.scale = 0.5
            int_marker.name = name
            int_marker.description = "Waypoint: {0}".format(name)
            # markers a moveable in the plane
            control = InteractiveMarkerControl()
            control.orientation.w = 1
            control.orientation.x = 0
            control.orientation.y = 1
            control.orientation.z = 0
            control.interaction_mode = InteractiveMarkerControl.MOVE_PLANE
            menu_handler = MenuHandler()
            menu_handler.insert("Connect/Disconnect",
                                callback=self.processFeedback)
            menu_handler.insert("Clear", callback=self.processFeedback)
            menu_handler.insert("Remove", callback=self.processFeedback)
            # make a box which also moves in the plane
            control.markers.append(self._makeMarker(int_marker))
            control.always_visible = True
            int_marker.controls.append(control)
            # we want to use our special callback function
            self.server.insert(int_marker, self.processFeedback)
            menu_handler.apply(self.server, int_marker.name)
            # set different callback for POSE_UPDATE feedback
            self.server.setCallback(int_marker.name, self.moveFeedback,
                                    InteractiveMarkerFeedback.POSE_UPDATE)
            self.server.applyChanges()
            # push to scene database
            pstamped = PoseStamped()
            pstamped.header.frame_id = frame_id
            pstamped.pose = self.server.get(int_marker.name).pose
            pstamped.pose.orientation.w = 1.0  # otherwise movebase will reject the quaternion
            id = self.message_store.insert_named(int_marker.name, pstamped)
            # insert into graph
            self.waypoint_graph.add_node(int_marker.name, {"id": id})
        else:
            rospy.logerr(
                "(Interactive_Waypoint_Server) Failed to insert waypoint {0}. Knowledge database service call returned error."
                .format(name))

    def saveWaypointsServicecall(self, request):
        # service endpoint wrapping _saveWaypointsToFile
        filename = request.file_name
        self._saveWaypointsToFile(filename)
        rospy.loginfo(
            "(Interactive_Waypoint_Server) Saved waypoints to {0}".format(
                filename))
        return SaveWaypointsResponse()

    def _saveWaypointsToFile(self, filename):
        # dump waypoint positions and edge pairs as YAML
        data = {"waypoints": {}, "edges": []}
        for node in self.waypoint_graph.nodes_iter():
            pos = self.server.get(node).pose.position
            data["waypoints"].update(
                {node: {
                    "x": pos.x,
                    "y": pos.y,
                    "z": pos.z
                }})
        for u, v in self.waypoint_graph.edges_iter():
            data["edges"].append([u, v])
        with open(filename, 'w') as f:
            yaml.dump(data, f, default_flow_style=False)

    def loadWaypointsFromFile(self, filename):
        # recreate markers and connections from a YAML file produced by
        # _saveWaypointsToFile
        with open(filename, 'r') as f:
            data = yaml.load(f)
            for name, pos in data["waypoints"].items():
                point = Point(pos["x"], pos["y"], pos["z"])
                self.insertMarker(point, name)
            for edge in data["edges"]:
                self._connectMarkers(edge[0], edge[1])
class WishPool:

    def __init__(
            self,
            wishes,
            min_edge_score=MIN_EDGE_SCORE,
            # min_number_of_edges = MIN_NUMBER_OF_EDGES,
            connector=default_connector):
        """
        A filterable pool of wishes represented as nodes.
        Is used to find relations between wishes.

        @param connector: default connector is jaccard
        @return:
        """
        self.graph = Graph()
        self.min_edge_weight = min_edge_score
        # self.min_number_of_edges = min_number_of_edges
        for wish in wishes:
            self.graph.add_node(wish)
            # loops through the existing nodes in the graph
            for other in self.graph.nodes_iter():
                # compares candidate to all existing nodes except itself
                # (and never links two wishes of the same person)
                if other != wish and other.person != wish.person:
                    score = connector(wish, other)
                    if score > self.min_edge_weight:
                        self.graph.add_edge(wish, other, weight=score)
        ## processes the graph, excludes lonely nodes
        self.connected_nodes = self.update_connected_nodes()
        debug_graph(self.graph, message="Connected nodes are set")
        self.lonely_nodes = self.update_lonely_nodes()
        self.update_cliques()
        ## evaluates alternatives, gathers suggestions for each wish
        # this implies that some cliques occur twice
        self.suggestions = self.update_suggestions()

    def update_connected_nodes(self):
        """Return the set of nodes incident to at least one edge."""
        connected = set()
        for edge in self.graph.edges():
            # BUG FIX: both endpoints are connected; the original recorded
            # only edge[0], so nodes appearing only as the second endpoint
            # were wrongly counted as lonely.
            connected.add(edge[0])
            connected.add(edge[1])
        return connected

    def update_lonely_nodes(self):
        """Return the nodes with no edges at all."""
        whole_graph = set(self.graph.nodes())
        return whole_graph.difference(self.connected_nodes)

    def remove_lonely_nodes(self):
        raise NotImplementedError

    def update_cliques(self):
        """Find all cliques of size > 1 and store them as Clique objects."""
        result = set()
        for c in find_cliques(self.graph):
            if len(c) > 1:
                result.add(Clique(c))
        self.cliques = result
        return result

    def get_distributed_cliques(self):
        """Map each wish's pk to the non-trivial cliques containing it."""
        self.cliques_for_wish = {}
        for n in self.graph.nodes():
            clique_buffer = [c for c in self.cliques if n in c.nodes]
            if len(clique_buffer):
                self.cliques_for_wish[n.pk] = [
                    c for c in clique_buffer if len(c.nodes) > 1
                ]
        return self.cliques_for_wish

    def get_conflicting_cliques(self):
        """Return wishes (by pk) that belong to more than one clique."""
        result = {}
        # BUG FIX: iterate .items() — iterating the dict directly yields
        # only keys, so unpacking into (w, c) raised at runtime.
        for w, c in self.get_distributed_cliques().items():
            if len(c) > 1:
                result[w] = c
        return result

    def update_suggestions(self):
        """Build a Suggestion per wish from its distributed cliques."""
        suggestions = []
        for wish_pk, cliques in self.get_distributed_cliques().items():
            suggestion = Suggestion(Wish.objects.get(pk=wish_pk), cliques)
            suggestions.append(suggestion)
        self.suggestions = suggestions
        return suggestions

    def create_groups(self):
        """Create a group for each distinct best clique of the suggestions."""
        distinct_cliques = set()
        for s in self.suggestions:
            distinct_cliques.add(s.get_best_clique())
        for c in distinct_cliques:
            c.create_group()

    def get_suggestion_pool(self):
        return SuggestionPool(self.suggestions)
# Drop nodes outside the shapefile region, then delete the nodes that were
# left without any edges, then translate node positions so the minimum x/y
# becomes the origin. Relies on `graph`, `shape_file` and `Point` defined
# earlier in the file (outside this chunk).
n = graph.number_of_nodes()
i = 0
print('Apply shapefile')
for node in graph.nodes():  # nodes() gives a snapshot, safe to remove from
    p = Point(graph.node[node]['lon'], graph.node[node]['lat'])
    if not shape_file.contains(p):
        graph.remove_node(node)
    i += 1
    print('{0}/{1} nodes processed'.format(i, n), end='\r')
print('{0}/{1} nodes processed'.format(i, n))
print('Search for orphaned nodes')
orphaned = set()
n = graph.number_of_nodes()
i = 0
for node in graph.nodes_iter():
    if graph.degree(node) == 0:
        orphaned.add(node)
    i += 1
    print('{0}/{1} nodes processed'.format(i, n), end='\r')
print('{0}/{1} nodes processed'.format(i, n))
print('Delete {0} orphaned nodes'.format(len(orphaned)))
graph.remove_nodes_from(orphaned)
print('Calculate offset')
points = [node[1]['pos'] for node in graph.nodes(data=True)]
min_x = min(points, key=lambda p: p[0])[0]
min_y = min(points, key=lambda p: p[1])[1]
for node in graph.nodes_iter():
    # NOTE(review): `pos` is computed but never written back to the node
    # here — presumably the assignment `graph.node[node]['pos'] = pos`
    # follows beyond this chunk; confirm before refactoring.
    pos = (graph.node[node]['pos'][0] - min_x,
           graph.node[node]['pos'][1] - min_y)