def get_short_path(nodes, from_node, to_node):
    """Return the shortest path from *from_node* to *to_node* as node dicts.

    Each entry of *nodes* is a dict carrying a 'node' id and an 'edges' list
    of neighbour ids.  Edge costs come from _get_distance(); the actual
    search is delegated to path.shortest_path().
    """
    # Index every node dict by its id for O(1) lookups.
    node_lookup = {entry['node']: entry for entry in nodes}

    # Adjacency list: node id -> [(neighbour id, traversal cost), ...]
    graph = {
        entry['node']: [
            (neighbour, _get_distance(node_lookup, entry['node'], neighbour))
            for neighbour in entry['edges']
        ]
        for entry in nodes
    }

    short_path = path.shortest_path(graph, from_node, to_node)
    # Map the ids on the path back to their full node dicts.
    return [node_lookup[node_id] for node_id in short_path]
def main(): """ main method """ print "Calibrating Roboclaws" #roboclaw.calibrateRoboclaws() try: thread.start_new_thread(receive, ("ReceiverThread", 1)) except: print "Error: unable to start thread" mutex.acquire() # follow_behind_ball() # rush_goal() # score_goal() #go_to_point_behind_ball() #one_on_one() #test() visited, shortest_path = path.shortest_path(graph, '0,0', '5,0') path.print_path(shortest_path) while True: pass
def generate_path(self):
    """Compute the shortest path between the stored end points.

    Reads the (x, y) start/goal coordinates from self.end_points, delegates
    the search to the external `path` module over self.nodes, and stores the
    result on self.path.
    """
    # end_points holds [start_x, start_y, end_x, end_y].
    start_x, start_y, end_x, end_y = self.end_points[:4]
    self.path = path.shortest_path(
        self.nodes, [start_x, start_y], [end_x, end_y])
'--depth', help='Scraping depth.', type=int, default=2) parser_path = subparsers.add_parser('path') parser_path.add_argument("START_URL", type=verified_url_format, help=""" Starting URL in format 'https://xxxxx.yy', 'https://xxxx.yy/zzz/' or 'https://xxxx.yy/zzz') """) parser_path.add_argument("END_URL", type=verified_url_format, help=""" Path end URL in format 'https://xxxxx.yy', 'https://xxxx.yy/zzz/' or 'https://xxxx.yy/zzz') """) args = parser.parse_args() start_url = args.START_URL job = args.job if job == 'path': end_url = args.END_URL shortest_path(start_url, end_url) if job == 'network': depth = args.depth scrap(start_url, depth)