def solver(G, source):
    # type: (nx.Graph, typing.Any) -> typing.Tuple[typing.Tuple[typing.Any, ...], int]
    """
    Produces the optimal TSP tour using a naive brute-force search - O(n!)

    :param G: A fully connected networkx graph.
    :param source: A source node in G.
    :return: A tuple of nodes to visit, starting and ending at ``source``
        (the tour is explicitly closed), and the cost of that tour.
    """
    utils.check_arguments(G, source)
    n = G.number_of_nodes()
    best_node_permutation = None
    best_cost = INFINITY
    distance = utils.get_adjacency_dicts(G)
    # Fix `source` as the first node and permute only the remaining nodes:
    # (n-1)! candidate tours, instead of generating all n! permutations and
    # discarding every one that does not start at `source`. The surviving
    # tours are identical (and produced in the same order), just without the
    # wasted iterations.
    other_nodes = [node for node in G.nodes if node != source]
    for tail in itertools.permutations(other_nodes):
        node_permutation = (source,) + tail
        cost_of_node_permutation = 0
        for i in range(n):
            current_node = node_permutation[i]
            # (i + 1) % n wraps around so the last edge returns to `source`.
            next_node = node_permutation[(i + 1) % n]
            cost_of_node_permutation += distance[current_node][next_node]
        if cost_of_node_permutation < best_cost:
            # Append `source` so the returned tour is explicitly closed.
            best_node_permutation = node_permutation + (source,)
            best_cost = cost_of_node_permutation
    return best_node_permutation, best_cost
def main():
    """Mark duplicate files for deletion, then move/trash them.

    Relies on module-level names defined elsewhere in the file: ``db``,
    ``FMT_STR_MARKED_FILES``, ``FMT_STR_TRASHED_FILES`` and the helper
    functions called below.
    """
    #
    # ---> Check for 'restart' argument
    #
    arguments = utils.check_arguments(sys.argv)
    if "restart" in arguments:
        restart_clean(db)

    #
    # ---> Catch the exit signal to commit the database with last checkpoint
    #
    signal.signal(signal.SIGINT, exit_handler)

    #
    # --> Marking files for deletion
    #
    nb, size = find_for_deletion(db)
    print(FMT_STR_MARKED_FILES.format(nb, utils.humanbytes(size)))

    #
    # --> Deleting/Trashing files
    #
    nb_trash, nb_fail, size = move_files(db)
    # BUG FIX: the original formatted an undefined name `trash`, which would
    # raise NameError here at runtime; the failure count `nb_fail` returned
    # by move_files() is the intended second value.
    print(FMT_STR_TRASHED_FILES.format(nb_trash, nb_fail, utils.humanbytes(size)))
    return
def handle_dig_circular(req):
    """Service handler: plan a circular digging trajectory and record it.

    :param req: Incoming service request, parsed by
        ``activity_full_digging_traj.arg_parsing_circ``.
    :return: ``(True, "Done")`` on success; ``None`` when the arguments are
        invalid or the session is interrupted.
    """
    try:
        interface = MoveGroupPythonInteface()
        print("Starting full traj planning session")
        dig_circular_args = activity_full_digging_traj.arg_parsing_circ(req)
        # NOTE(review): `!= True` kept as-is — check_arguments may return a
        # non-bool sentinel; confirm before simplifying to `not ...`.
        if utils.check_arguments(dig_circular_args[1], dig_circular_args[2],
                                 dig_circular_args[3]) != True:
            print("[ERROR] Invalid trench input arguments. Exiting path_planning_commander...")
            return
        # Timestamped bag name so repeated sessions do not overwrite each other.
        currentDT = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        location = "full_traj_"
        bagname = location + currentDT
        utils.start_traj_recording(dig_circular_args[6], bagname)
        result = activity_full_digging_traj.dig_circular(
            interface.move_arm, interface.move_limbs, dig_circular_args)
        utils.stop_traj_recording(result, bagname)
    except rospy.ROSInterruptException:
        return
    except KeyboardInterrupt:
        return
    # Fixed user-facing typo: "succesfully" -> "successfully".
    print("Finished planning session successfully...")
    return True, "Done"
def handle_deliver_sample(req):
    """Service handler: plan a sample-delivery trajectory and record it.

    :param req: Incoming service request, parsed by
        ``activity_deliver_sample.arg_parsing``.
    :return: ``(True, "Done")`` on success; ``None`` when the arguments are
        invalid or the session is interrupted.
    """
    try:
        interface = MoveGroupPythonInteface()
        print("Starting sample delivery session")
        deliver_sample_args = activity_deliver_sample.arg_parsing(req)
        # NOTE(review): `!= True` kept as-is — check_arguments may return a
        # non-bool sentinel; confirm before simplifying to `not ...`.
        if utils.check_arguments(deliver_sample_args[1], deliver_sample_args[2],
                                 deliver_sample_args[3]) != True:
            print("[ERROR] Invalid sample delivery input arguments. Exiting path_planning_commander...")
            return
        # Timestamped bag name so repeated sessions do not overwrite each other.
        currentDT = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        location = "full_traj_"
        bagname = location + currentDT
        utils.start_traj_recording(deliver_sample_args[4], bagname)
        result = activity_deliver_sample.deliver_sample(
            interface.move_arm, deliver_sample_args)
        utils.stop_traj_recording(result, bagname)
    except rospy.ROSInterruptException:
        return
    except KeyboardInterrupt:
        return
    # BUG FIX: the original message said "linear trenching" — a copy-paste
    # from another handler; this one delivers samples. Also fixed the
    # "succesfully" typo.
    print("Finished sample delivery session successfully...")
    return True, "Done"
def handle_grind(req):
    """Service handler: plan a grinder trajectory and record it.

    :param req: Incoming service request, parsed by
        ``activity_grind.arg_parsing``.
    :return: ``(True, "Done")`` on success; ``None`` when the arguments are
        invalid or the session is interrupted.
    """
    try:
        interface = MoveGroupPythonInteface()
        print("Starting grinder planning session")
        grind_args = activity_grind.arg_parsing(req)
        # NOTE(review): `!= True` kept as-is — check_arguments may return a
        # non-bool sentinel; confirm before simplifying to `not ...`.
        if utils.check_arguments(grind_args[1], grind_args[2],
                                 grind_args[3]) != True:
            print("[ERROR] Invalid grinder trajectory input arguments. Exiting path_planning_commander...")
            return
        # Timestamped bag name so repeated sessions do not overwrite each other.
        currentDT = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        location = "full_traj_"
        bagname = location + currentDT
        utils.start_traj_recording(grind_args[6], bagname)
        result = activity_grind.grind(interface.move_arm, interface.move_limbs,
                                      interface.move_grinder, grind_args)
        utils.stop_traj_recording(result, bagname)
    except rospy.ROSInterruptException:
        return
    except KeyboardInterrupt:
        return
    # Fixed user-facing typo: "succesfully" -> "successfully".
    print("Grinder planning successfully finished...")
    return True, "Done"
def main():
    """Run the duplicate-file detection pipeline, resuming from the last
    checkpoint recorded in the database.

    Steps: directory scan -> quick pre-hash on first bytes -> full rehash of
    pre-hash collisions -> duplicate marking and summary. Relies on
    module-level names defined elsewhere (``filelist``, ``db`` and the helper
    functions called below).
    """
    # Colorama init
    init()

    #
    # ---> Check for 'restart' argument
    #
    arguments = utils.check_arguments(sys.argv)
    restart = "restart" in arguments

    #
    # ---> Catch the exit signal to commit the database with last checkpoint
    #
    signal.signal(signal.SIGINT, exit_handler)

    #
    # ---> Read the directory files list
    #
    with open(filelist, "r") as f:
        basepath = f.readlines()

    print("Default blocksize for this system is {} bytes.".format(
        io.DEFAULT_BUFFER_SIZE))

    #
    # ---> DB connection
    #
    cnx = db_connect(db, restart)
    last_step, last_id = get_status(cnx)
    print("Last step: {}, last ID: {}".format(last_step, last_id))
    next_step = False

    # Looking for files
    # ---
    # Idiom fixes vs. original: `is None` instead of `== None`, and
    # short-circuiting `or`/`and` instead of bitwise `|`/`&` — identical
    # results here since every operand is a pure boolean comparison.
    if last_step is None or (last_step == "directory_lookup"
                             and last_id == "in progress"):
        t, nb = directories_lookup(cnx, basepath)
        print("Files lookup duration: {:.2f} sec for {} files.".format(t, nb))
        next_step = True
    else:
        print("Files lookup already done.")

    # Calculating pre hash (quick hash on first bytes)
    # ---
    if (next_step
            or (last_step == "directory_lookup" and last_id == "all")
            or (last_step == "filelist_pre_hash" and last_id != "all")):
        t = filelist_pre_hash(cnx, 'md5')
        print("Pre-hash calculation duration: {:.2f} sec. ". format(t))
        next_step = True
    else:
        print("Pre-hash calculation already done.")

    # Calculate size of all files
    # ---
    res = cnx.execute("select sum(size) FROM filelist")
    size = res.fetchone()[0]
    print("Size of all files: {}".format(utils.humanbytes(size)))

    # Recomputing hashes for duplicates candidates
    # ---
    if (next_step
            or (last_step == "filelist_pre_hash" and last_id == "all")
            or (last_step == "pre_duplicates_rehash" and last_id != "all")):
        t, nb = pre_duplicates_rehash(cnx)
        print("Pre-duplicates rehashing duration: {:.2f} sec. for {} records.".
              format(t, nb))
        next_step = True
    else:
        print("Pre-duplicates rehashing already done.")

    # Dealing with duplicates
    # ---
    if next_step or last_step == "pre_duplicates_rehash":
        t, nb_dup, size_dup = duplicates_update(cnx)
    else:
        nb_dup, size_dup = duplicates_select(cnx)

    # Result summary
    # ---
    print("{} files have duplicates, total size of duplicate files is {}.".
          format(nb_dup, utils.humanbytes(size_dup)))

    # Closing database
    # ---
    cnx.close()
    return
def solver(G, source):
    # type: (nx.Graph, typing.Any) -> typing.Tuple[typing.List[typing.Any], int]
    """
    Produces the optimal TSP tour using the Held-Karp, dynamic programming
    approach - O(n^2 * 2^n).

    :param G: A fully connected networkx MultiDiGraph.
    :param source: A source node in G.
    :return: A list of nodes to visit, forming a TSP tour, and the cost of
        that tour.
    """
    utils.check_arguments(G, source)
    n = G.number_of_nodes()
    distance = utils.get_adjacency_dicts(G)
    # min_cost_dp[Index(v, S)]: cheapest cost of reaching v from source while
    # visiting exactly the nodes in S; parent[...] records the predecessor
    # for tour reconstruction.
    min_cost_dp = {}  # type: typing.Dict[Index, int]
    parent = {}  # type: typing.Dict[Index, typing.Any]
    nodes_except_source = list(G.nodes)
    nodes_except_source.remove(source)
    # Iterate subsets of non-source nodes in power-set order (presumably
    # smallest-first so sub-results exist when looked up — TODO confirm
    # _power_set's ordering).
    for _set in _power_set(nodes_except_source):  # type: set
        _set = set(_set)
        for current_vertex in nodes_except_source:
            # A vertex already inside the visited subset cannot be the
            # endpoint for that subset.
            if current_vertex in _set:
                continue
            index = Index(current_vertex, _set)
            min_cost = INFINITY
            min_prev_vertex = source
            # NOTE(review): deepcopy before set() looks redundant for
            # hashable value-like nodes, but _get_cost receives set_copy and
            # may mutate it — left untouched; verify _get_cost's contract
            # before simplifying.
            set_copy = set(copy.deepcopy(_set))
            for prev_vertex in _set:
                # Best way to end at current_vertex: best path ending at
                # prev_vertex over the rest of the subset, plus one edge.
                cost = distance[prev_vertex][current_vertex] + _get_cost(set_copy, prev_vertex, min_cost_dp)
                if cost < min_cost:
                    min_cost = cost
                    min_prev_vertex = prev_vertex
            # Base case: empty subset means a direct edge from source.
            if len(_set) == 0:
                min_cost = distance[source][current_vertex]
            min_cost_dp[index] = min_cost
            parent[index] = min_prev_vertex
    # Close the tour: pick the endpoint that minimizes (path cost + edge
    # back to source) over the full non-source set.
    _set = set(nodes_except_source)
    # NOTE(review): `min` shadows the builtin within this scope.
    min = INFINITY
    prev_vertex = None
    set_copy = copy.deepcopy(_set)
    for vertex in _set:
        cost = distance[vertex][source] + _get_cost(set_copy, vertex, min_cost_dp)
        if cost < min:
            min = cost
            prev_vertex = vertex
    parent[Index(source, _set)] = prev_vertex
    # Walk the parent pointers back into an ordered tour.
    tour = _get_tour(source, parent, G.nodes)
    return tour, min