def run_experiment_local_search(problem: StandardProblem,
                                problem_solver: SearchProblemSolver,
                                result_title: str = "graph",
                                experiment_count: int = 100) -> float:
    """ Solves problem using 50 different randomly selected start nodes.

    :param problem: problem which contains graph nodes
    :param problem_solver: specific implementation of ProblemSolver
    :param result_title: title which will be given to result image
    :param experiment_count: number of algorithm runs to be performed
    """
    if problem.dimension < experiment_count:
        raise ValueError(f"problem.dimension < {experiment_count}")

    distance_matrix: np.ndarray = utils.create_distance_matrix(problem)
    paths = []
    times = []
    for _ in range(experiment_count):
        time_start = time.time()
        path = problem_solver.solve(distance_matrix)
        time_end = time.time()
        times.append((time_end - time_start))
        paths.append(path)

    average_time = __process_results(problem, result_title, paths, times)
    return average_time
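
# Example usage (illustrative): the instance file name is an assumption, and
# GreedyLocalSearch is the solver class used in the global_convexity_tests
# example further below.
#
#     problem = tsplib95.load("kroA100.tsp")
#     solver = GreedyLocalSearch(use_node_swap=True)
#     average_time = run_experiment_local_search(problem, solver,
#                                                result_title="kroA100_greedy",
#                                                experiment_count=100)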


def __process_results(problem: StandardProblem,
                      result_title: str,
                      paths: list,
                      times: list,
                      search_invocations: list = None):
    distance_matrix: np.ndarray = utils.create_distance_matrix(problem)

    # Calculate min, max and average cycle lengths
    cycle_lengths = [utils.calculate_path_length(distance_matrix, path) for path in paths]
    minimum_length, shortest_cycle_index = min((val, idx) for (idx, val) in enumerate(cycle_lengths))
    maximum_length = max(cycle_lengths)
    average_length = round(sum(cycle_lengths) / len(cycle_lengths))

    maximum_time = max(times)
    minimum_time = min(times)
    average_time = round(sum(times) / len(times), 3)

    # Draw best cycle
    shortest_path = [index + 1 for index in paths[shortest_cycle_index]]
    result_title = f"{result_title}_{shortest_path[0]}"
    utils.draw_graph(problem, shortest_path, result_title, minimum_length)

    print(result_title)
    print(f"Path : {shortest_path}")
    print(f"Cycle length (min) : {minimum_length}")
    print(f"Cycle length (max) : {maximum_length}")
    print(f"Cycle length (avg) : {average_length}")
    print(f"Time (min) : {round(minimum_time * 1000.0)}ms")
    print(f"Time (max) : {round(maximum_time * 1000.0)}ms")
    print(f"Time (avg) : {round(average_time * 1000.0)}ms")

    if search_invocations:
        maximum_invocations = max(search_invocations)
        minimum_invocations = min(search_invocations)
        average_invocations = round(sum(search_invocations) / len(search_invocations), 3)
        print(f"Search invocations (min) : {minimum_invocations}")
        print(f"Search invocations (max) : {maximum_invocations}")
        print(f"Search invocations (avg) : {average_invocations}")

    print()
    return average_time


def run_experiment_constructive(problem: StandardProblem,
                                problem_solver: ProblemSolver,
                                result_title: str = "graph",
                                experiment_count: int = 50):
    """ Solves problem using 50 different randomly selected start nodes.

    :param problem: problem which contains graph nodes
    :param problem_solver: specific implementation of ProblemSolver
    :param result_title: title which will be given to result image
    :param experiment_count: number of algorithm runs to be performed
    """

    if problem.dimension < experiment_count:
        raise ValueError(f"problem.dimension < {experiment_count}")

    # Pick experiment_count distinct random start nodes (1-based node labels)
    random_nodes: Set[int] = set()
    while len(random_nodes) < experiment_count:
        random_nodes.add(random.randint(1, problem.dimension))

    # Create distance matrix and solve TSP problem using every random start node
    distance_matrix: np.ndarray = utils.create_distance_matrix(problem)
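    # Set each node's distance to itself to the maximum representable value
    # so a node is never chosen as its own successor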
    for i in range(len(distance_matrix)):
        distance_matrix[i, i] = np.iinfo(distance_matrix.dtype).max

    paths = []
    times = []
    for node_index in random_nodes:
        time_start = time.time()
        path = problem_solver.solve(distance_matrix, node_index - 1)
        time_end = time.time()
        times.append((time_end - time_start))
        paths.append(path)

    __process_results(problem, result_title, paths, times)
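
# Note: the distinct start nodes above could equivalently be drawn in one call
# with random.sample, e.g.:
#
#     random_nodes = set(random.sample(range(1, problem.dimension + 1), experiment_count))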


def global_convexity_tests(problem: StandardProblem,
                           number_of_solutions: int = 1000,
                           similarity_function=node_similarity,
                           title: str = ""):
    """ Generates local optima in parallel and plots how solution cost relates to
    similarity with the best solution found and with the other local optima.

    :param problem: problem which contains graph nodes
    :param number_of_solutions: number of local optima to generate
    :param similarity_function: function used to compare two solutions
    :param title: title used for the printed summary and the saved plot
    """
    distance_matrix = create_distance_matrix(problem)

    problem_solver = GreedyLocalSearch(use_node_swap=True)

    pool = Pool(processes=os.cpu_count())
    pool_results = []
    solutions = []
    for i_ in range(number_of_solutions):
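        # Once one job per CPU core is pending, collect the finished batch before queueing more work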
        if len(pool_results) == os.cpu_count():
            for pool_res in pool_results:
                solution = pool_res.get()
                solutions.append(np.array(solution))
            pool_results.clear()

        res = pool.apply_async(problem_solver.solve, (distance_matrix, ))
        pool_results.append(res)

    for pool_res in pool_results:
        solution = pool_res.get()
        solutions.append(np.array(solution))
    pool_results.clear()
    pool.close()
    pool.join()

    solution_cost = np.array([
        calculate_path_length(distance_matrix, list(cycle))
        for cycle in solutions
    ])
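    # Take the cheapest local optimum as the reference (best) solution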
    best_cost_index = np.argmin(solution_cost)
    best_solution = solutions[best_cost_index]

    similarity = \
        np.array([similarity_function(cycle, best_solution) for cycle in solutions])

    # For each solution, record its similarity to the best solution and its
    # average similarity to every other solution in the sample.
    average_other_similarity = np.zeros(shape=similarity.shape)
    best_solution_similarity = similarity.copy()
    for i in range(len(solutions)):
        other_similarities = [
            similarity_function(solutions[i], solutions[j])
            for j in range(len(solutions))
            if j != i
        ]
        average_other_similarity[i] = np.average(other_similarities)

    correlation = np.corrcoef(solution_cost, average_other_similarity)[0][1]
    print(f"Correlation parameter ({title}) : {correlation}")

    indices = np.argsort(solution_cost)

    plt.scatter(solution_cost[indices],
                average_other_similarity[indices],
                label="Average other similarity")
    plt.scatter(solution_cost[indices],
                best_solution_similarity[indices],
                label="Best solution similarity")
    plt.legend()
    plt.title(title)
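    # Assumes the ./graphs directory already exists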
    plt.savefig(f"./graphs/{title}.pdf")
    plt.show()
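

# A minimal usage sketch: the tsplib95 loader call and the instance file name are
# illustrative assumptions, and node_similarity is the default similarity function
# referenced above. The __main__ guard matters on platforms that spawn worker
# processes for the multiprocessing Pool (e.g. Windows and macOS).
if __name__ == "__main__":
    import tsplib95

    problem = tsplib95.load("kroA100.tsp")  # hypothetical TSPLIB instance file
    global_convexity_tests(problem,
                           number_of_solutions=1000,
                           similarity_function=node_similarity,
                           title="kroA100_node_similarity")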