def weight(main_graph, start_node: int, end_node: int) -> int:
    """
    Calculates the tentative weight of a given edge in the graph according to the link prediction rule
    while solving the shortest path problem with Dijkstra's algorithm.

    Parameters
    ----------
    main_graph: Graph
        The graph in which we want to assign the weight.
    start_node: int
        Index of the starting vertex of the edge.
    end_node: int
        Index of the end vertex of the edge.

    Notes
    -----
        The weight depends on the current capacity of the edge. If there are no more links available along the edge,
        it has to be rebuilt, so its cost is scaled by the physical distance between its endpoints.
    """

    if main_graph.get_edge_capacity(start_node, end_node) == 0:
        return routing_simulation.Settings().long_link_cost *\
               main_graph.physical_distance(start_node=start_node, end_node=end_node)
    else:
        return routing_simulation.Settings().original_cost
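# A minimal numeric sketch of the weighting rule above, with illustrative values only
# (the real long_link_cost and original_cost live in routing_simulation.Settings):
long_link_cost, original_cost = 1000, 1      # hypothetical settings values
physical_distance = 4                        # hypothetical hop distance between the endpoints
weight_if_depleted = long_link_cost * physical_distance   # 4000: capacity exhausted, scaled by distance
weight_if_available = original_cost                       # 1: the edge still has a usable link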
    def update_stored_weights(self, elapsed_time: int):
        """
        Finds the edges in the graph whose link consumption time has already passed according to the elapsed time,
        then updates their stored weights with the weight corresponding to the rebuild time of an unavailable link.

        Parameters
        ----------
        elapsed_time: int
            Elapsed time since the network has been serving demands.

        Notes
        -----
            This is the core of the link prediction rule: after the elapsed time, some nodes simply
            "act as if" they knew that links further away were missing.

        """
        edges_to_update = [x for x in self.edge_frequencies if self.link_consumption_time[x] < elapsed_time]
        for edge in edges_to_update:

            start_node = edge[0]
            end_node = edge[1]
            successful_rebuild_time = 1 / routing_simulation.Settings().rebuild_probability

            new_weight = successful_rebuild_time ** self.physical_distance(start_node, end_node)

            self.update_stored_weight_of_edge(start_node, end_node, new_weight)
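# A self-contained sketch of the rebuild-time weight assigned above, assuming a per-link
# rebuild probability p, so one elementary link takes 1/p time units on average to rebuild;
# the numbers below are illustrative only.
rebuild_probability = 0.25                   # hypothetical Settings().rebuild_probability
physical_distance = 3                        # hypothetical physical distance in hops
new_weight = (1 / rebuild_probability) ** physical_distance   # 64.0: exponential in the distance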
def compute_latency_to_rebuild(graph, initial_node: int, end_node: int,
                               no_link_length: int, exponential_scale: bool = True) -> tuple:
    """
    Processes the virtual links used in the current path and sums up the latency needed to rebuild the missing ones.

    Parameters
    ----------
    graph: Graph
        The graph in which we run our simulation.
    initial_node: int
        Index of the starting vertex, from which we are looking for the shortest path.
    end_node: int
        Index of the end vertex, towards which we are looking for the shortest path.

    no_link_length: int
        The overall length of the virtual links that need to be rebuilt.

    exponential_scale: bool
        Value determining whether or not the latency scales exponentially with the distance.
        If the value is False, a polynomial scaling is used.

    Returns
    -----
        A tuple of the latency and a boolean stating whether or not the rebuild could take place within the time window.

    Notes
    -----
        If the rebuilding process cannot take place within a pre-defined threshold time value, then we simply start
        rebuilding along the physical graph and compute the latency accordingly.
    """
    latency_to_rebuild = 0
    could_rebuild_in_time_window = True

    # Check if there are links which were not available through the path
    if no_link_length > 0:

        # If we cannot create the missing entangled links in the specific threshold time
        # Then simply generate entangled links along the physical graph
        local_settings = routing_simulation.Settings()
        successful_rebuild_time = 1 / local_settings.rebuild_probability

        time_to_rebuild_path = successful_rebuild_time ** no_link_length \
            if exponential_scale else no_link_length ** 2

        if local_settings.time_threshold < time_to_rebuild_path:

            could_rebuild_in_time_window = False

            if exponential_scale:
                latency_to_rebuild = successful_rebuild_time ** graph.physical_distance(initial_node, end_node)
            else:
                latency_to_rebuild = graph.physical_distance(initial_node, end_node) ** 2
        else:
            latency_to_rebuild += time_to_rebuild_path

    return latency_to_rebuild, could_rebuild_in_time_window
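# A self-contained numeric sketch of the threshold logic above, with illustrative values only
# (the real rebuild_probability and time_threshold come from routing_simulation.Settings):
rebuild_probability = 0.5                    # hypothetical per-link rebuild success probability
time_threshold = 100                         # hypothetical time window for rebuilding missing links
no_link_length = 4                           # total length of the missing virtual links on the path
time_to_rebuild_path = (1 / rebuild_probability) ** no_link_length     # 16.0 with exponential scaling
could_rebuild_in_time_window = time_to_rebuild_path <= time_threshold  # True for these values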
def local_knowledge_algorithm(graph_edges: list, number_of_source_destination_pairs: int, propagation_radius: int = 0,
                              exponential_scale: bool = True):
    """
    Runs the local knowledge algorithm specified by a propagation radius.
    The local knowledge of nodes about the virtual links used within a path is updated within
    a specified propagation radius.

    Parameters
    ----------
    graph_edges: list
        The graph edges to be added as local knowledge to the vertices of the graph.

    number_of_source_destination_pairs: int
        Specifies the number of demands that need to be generated.

    propagation_radius: int
        Radius within which the local knowledge of the nodes along the current path is updated.

    exponential_scale: bool
        Specifies whether long link creation scales exponentially or polynomially with time.
    """

    # Generate the specific graph object
    main_graph = create_graph_with_local_knowledge(graph_edges)

    result_for_source_destination = []
    for x in range(1, number_of_source_destination_pairs + 1):

        temp_result: tuple = ()

        simulation_settings = routing_simulation.Settings()

        source = random.randint(1, simulation_settings.number_of_nodes)
        dest = random.randint(1, simulation_settings.number_of_nodes)

        while source == dest:
            dest = random.randint(1, simulation_settings.number_of_nodes)

        # Initialize path
        # Determine shortest path based on local knowledge
        current_path = shortest_path.dijkstra(main_graph.vertices[source].local_knowledge, source, dest)
        current_distance = len(current_path)-1

        temp_result += (distribute_entanglement(main_graph, current_path, exponential_scale),)

        # Update local knowledge of the nodes that are along the current path
        update_local_knowledge(main_graph, current_path, propagation_radius)

        temp_result += (main_graph.get_sum_of_link_capacities(),)
        temp_result += (main_graph.get_available_link_count(),)
        temp_result += (current_distance,)
        result_for_source_destination.append(temp_result)
    return helper.map_tuple_gen(np.mean, zip(*result_for_source_destination))
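# A minimal usage sketch, assuming the repo's graph_edge_factory module is importable; the
# factory call mirrors the one used in the tests, and the pair count and radius are illustrative.
factory = graph_edge_factory.VirtualEdgeFactory(capacity=0)
deterministic_edges = factory.generate_deterministic_graph_edges()
averaged_measures = local_knowledge_algorithm(deterministic_edges,
                                              number_of_source_destination_pairs=10,
                                              propagation_radius=2)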
    def test_generate_random_pairs(self):

        number_of_pairs = 100
        generated_pairs = routing_algorithms.generate_random_pairs(number_of_pairs)
        number_of_nodes = routing_simulation.Settings().number_of_nodes

        self.assertEqual(number_of_pairs, len(generated_pairs))
        for source, destination in generated_pairs:
            self.assertNotEqual(source, destination)
            self.assertTrue(0 < source < number_of_nodes + 1)
            self.assertTrue(0 < destination < number_of_nodes + 1)
def write_results_to_file(simulation_results: list, algorithm_name: str,
                          approach: str, elapsed_time: int):
    importlib.reload(logging)
    logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s',
                        level=logging.DEBUG,
                        datefmt='%I:%M:%S')
    timestr = time.strftime("%y_%m_%d__%H_%M")

    # Create the logs directory if it does not exist yet
    directory = 'logs'
    if not os.path.exists(directory):
        os.makedirs(directory)

    fileh = logging.FileHandler(
        './' + directory + '/' + algorithm_name + '_' +
        str(routing_simulation.Settings().number_of_samples) + '_' + approach +
        '_' + timestr + '.log', 'a')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    fileh.setFormatter(formatter)

    log = logging.getLogger()  # root logger
    for hdlr in log.handlers[:]:  # remove all old handlers
        log.removeHandler(hdlr)
    log.addHandler(fileh)  # set the new handler

    # Names of simulation measures
    simulation_measures = [
        'Average waiting times:', 'Number of available links:',
        'Number of available edges:', 'Average distances:'
    ]

    if '' == approach:
        log.debug('Logging the simulation results of the ' + algorithm_name +
                  ' algorithm.')
    else:
        log.debug('Logging the simulation results of the ' + algorithm_name +
                  ' algorithm (using the ' + approach + ' approach).')
    log.debug('The simulation measures are as follows: ')
    log.debug(simulation_measures)
    log.debug('Detailed simulation results based on the graph measured: ')
    for graph_index in range(len(simulation_results)):
        log.debug('graph' + str(graph_index) + ':')

        # Log the stores containing the results
        log_graph_results(log, simulation_results[graph_index],
                          simulation_measures)

    # Log the elapsed time
    log.debug('The elapsed time was: ' + str(elapsed_time))
def generate_random_pairs(number_of_pairs: int) -> list:
    """
    Generates a certain number of random source-destination pairs.

    Parameters
    ----------
    number_of_pairs : int
        Integer specifying the number of source-destination pairs to be generated.

    Returns
    -----
        List of tuples containing the source and destination nodes
    """
    result = []
    number_of_nodes = routing_simulation.Settings().number_of_nodes

    for x in range(number_of_pairs):
        result.append(generate_random_source_destination(number_of_nodes))
    return result
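# A brief usage sketch: generates five random (source, destination) pairs, each with
# source != destination, drawn from the node range configured in routing_simulation.Settings.
random_pairs = generate_random_pairs(5)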
def global_knowledge_algorithm(main_graph, number_of_source_destination_pairs: int,
                               exponential_scale: bool = True) -> list:
    """
    Applies the global knowledge approach for a certain graph by generating a specific number of demands.

    Parameters
    ----------
    main_graph : Graph
        The graph in which we serve the demands according to the global knowledge approach.

    number_of_source_destination_pairs: int
        Specifies the number of demands that need to be generated.

    exponential_scale: bool
        Specifies whether long link creation scales exponentially or polynomially with time.

    Notes
    ----------
    The measures are added to each result tuple in the following order:
    (1) The waiting time
    (2) Number of available virtual links
    (3) Number of available edges
    (4) Distance of the path

    """
    result_for_source_destination = []
    number_of_nodes = routing_simulation.Settings().number_of_nodes
    for x in range(1, number_of_source_destination_pairs + 1):
        temp_result = ()

        source, dest = generate_random_source_destination(number_of_nodes)

        # Initialize path
        # The change in network is considered in this approach (path is UPDATED)
        current_path = shortest_path.dijkstra(main_graph, source, dest)

        temp_result += (distribute_entanglement(main_graph, current_path, exponential_scale),)
        temp_result += (main_graph.get_sum_of_link_capacities(),)
        temp_result += (main_graph.get_available_link_count(),)
        temp_result += (len(current_path)-1,)
        result_for_source_destination.append(temp_result)
    return result_for_source_destination
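# A minimal usage sketch mirroring the test setup below, assuming graph and graph_edge_factory
# are importable; the pair count is illustrative.
factory = graph_edge_factory.VirtualEdgeFactory(capacity=0)
main_graph = graph.Graph(factory.generate_deterministic_graph_edges())
per_demand_results = global_knowledge_algorithm(main_graph, number_of_source_destination_pairs=10)
# Each entry: (waiting time, available virtual links, available edges, path distance)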
    def test_on_demand_distribute_entanglement(self):
        factory = graph_edge_factory.VirtualEdgeFactory(capacity=0)
        deterministic_edges = factory.generate_deterministic_graph_edges()
        main_graph = graph.Graph(deterministic_edges)
        for x in range(3, factory.number_of_nodes + 1):
            path = [node for node in range(1, x)]
            local_settings = routing_simulation.Settings()
            unit_time_for_rebuild = (1 / local_settings.rebuild_probability)

            potential_latency = unit_time_for_rebuild**(len(path) - 1)

            if local_settings.time_threshold > potential_latency:
                self.assertEqual(
                    routing_algorithms.distribute_entanglement(
                        main_graph, path), potential_latency)
            else:
                self.assertEqual(
                    routing_algorithms.distribute_entanglement(
                        main_graph, path),
                    unit_time_for_rebuild**main_graph.physical_distance(
                        path[-1], path[0]))