def key_error_node():
    input1 = input("Key Error! Please check node name. "
                   + "Display list of nodes (y/n)?:")
    if input1 == "y" or input1 == "Y":
        print("List of Nodes: ")
        print("============== ")
        for k in d_node.keys():
            print(k)
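
# Illustrative sketch (assumption, not part of the original tool): the shape
# of the topology dict `d_node` that main() expects, inferred from how main()
# reads it. The name `d_node_example`, the node names, BridgeID bytes, link
# costs and ISID numbers below are made-up placeholders; the real `d_node` is
# expected to be provided elsewhere (e.g. a topology definition).
#   "BridgeID":    8 one-byte entries, each wrapped in a list
#                  (main() XORs entry[0] with the per-BVLAN ECT mask byte).
#   "Links":       [neighbor_name, link_cost] pairs; every link must exist in
#                  both directions with the same cost.
#   "ISID":        [isid, bvlan, t_bit, r_bit] entries.
#   "SystemState": "on" or "off" ("off" nodes are removed before calculation).
d_node_example = {
    "Node-1": {
        "BridgeID": [[b"\x00"], [b"\x00"], [b"\x00"], [b"\x00"],
                     [b"\x00"], [b"\x00"], [b"\x00"], [b"\x01"]],
        "Links": [["Node-2", 10]],
        "ISID": [[1000, 4000, 1, 1]],
        "SystemState": "on",
    },
    "Node-2": {
        "BridgeID": [[b"\x00"], [b"\x00"], [b"\x00"], [b"\x00"],
                     [b"\x00"], [b"\x00"], [b"\x00"], [b"\x02"]],
        "Links": [["Node-1", 10]],
        "ISID": [[1000, 4000, 1, 1]],
        "SystemState": "on",
    },
}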


def main():
    # Assign dict with ECT to BVLAN.
    # (assuming BVLAN 4000 has ECT#1 and 4015 has ECT#15).
    global d_ect  # Global declaration of dict, as also accessed by other classes.
    d_ect = {
        "4000": b"\x00", "4001": b"\xff", "4002": b"\x88", "4003": b"\x77",
        "4004": b"\x44", "4005": b"\x33", "4006": b"\xcc", "4007": b"\xbb",
        "4008": b"\x22", "4009": b"\x11", "4010": b"\x66", "4011": b"\x55",
        "4012": b"\xaa", "4013": b"\x99", "4014": b"\xdd", "4015": b"\xee",
    }
    # Add experimental ECT IDs, for calculating max possible path diversity.
    # Yeah right, VLANs > 4096 don't exist - but that doesn't matter here.
    for x in range(4016, 4256, 1):
        ectvalue = hex(x - 4000)  # ECT value as string.
        ectvalue_hex = ectvalue.replace("0x", "")
        d_ect[str(x)] = bytes.fromhex(ectvalue_hex)

    global d_ect_path
    d_ect_path = {}
    global d_node
    global d_edges_weight
    d_edges_weight = {}
    # v3_0: Create d_edges_isid, to count ISIDs using an edge.
    global d_edges_isid
    d_edges_isid = {}
    global d_edges
    d_edges = {}
    global d_node_xor
    d_node_xor = {}
    # v2_0: Statistics, on which byte a tiebreaker was found for ECT.
    global d_tiebreaker_byte
    d_tiebreaker_byte = {0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0, 7: 0}

    # To speed up computation in large networks, reduce here the number of
    # BVLANs to be tested (range is "low_bvl_to_test" - "max_bvl_to_test").
    global max_bvl_to_test
    # max_bvl_to_test should be your highest BVLAN plus one.
    max_bvl_to_test = 4016
    global low_bvl_to_test
    low_bvl_to_test = 4000

    # Plausibility checks, for correct BVLAN range.
    assert max_bvl_to_test > low_bvl_to_test, "Please correct \
max_bvl_to_test and low_bvl_to_test. max value must be larger than low value!"
    assert 4256 >= max_bvl_to_test >= 4000, "Please correct \
max_bvl_to_test! Specify value between 4000 and 4256"
    assert 4256 >= low_bvl_to_test >= 4000, "Please correct \
low_bvl_to_test! Specify value between 4000 and 4256"

    # Plausibility check on d_node.
    # Check for unidirectional links and cost mismatch.
    for i in d_node:
        for j in d_node[i]["Links"]:
            unidirectional_link_check = True
            link_cost_mismatch = True
            for k in d_node[j[0]]["Links"]:
                if i == k[0]:
                    unidirectional_link_check = False
                    if j[1] == k[1]:
                        link_cost_mismatch = False
            assert unidirectional_link_check is False,\
                "Unidirectional Link found on Node {}!".format(i)
            assert link_cost_mismatch is False,\
                "Link cost mismatch found on Node {}!".format(i)

    # Check for ISID/BVL mismatch and same BridgeID.
    for i in d_node:
        for isid in d_node[i]["ISID"]:
            check_isid = isid[0]
            check_bvl = isid[1]
            for j in d_node:
                if j != i:
                    assert d_node[i]["BridgeID"] != d_node[j]["BridgeID"],\
                        "Same BridgeID found on Node {} and {}!".format(i, j)
                    for isid in d_node[j]["ISID"]:
                        if isid[0] == check_isid:
                            assert check_bvl == isid[1],\
                                "Wrong ISID/BVLAN relation on Node {}!".format(j)

    """
    Modify your SPB network here to test behavior with more ISIDs / BVLANs
    etc.
    """
    # Temporary modification of d_node (e.g. to test more ISIDs):
    # for k in d_node.keys():
    #     for i in range(10):
    #         d_node[k]["ISID"].append([100000 + i, 4000 + i % 8, 1, 1])
    #         d_node[k]["ISID"].append([200000 + i, 4000 + i % 256, 1, 1])
    # d_node["Node-5"]["ISID"].append([8000, 4008, 1, 1])
    # d_node["Node-5"]["ISID"].append([8001, 4008, 1, 1])
    # d_node["Node-5"]["ISID"].append([8002, 4008, 1, 1])
    # d_node["Node-5"]["ISID"].append([8003, 4008, 1, 1])
    # d_node["Node-5"]["ISID"].append([8004, 4008, 1, 1])
    # d_node["Node-5"]["ISID"].append([8005, 4008, 1, 1])
    # d_node["Node-5"]["ISID"].append([8006, 4008, 1, 1])
    # d_node["Node-5"]["ISID"].append([8007, 4008, 1, 1])
    # d_node["Node-3"]["ISID"].append([8000, 4008, 1, 1])
    # d_node["Node-3"]["ISID"].append([8001, 4008, 1, 1])
    # d_node["Node-3"]["ISID"].append([8002, 4008, 1, 1])
    # d_node["Node-3"]["ISID"].append([8003, 4008, 1, 1])
    # d_node["Node-3"]["ISID"].append([8004, 4008, 1, 1])
    # d_node["Node-3"]["ISID"].append([8005, 4008, 1, 1])
    # d_node["Node-3"]["ISID"].append([8006, 4008, 1, 1])
    # d_node["Node-3"]["ISID"].append([8007, 4008, 1, 1])

    # Remove "off" nodes from dict.
    list_keys_to_remove = []
    for k in d_node:
        if d_node.get(k)["SystemState"] != "on":
            list_keys_to_remove.append(k)
    for k in range(len(list_keys_to_remove)):
        d_node.pop(list_keys_to_remove[k])

    # v2_0: Populate d_node_xor, results of xor operation of all BVLANs.
    for bvl in range(low_bvl_to_test, max_bvl_to_test):
        d_node_xor[bvl] = {}
        for k in d_node:
            d_node_xor[bvl][k] = []
    for bvl in range(low_bvl_to_test, max_bvl_to_test):
        for k in d_node:
            # Populate dict with xor result of SystemID.
            for i in range(8):
                d_node_xor[bvl][k].append([
                    byte_xor(
                        d_node.get(k)["BridgeID"][i][0],
                        d_ect.get(str(bvl)))
                ])

    # Get maximum string length of node names.
    global max_nodename
    max_nodename = 0
    for k in d_node.keys():
        if len(k) > max_nodename:
            max_nodename = len(k)

    # v4_0: Get a set of all BVLANs and ISIDs in loaded dict,
    # for having better display filter possibilities.
    global set_bvls_in_use
    global set_isids_in_use
    set_bvls_in_use = set()
    set_isids_in_use = set()
    for k in d_node:
        for i in range(len(d_node.get(k)["ISID"])):
            set_bvls_in_use.add(d_node.get(k)["ISID"][i][1])
            set_isids_in_use.add(d_node.get(k)["ISID"][i][0])

    # Plausibility check, that all used BVLANs are calculated.
    for i in set_bvls_in_use:
        assert i in range(low_bvl_to_test, max_bvl_to_test), "Please correct \
max_bvl_to_test and low_bvl_to_test! BVLAN " + str(i) + " is used, but not \
in that range!"

    # GraphView_v1_1 and View_v5 Plausibility check, that no node has
    # duplicate ISIDs.
    for k in d_node:
        for x in range(len(d_node[k]["ISID"])):
            dup_counter = 0
            for y in range(len(d_node[k]["ISID"])):
                if d_node[k]["ISID"][x][0] == d_node[k]["ISID"][y][0]:
                    dup_counter += 1
            assert dup_counter <= 1, "Error, Node {} has ISID {} configured \
multiple times!".format(k, d_node[k]["ISID"][x][0])

    # Create Graph and add all edges to d_edges, d_edges_isid,
    # and d_edges_weight.
    graph = Graph()
    for k in d_node:
        for i in range(len(d_node.get(k)["Links"])):
            if d_node.get(k)["SystemState"] == "on":
                # Only if the node is active (i.e. still a key in d_node).
                a = d_node.get(k)["Links"][i][0]
                if a in d_node.keys():
                    graph.add_edge(k, d_node.get(k)["Links"][i][0],
                                   d_node.get(k)["Links"][i][1])
                    key_d_edges = str(k) + "___" + str(a)
                    d_edges[key_d_edges] = []
                    d_edges_isid[key_d_edges] = []
                    d_edges_weight[key_d_edges] = d_node.get(k)["Links"][i][1]

    global d_dijkstra  # Global declaration of dict, as accessed by other fct.
    d_dijkstra = {}  # Set every node as S.
    start_time_dijkstra = time.time()
    for n in d_node:
        d_dijkstra[n] = {}  # Initialize dict.
        for bvl in range(low_bvl_to_test, max_bvl_to_test):
            dijkstra = DijkstraSPF(graph, n, str(bvl))
            d_dijkstra[n][bvl] = dijkstra
    global time_apsp_calculation
    time_apsp_calculation = time.time() - start_time_dijkstra

    # Populate d_edges.
    for src, dst, bvl in product(d_node, d_node, set_bvls_in_use):
        for i in range(len(d_dijkstra[src][bvl].get_path(dst))):
            if len(d_dijkstra[src][bvl].get_path(dst)) > 0 and \
                    i < len(d_dijkstra[src][bvl].get_path(dst)) - 1:
                key_d_edges = d_dijkstra[src][bvl].get_path(dst)[i] + "___" + \
                    d_dijkstra[src][bvl].get_path(dst)[i + 1]
                if key_d_edges in d_edges.keys():
                    d_edges[key_d_edges].append([src, dst, bvl])

    # Calculate tandem multicast mode.
    start_time_mcast = time.time()
    d_tandem_isid = {}
    set_bvl = set()
    d_tandem_mcast_src = {}
    d_tandem_mcast_rcv = {}
    global d_mcast_states
    d_mcast_states = {}  # End results to be displayed per node or in total.
    for node in d_node:
        d_mcast_states[node] = []

    # Populate d_tandem_mcast_src.
    for k in d_node:
        for i in range(len(d_node.get(k)["ISID"])):
            if d_node.get(k)["ISID"][i][2] == 1:  # Check T Bit of ISID.
                # Create the dict for MCSource and bvl.
                if k in d_tandem_mcast_src.keys():
                    d_tandem_mcast_src[k].\
                        append(d_node.get(k)["ISID"][i][0])
                else:
                    d_tandem_mcast_src[k] = \
                        [d_node.get(k)["ISID"][i][0]]
                if d_node.get(k)["ISID"][i][0] not in d_tandem_isid.keys():
                    d_tandem_isid[d_node.get(k)["ISID"][i][0]] = \
                        [d_node.get(k)["ISID"][i][1]]
                    set_bvl.add(d_node.get(k)["ISID"][i][1])
                else:
                    pass  # Only add Tandem ISID if key does not yet exist.

    # Populate d_tandem_mcast_rcv.
    for k in d_node:
        for i in range(len(d_node.get(k)["ISID"])):
            if d_node.get(k)["ISID"][i][3] == 1:  # Check R Bit of ISID.
                if k in d_tandem_mcast_rcv.keys():
                    d_tandem_mcast_rcv[k].append(d_node.get(k)["ISID"][i][0])
                else:
                    d_tandem_mcast_rcv[k] = \
                        [d_node.get(k)["ISID"][i][0]]
                if d_node.get(k)["ISID"][i][0] not in d_tandem_isid.keys():
                    d_tandem_isid[d_node.get(k)["ISID"][i][0]] = \
                        [d_node.get(k)["ISID"][i][1]]
                    set_bvl.add(d_node.get(k)["ISID"][i][1])
                else:
                    pass  # Only add Tandem ISID if key does not yet exist.

    # Calculate MC States, for every node, in every BVLAN.
    for node, mc_src_a in product(d_node, d_tandem_mcast_src):
        for isid in d_tandem_mcast_src[mc_src_a]:  # For every isid.
            for bvl, mc_rcv_b in product(d_tandem_isid[isid],
                                         d_tandem_mcast_rcv):
                if isid in d_tandem_mcast_rcv[mc_rcv_b] and node in \
                        d_dijkstra[mc_src_a][bvl].get_path(mc_rcv_b)[1:-1]:
                    pos_node_path = d_dijkstra[mc_src_a][bvl].get_path(
                        mc_rcv_b).index(node)
                    iil = d_dijkstra[mc_src_a][bvl].\
                        get_path(mc_rcv_b)[pos_node_path - 1]
                    oil = d_dijkstra[mc_src_a][bvl].\
                        get_path(mc_rcv_b)[pos_node_path + 1]
                    d_mcast_states[node].append(
                        [mc_src_a, isid, bvl, iil, oil])
                elif isid in d_tandem_mcast_rcv[mc_rcv_b] and node in \
                        d_dijkstra[mc_src_a][bvl].get_path(mc_rcv_b)[0] and \
                        len(d_dijkstra[mc_src_a][bvl].get_path(mc_rcv_b)) > 1:
                    pos_node_path = d_dijkstra[mc_src_a][bvl].get_path(
                        mc_rcv_b).index(node)
                    iil = "-"  # Node is START of path.
                    oil = d_dijkstra[mc_src_a][bvl].\
                        get_path(mc_rcv_b)[pos_node_path + 1]
                    d_mcast_states[node].append(
                        [mc_src_a, isid, bvl, iil, oil])
                elif isid in d_tandem_mcast_rcv[mc_rcv_b] and node in \
                        d_dijkstra[mc_src_a][bvl].get_path(mc_rcv_b)[-1] and \
                        len(d_dijkstra[mc_src_a][bvl].get_path(mc_rcv_b)) > 1:
                    pos_node_path = d_dijkstra[mc_src_a][bvl].get_path(
                        mc_rcv_b).index(node)
                    iil = d_dijkstra[mc_src_a][bvl].get_path(mc_rcv_b)[
                        pos_node_path - 1]
                    oil = "-"  # Node is END of path.
                    d_mcast_states[node].append(
                        [mc_src_a, isid, bvl, iil, oil])

    global time_mcast_calculation
    time_mcast_calculation = time.time() - start_time_mcast

    # Consolidate OIL of d_mcast_states.
    # First step: remove entries with an empty OIL if there are also entries
    # with an OIL. That is the case if a node is a receiver and also a
    # forkout point.
    for k in d_mcast_states.keys():
        mcast_states_to_delete = []  # Initialize empty for every node.
        for i in range(len(d_mcast_states[k])):
            if d_mcast_states[k][i][4] == "-":  # Check if node has empty OIL.
                # If so, check if node is also a forkout point.
                for j in range(len(d_mcast_states[k])):
                    if d_mcast_states[k][i][0] == d_mcast_states[k][j][0] and \
                            d_mcast_states[k][i][1] == d_mcast_states[k][j][1] and \
                            d_mcast_states[k][i][2] == d_mcast_states[k][j][2] and \
                            d_mcast_states[k][i][3] == d_mcast_states[k][j][3] and \
                            d_mcast_states[k][i][4] != d_mcast_states[k][j][4]:
                        # Add entry with empty OIL to delete list.
                        mcast_states_to_delete.append(d_mcast_states[k][i])
        for x in range(len(mcast_states_to_delete)):
            if mcast_states_to_delete[x] in d_mcast_states[k]:
                d_mcast_states[k].remove(mcast_states_to_delete[x])

    # Second step: if the OIL is not empty and has several entries, merge
    # them. That is the case if a node is a forkout point with several OILs,
    # or the sender.
    for k in d_mcast_states.keys():
        mcast_states_to_delete = []  # Initialize empty for every node.
        for i in range(len(d_mcast_states[k])):
            # Initialize the OIL that will be applied to the
            # mcast state after iteration.
            new_oil = d_mcast_states[k][i][4]
            # Check that entry was not already checked.
            if d_mcast_states[k][i] not in mcast_states_to_delete:
                # Check if node has an OIL (is a forkout point).
                if d_mcast_states[k][i][4] != "-":
                    for j in range(i + 1, len(d_mcast_states[k])):
                        if d_mcast_states[k][i][0] == \
                                d_mcast_states[k][j][0] and \
                                d_mcast_states[k][i][1] == \
                                d_mcast_states[k][j][1] and \
                                d_mcast_states[k][i][2] == \
                                d_mcast_states[k][j][2] and \
                                d_mcast_states[k][i][3] == \
                                d_mcast_states[k][j][3]:
                            # Add duplicate entry to delete list.
                            mcast_states_to_delete.append(
                                d_mcast_states[k][j])
                            # If this OIL is not already part of the merged
                            # OIL -> merge it.
                            if d_mcast_states[k][j][4] not in new_oil:
                                new_oil += ", " + d_mcast_states[k][j][4]
                    # Finally, assign new OIL to mcast entry.
                    d_mcast_states[k][i][4] = new_oil
        for x in range(len(mcast_states_to_delete)):
            if mcast_states_to_delete[x] in d_mcast_states[k]:
                d_mcast_states[k].remove(mcast_states_to_delete[x])

    # Population of d_ect_path.
    for a in d_node:
        for b in d_node:
            k = len(get_max_possible_path_a_to_b(a, b))  # Get number of hops.
            if k in d_ect_path.keys():
                if (a + " -> " + b) not in d_ect_path[k] and a != b:
                    d_ect_path[k].append(a + " -> " + b)
                else:
                    pass  # Path already exists for this key.
            elif a != b:
                d_ect_path[k] = [a + " -> " + b]

    # v3_0: Populate d_edges_isid.
    # For every edge:
    for k in d_edges.keys():
        # For every connection of an edge:
        for i in range(len(d_edges[k])):
            # For all ISIDs on source:
            for i_src in range(len(d_node[d_edges[k][i][0]]["ISID"])):
                # If ISID is on BVLAN of connection:
                if d_edges[k][i][2] == \
                        d_node[d_edges[k][i][0]]["ISID"][i_src][1]:
                    # For all ISIDs on destination:
                    for i_dst in range(len(d_node[d_edges[k][i][1]]["ISID"])):
                        # Check if ISID is on source AND destination.
                        # If yes: add connection to "d_edges_isid".
                        if d_node[d_edges[k][i][0]]["ISID"][i_src][0] == \
                                d_node[d_edges[k][i][1]]["ISID"][i_dst][0]:
                            d_edges_isid[k].append([
                                d_edges[k][i][0],
                                d_edges[k][i][1],
                                d_node[d_edges[k][i][0]]["ISID"][i_src][1],
                                d_node[d_edges[k][i][0]]["ISID"][i_src][0]
                            ])

    # Main User Menu for getting input from user.
    main_menu()
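

# NOTE: `byte_xor()` is called above when populating d_node_xor, but it is
# defined elsewhere in this project. The helper below is only a minimal
# sketch of the assumed behaviour (byte-wise XOR of two equal-length bytes
# objects, e.g. one BridgeID byte against the per-BVLAN ECT mask byte); the
# real implementation may differ.
def byte_xor_sketch(bytes_a, bytes_b):
    # XOR the two byte strings position by position.
    return bytes(a ^ b for a, b in zip(bytes_a, bytes_b))

# Example: XOR of BridgeID byte b"\x01" with ECT mask b"\xff" -> b"\xfe".
# byte_xor_sketch(b"\x01", b"\xff")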


def menu1():
    print("List of nodes: ")
    print("============== ")
    for k in d_node.keys():
        print(k)
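

# NOTE: `get_max_possible_path_a_to_b()` is called by main() but is not shown
# in this section. Judging from its name and its use (its len() is taken as
# the hop count that keys d_ect_path), it presumably returns the longest of
# the per-BVLAN shortest paths between two nodes. The function below is a
# sketch under that assumption only; it requires that main() has already
# populated d_dijkstra, low_bvl_to_test and max_bvl_to_test, and the real
# helper may be implemented differently.
def get_max_possible_path_a_to_b_sketch(a, b):
    longest_path = []
    # Compare the shortest path of every tested BVLAN/ECT and keep the
    # longest one.
    for bvl in range(low_bvl_to_test, max_bvl_to_test):
        path = d_dijkstra[a][bvl].get_path(b)
        if len(path) > len(longest_path):
            longest_path = path
    return longest_path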


def main():
    # Assign dict with ECT to BVLAN.
    # (assuming BVLAN 4000 has ECT#1 and 4015 has ECT#15).
    global d_ect  # Global declaration of dict, as also accessed by other classes.
    d_ect = {
        "4000": b"\x00", "4001": b"\xff", "4002": b"\x88", "4003": b"\x77",
        "4004": b"\x44", "4005": b"\x33", "4006": b"\xcc", "4007": b"\xbb",
        "4008": b"\x22", "4009": b"\x11", "4010": b"\x66", "4011": b"\x55",
        "4012": b"\xaa", "4013": b"\x99", "4014": b"\xdd", "4015": b"\xee",
    }
    # Add experimental ECT IDs, for calculating max possible path diversity.
    # Yeah right, VLANs > 4096 don't exist - but that doesn't matter here.
    for x in range(4016, 4256, 1):
        ectvalue = hex(x - 4000)  # ECT value as string.
        ectvalue_hex = ectvalue.replace("0x", "")
        d_ect[str(x)] = bytes.fromhex(ectvalue_hex)

    global d_ect_path
    d_ect_path = {}
    global d_node
    global d_edges
    d_edges = {}

    # Plausibility check on d_node.
    # Check for unidirectional links and cost mismatch.
    for i in d_node:
        for j in d_node[i]["Links"]:
            unidirectional_link_check = True
            link_cost_mismatch = True
            for k in d_node[j[0]]["Links"]:
                if i == k[0]:
                    unidirectional_link_check = False
                    if j[1] == k[1]:
                        link_cost_mismatch = False
            assert unidirectional_link_check is False,\
                "Unidirectional Link found on Node {}!".format(i)
            assert link_cost_mismatch is False,\
                "Link cost mismatch found on Node {}!".format(i)

    # Check for ISID/BVL mismatch and same BridgeID.
    for i in d_node:
        for isid in d_node[i]["ISID"]:
            check_isid = isid[0]
            check_bvl = isid[1]
            for j in d_node:
                if j != i:
                    assert d_node[i]["BridgeID"] != d_node[j]["BridgeID"],\
                        "Same BridgeID found on Node {} and {}!".format(i, j)
                    for isid in d_node[j]["ISID"]:
                        if isid[0] == check_isid:
                            assert check_bvl == isid[1],\
                                "Wrong ISID/BVLAN relation on Node {}!".format(j)

    """
    Modify your SPB network here to test behavior with more ISIDs / BVLANs
    etc.
    """
    # Temporary modification of d_node (e.g. to test more ISIDs):
    # for k in d_node.keys():
    #     for i in range(20):
    #         d_node[k]["ISID"].append([100000 + i, 4000 + i % 256, 1, 1])
    #         d_node[k]["ISID"].append([200000 + i, 4000 + i % 256, 1, 1])
    # d_node[k]["ISID"].append([202020, 4015, 1, 1])
    # d_node[k]["ISID"].append([202021, 4015, 1, 1])
    # d_node[k]["ISID"].append([202022, 4015, 1, 1])
    # d_node[k]["ISID"].append([202023, 4015, 1, 1])

    # Remove "off" nodes from dict.
    list_keys_to_remove = []
    for k in d_node:
        if d_node.get(k)["SystemState"] != "on":
            list_keys_to_remove.append(k)
    for k in range(len(list_keys_to_remove)):
        d_node.pop(list_keys_to_remove[k])

    # Get maximum string length of node names.
    global max_nodename
    max_nodename = 0
    for k in d_node.keys():
        if len(k) > max_nodename:
            max_nodename = len(k)

    # Get a set of all BVLANs in loaded dict,
    # for having better display filter possibilities.
    global set_bvls_in_use
    set_bvls_in_use = set()
    for k in d_node:
        for i in range(len(d_node.get(k)["ISID"])):
            set_bvls_in_use.add(d_node.get(k)["ISID"][i][1])

    # Create Graph and add all edges to d_edges.
    graph = Graph()
    for k in d_node:
        for i in range(len(d_node.get(k)["Links"])):
            if d_node.get(k)["SystemState"] == "on":
                # Only if the node is active (i.e. still a key in d_node).
                a = d_node.get(k)["Links"][i][0]
                if a in d_node.keys():
                    graph.add_edge(k, d_node.get(k)["Links"][i][0],
                                   d_node.get(k)["Links"][i][1])
                    key_d_edges = str(k) + "___" + str(a)
                    d_edges[key_d_edges] = []

    global d_dijkstra  # Global declaration of dict, as accessed by other fct.
    d_dijkstra = {}  # Set every node as S.
    start_time_dijkstra = time.time()
    for n in d_node:
        d_dijkstra[n] = {}  # Initialize dict.
        for bvl in range(4000, 4256):
            dijkstra = DijkstraSPF(graph, n, str(bvl))
            d_dijkstra[n][bvl] = dijkstra
    global time_apsp_calculation
    time_apsp_calculation = time.time() - start_time_dijkstra

    # Populate d_edges.
    for src, dst, bvl in product(d_node, d_node, set_bvls_in_use):
        for i in range(len(d_dijkstra[src][bvl].get_path(dst))):
            if len(d_dijkstra[src][bvl].get_path(dst)) > 0 and \
                    i < len(d_dijkstra[src][bvl].get_path(dst)) - 1:
                key_d_edges = d_dijkstra[src][bvl].get_path(dst)[i] + "___" + \
                    d_dijkstra[src][bvl].get_path(dst)[i + 1]
                if key_d_edges in d_edges.keys():
                    d_edges[key_d_edges].append([src, dst, bvl])

    # Calculate tandem multicast mode.
    start_time_mcast = time.time()
    d_tandem_isid = {}
    set_bvl = set()
    d_tandem_mcast_src = {}
    d_tandem_mcast_rcv = {}
    global d_mcast_states
    d_mcast_states = {}  # End results to be displayed per node or in total.
    for node in d_node:
        d_mcast_states[node] = []

    # Populate d_tandem_mcast_src.
    for k in d_node:
        for i in range(len(d_node.get(k)["ISID"])):
            if d_node.get(k)["ISID"][i][2] == 1:  # Check T Bit of ISID.
                # Create the dict for MCSource and bvl.
                if k in d_tandem_mcast_src.keys():
                    d_tandem_mcast_src[k].\
                        append(d_node.get(k)["ISID"][i][0])
                else:
                    d_tandem_mcast_src[k] = \
                        [d_node.get(k)["ISID"][i][0]]
                if d_node.get(k)["ISID"][i][0] not in d_tandem_isid.keys():
                    d_tandem_isid[d_node.get(k)["ISID"][i][0]] = \
                        [d_node.get(k)["ISID"][i][1]]
                    set_bvl.add(d_node.get(k)["ISID"][i][1])
                else:
                    pass  # Only add Tandem ISID if key does not yet exist.

    # Populate d_tandem_mcast_rcv.
    for k in d_node:
        for i in range(len(d_node.get(k)["ISID"])):
            if d_node.get(k)["ISID"][i][3] == 1:  # Check R Bit of ISID.
                if k in d_tandem_mcast_rcv.keys():
                    d_tandem_mcast_rcv[k].append(d_node.get(k)["ISID"][i][0])
                else:
                    d_tandem_mcast_rcv[k] = \
                        [d_node.get(k)["ISID"][i][0]]
                if d_node.get(k)["ISID"][i][0] not in d_tandem_isid.keys():
                    d_tandem_isid[d_node.get(k)["ISID"][i][0]] = \
                        [d_node.get(k)["ISID"][i][1]]
                    set_bvl.add(d_node.get(k)["ISID"][i][1])
                else:
                    pass  # Only add Tandem ISID if key does not yet exist.

    # Calculate MC States, for every node, in every BVLAN.
    for node, mc_src_a in product(d_node, d_tandem_mcast_src):
        for isid in d_tandem_mcast_src[mc_src_a]:  # For every isid.
            for bvl, mc_rcv_b in product(d_tandem_isid[isid],
                                         d_tandem_mcast_rcv):
                if isid in d_tandem_mcast_rcv[mc_rcv_b] and node in \
                        d_dijkstra[mc_src_a][bvl].get_path(mc_rcv_b)[1:-1]:
                    pos_node_path = d_dijkstra[mc_src_a][bvl].get_path(
                        mc_rcv_b).index(node)
                    iil = d_dijkstra[mc_src_a][bvl].\
                        get_path(mc_rcv_b)[pos_node_path - 1]
                    oil = d_dijkstra[mc_src_a][bvl].\
                        get_path(mc_rcv_b)[pos_node_path + 1]
                    d_mcast_states[node].append(
                        [mc_src_a, isid, bvl, iil, oil])
                elif isid in d_tandem_mcast_rcv[mc_rcv_b] and node in \
                        d_dijkstra[mc_src_a][bvl].get_path(mc_rcv_b)[0] and \
                        len(d_dijkstra[mc_src_a][bvl].get_path(mc_rcv_b)) > 1:
                    pos_node_path = d_dijkstra[mc_src_a][bvl].get_path(
                        mc_rcv_b).index(node)
                    iil = "-"  # Node is START of path.
                    oil = d_dijkstra[mc_src_a][bvl].\
                        get_path(mc_rcv_b)[pos_node_path + 1]
                    d_mcast_states[node].append(
                        [mc_src_a, isid, bvl, iil, oil])
                elif isid in d_tandem_mcast_rcv[mc_rcv_b] and node in \
                        d_dijkstra[mc_src_a][bvl].get_path(mc_rcv_b)[-1] and \
                        len(d_dijkstra[mc_src_a][bvl].get_path(mc_rcv_b)) > 1:
                    pos_node_path = d_dijkstra[mc_src_a][bvl].get_path(
                        mc_rcv_b).index(node)
                    iil = d_dijkstra[mc_src_a][bvl].get_path(mc_rcv_b)[
                        pos_node_path - 1]
                    oil = "-"  # Node is END of path.
                    d_mcast_states[node].append(
                        [mc_src_a, isid, bvl, iil, oil])

    global time_mcast_calculation
    time_mcast_calculation = time.time() - start_time_mcast

    # Consolidate OIL of d_mcast_states.
    # First step: remove entries with an empty OIL if there are also entries
    # with an OIL. That is the case if a node is a receiver and also a
    # forkout point.
    for k in d_mcast_states.keys():
        mcast_states_to_delete = []  # Initialize empty for every node.
        for i in range(len(d_mcast_states[k])):
            if d_mcast_states[k][i][4] == "-":  # Check if node has empty OIL.
                # If so, check if node is also a forkout point.
                for j in range(len(d_mcast_states[k])):
                    if d_mcast_states[k][i][0] == d_mcast_states[k][j][0] and \
                            d_mcast_states[k][i][1] == d_mcast_states[k][j][1] and \
                            d_mcast_states[k][i][2] == d_mcast_states[k][j][2] and \
                            d_mcast_states[k][i][3] == d_mcast_states[k][j][3] and \
                            d_mcast_states[k][i][4] != d_mcast_states[k][j][4]:
                        # Add entry with empty OIL to delete list.
                        mcast_states_to_delete.append(d_mcast_states[k][i])
        for x in range(len(mcast_states_to_delete)):
            if mcast_states_to_delete[x] in d_mcast_states[k]:
                d_mcast_states[k].remove(mcast_states_to_delete[x])

    # Second step: if the OIL is not empty and has several entries, merge
    # them. That is the case if a node is a forkout point with several OILs,
    # or the sender.
    for k in d_mcast_states.keys():
        mcast_states_to_delete = []  # Initialize empty for every node.
        for i in range(len(d_mcast_states[k])):
            # Initialize the OIL that will be applied to the
            # mcast state after iteration.
            new_oil = d_mcast_states[k][i][4]
            # Check that entry was not already checked.
            if d_mcast_states[k][i] not in mcast_states_to_delete:
                # Check if node has an OIL (is a forkout point).
                if d_mcast_states[k][i][4] != "-":
                    for j in range(i + 1, len(d_mcast_states[k])):
                        if d_mcast_states[k][i][0] == \
                                d_mcast_states[k][j][0] and \
                                d_mcast_states[k][i][1] == \
                                d_mcast_states[k][j][1] and \
                                d_mcast_states[k][i][2] == \
                                d_mcast_states[k][j][2] and \
                                d_mcast_states[k][i][3] == \
                                d_mcast_states[k][j][3]:
                            # Add duplicate entry to delete list.
                            mcast_states_to_delete.append(
                                d_mcast_states[k][j])
                            # If this OIL is not already part of the merged
                            # OIL -> merge it.
                            if d_mcast_states[k][j][4] not in new_oil:
                                new_oil += ", " + d_mcast_states[k][j][4]
                    # Finally, assign new OIL to mcast entry.
                    d_mcast_states[k][i][4] = new_oil
        for x in range(len(mcast_states_to_delete)):
            if mcast_states_to_delete[x] in d_mcast_states[k]:
                d_mcast_states[k].remove(mcast_states_to_delete[x])

    # Population of d_ect_path.
    for a in d_node:
        for b in d_node:
            k = len(get_max_possible_path_a_to_b(a, b))  # Get number of hops.
            if k in d_ect_path.keys():
                if (a + " -> " + b) not in d_ect_path[k]:
                    d_ect_path[k].append(a + " -> " + b)
                else:
                    pass  # Path already exists for this key.
            else:
                d_ect_path[k] = [a + " -> " + b]

    # Main User Menu for getting input from user.
    main_menu()
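

# NOTE (illustrative, not part of the original tool): each entry that main()
# stores in d_mcast_states[node] has the layout
#     [mcast_source_node, isid, bvlan, upstream_neighbor (IIL),
#      downstream_neighbor(s) (OIL)]
# where "-" marks a missing IIL (node is the source) or OIL (node is a pure
# receiver), and merged OILs are a comma-separated string. A commented
# example of dumping the states per node after main() has run:
# for node in d_mcast_states:
#     for src, isid, bvl, iil, oil in d_mcast_states[node]:
#         print(node, src, isid, bvl, iil, oil)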