Example #1
def random_walk(seed_node, n, folNm):  # in graph G
    neg_comp = nx_Graph()
    neg_comp.add_node(seed_node)
    node_num = 1
    pres_node = seed_node
    extra = 10
    while node_num < n + extra:
        with open(folNm + "/" + pres_node, 'rb') as f:
            neig_list = pickle_load(f)
        if not neig_list:
            logging_debug("No neighbours")
            break
        if len(neig_list) != 1:
            new_node = rand_choice(list(neig_list.keys()))
        else:
            new_node = list(neig_list.keys())[0]
        wt = neig_list[new_node]
        wt_edge = wt['weight']
        neg_comp.add_edge(pres_node, new_node, weight=wt_edge)
        if len(neg_comp.nodes()) == n:
            break
        pres_node = new_node
        node_num = node_num + 1
        # print(pres_node)
    return neg_comp
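These snippets rely on aliased imports from the surrounding project rather than the usual module names. A minimal sketch of what Example #1 needs to run on its own, assuming the aliases simply map to the standard networkx, pickle, random, and logging functions, and that folNm is a directory holding one pickled weighted adjacency dict per node (the toy graph and folder name below are made up for illustration):

# Assumed aliases - not shown in the snippets themselves
from networkx import Graph as nx_Graph
from pickle import load as pickle_load, dump as pickle_dump
from random import choice as rand_choice
from logging import debug as logging_debug

import os
import pickle
import networkx as nx

# Hypothetical setup: one pickled adjacency dict per node, as random_walk expects.
G = nx.Graph()
G.add_weighted_edges_from([("A", "B", 0.9), ("B", "C", 0.4), ("A", "C", 0.7)])
folNm = "neighbours"
os.makedirs(folNm, exist_ok=True)
for node in G.nodes():
    with open(folNm + "/" + node, 'wb') as f:
        pickle.dump(dict(G[node]), f)  # {neighbour: {'weight': w}, ...}

walk = random_walk("A", 3, folNm)
print(walk.edges(data=True))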
Example #2
def engine_picker(graph: Graph,
                  engine: str,
                  node_mode: bool = False) -> Union[str, Pos]:
    """Generate a position dict."""
    if not node_mode:
        graph_ = nx_Graph(_reversed_graph(graph).edges)
    else:
        graph_ = nx_Graph(graph.edges)
    if not isinstance(engine, str):
        return engine

    # TODO: More layout.
    # if engine == "spring":
    layout: Pos = spring_layout(graph_, scale=100)

    inf = float('inf')
    x_max = -inf
    x_min = inf
    y_max = -inf
    y_min = inf
    for x, y in layout.values():
        x = round(float(x), 4)
        y = round(float(y), 4)
        if x > x_max:
            x_max = x
        if x < x_min:
            x_min = x
        if y > y_max:
            y_max = y
        if y < y_min:
            y_min = y
    x_cen = (x_max + x_min) / 2
    y_cen = (y_max + y_min) / 2
    pos: Pos = {
        node: (round(float(x), 4) - x_cen, round(float(y), 4) - y_cen)
        for node, (x, y) in layout.items()
    }
    return pos
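Everything after the spring_layout call in Example #2 is a recentering step: it finds the bounding box of the layout and shifts every position so that box is centred on the origin. A standalone sketch of the same step on a plain networkx graph (the project's Graph and Pos types and its _reversed_graph helper are left out):

import networkx as nx

g = nx.path_graph(5)
layout = nx.spring_layout(g, scale=100)

xs = [round(float(x), 4) for x, _ in layout.values()]
ys = [round(float(y), 4) for _, y in layout.values()]
x_cen = (max(xs) + min(xs)) / 2
y_cen = (max(ys) + min(ys)) / 2

# Shift every position so the bounding box is centred on the origin.
pos = {
    node: (round(float(x), 4) - x_cen, round(float(y), 4) - y_cen)
    for node, (x, y) in layout.items()
}
print(pos)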
Example #3
def starting_edge(folNm, seed_node):
    with open(folNm + "/" + seed_node, 'rb') as f:
        neig_list = pickle_load(f)

    cd = 1
    if not neig_list:
        cd = 0
        return cd, None
    imp_neig = max(neig_list.items(), key=lambda elem: elem[1]['weight'])[0]
    # Largest weight neighbor - gives the most confident graphs
    wt_edge = neig_list[imp_neig]['weight']

    g1 = nx_Graph()
    g1.add_edge(seed_node, imp_neig, weight=wt_edge)
    return cd, g1
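The max(...) expression in Example #3 assumes the pickled neighbour list has the same shape as a networkx adjacency view: a dict mapping each neighbour to its edge-attribute dict. A small illustration of that idiom on a toy weighted graph:

import networkx as nx

G = nx.Graph()
G.add_weighted_edges_from([("seed", "a", 0.2), ("seed", "b", 0.8), ("seed", "c", 0.5)])

neig_list = dict(G["seed"])  # {"a": {"weight": 0.2}, "b": {"weight": 0.8}, ...}
imp_neig = max(neig_list.items(), key=lambda elem: elem[1]['weight'])[0]
print(imp_neig, neig_list[imp_neig]['weight'])  # b 0.8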
Example #4
def get_max_matchable_edges_networkx(self, ndds=None):
    """Find the set of edges which must be in every maximum matching.
    """
    g = nx_Graph()
    translate = {}
    for edge in self.es:
        v1 = edge.source()
        v2 = edge.target()
        if v2.index() < v1.index():
            continue
        new_edge = (v1.index(), v2.index())
        if self.edge_exists(v2, v1) or edge.donor().is_altruistic():
            g.add_node(new_edge[0])
            g.add_node(new_edge[1])
            g.add_edge(v1.index(), v2.index())
            translate[new_edge] = edge
    count = len(translate)
    for ndd in ndds:
        for edge in ndd.edges:
            v1 = edge.donor()
            v2 = edge.target()
            new_edge = (v2.index(), count + v1.index())
            g.add_node(new_edge[0])
            g.add_node(new_edge[1])
            g.add_edge(v2.index(), count + v1.index())
            translate[new_edge] = edge
    # TODO Add NDD edges to this graph!
    largest = max_weight_matching(g)
    LOGGER.debug("Largest matching has size %d", len(largest))
    edges = []
    for v1, v2 in largest.items():
        if v1 < v2:
            edges.append([v1, v2])
    matchable = []
    while edges:
        v1, v2 = edges.pop()
        LOGGER.debug("Testing [%s, %s]", v1, v2)
        g.remove_edge(v1, v2)
        new_max = max_weight_matching(g)
        if len(new_max) < len(largest):
            LOGGER.debug("new matching has size %d", len(new_max))
            edges = list(filter(lambda x: x[0] in new_max, edges))
            matchable.append((v1, v2))
            LOGGER.debug("[%s, %s] is matchable", v1, v2)
        g.add_edge(v1, v2)
    LOGGER.info("Found %s maximally matchable edges", len(matchable))
    return (translate[e] for e in matchable)
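The while loop in Example #4 tests each matched edge by deleting it and re-running the matching: if the maximum matching shrinks, that edge must appear in every maximum matching. Note that largest.items() assumes the older networkx API in which max_weight_matching returned a mate dictionary; newer releases return a set of edge tuples instead. A minimal sketch of the same test against the set-returning API, on a toy path graph:

import networkx as nx
from networkx.algorithms.matching import max_weight_matching

g = nx.Graph([(1, 2), (2, 3), (3, 4)])
largest = max_weight_matching(g)   # e.g. {(1, 2), (3, 4)} with the set-returning API

must_match = []
for u, v in list(largest):
    g.remove_edge(u, v)
    if len(max_weight_matching(g)) < len(largest):  # matching shrank: (u, v) is forced
        must_match.append((u, v))
    g.add_edge(u, v)
print(must_match)  # both outer edges of the path are forced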
Example #5
def search_metropolis_clique_start(scaler, par_inputs_fn, G_clique):
    # Picks out of a subset of its neighbors and adds the best node
    # print(seed_clique)
    with open(par_inputs_fn, 'rb') as f:
        inputs = pickle_load(f)
    with open(inputs['modelfname'], 'rb') as f:
        model = pickle_load(f)
    g1 = nx_Graph(G_clique)

    # Finding score
    score_prev, comp_bool = get_score(g1, model, scaler, inputs['model_type'])

    # Removing starting points which are not complexes
    if comp_bool == 0:
        return ([], 0)

    g1 = met(g1, model, scaler, inputs, score_prev)

    return g1
Example #6
def search_metropolis_clique_start(scaler, par_inputs_fn, G_clique):
    # Picks out of a subset of its neighbors and adds the best node
    # print(seed_clique)
    with open(par_inputs_fn, 'rb') as f:
        inputs = pickle_load(f)
    with open(inputs['modelfname'], 'rb') as f:
        model = pickle_load(f)
    g1 = nx_Graph(G_clique)

    # Finding score
    score_prev, comp_bool = get_score(g1, model, scaler, inputs['model_type'])

    # Removing starting points which are not complexes
    if score_prev < inputs["classi_thresh"]:
        return
    a, b = met(g1, model, scaler, inputs, score_prev)
    name = " ".join([str(n) for n in g1.nodes()])
    with open(folNm_out + "/" + name, 'wb') as f:
        pickle_dump((a, b), f)
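met(...) is a project-specific search routine that is not shown in these examples; the name and the score handling suggest a Metropolis-style acceptance rule. Purely as a generic illustration (the temperature T and the function itself are hypothetical, not the project's API), such an acceptance step usually looks like this:

import math
import random

def metropolis_accept(score_old, score_new, T=0.1):
    # Always accept an improvement; accept a worse candidate with
    # probability exp((score_new - score_old) / T).
    if score_new >= score_old:
        return True
    return random.random() < math.exp((score_new - score_old) / T)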
Example #7
def starting_edge_update(folNm, seed_node):
    with open(folNm + "/" + seed_node, 'rb') as f:
        neig_list = pickle_load(f)

    for neighh in neig_list:
        neig_list[neighh]['graph_neigs'] = [(seed_node,
                                             neig_list[neighh]['weight'])]
    cd = 1
    if not neig_list:
        cd = 0
        return cd, None, None
    imp_neig = max(neig_list.items(), key=lambda elem: elem[1]['weight'])[0]
    # Largest weight neighbor - gives the most confident graphs
    wt_edge = neig_list[imp_neig]['weight']

    g1 = nx_Graph()
    g1.add_edge(seed_node, imp_neig, weight=wt_edge)
    neig_list = update_neig_list(neig_list, imp_neig, folNm, g1.nodes())
    return cd, g1, neig_list
Example #8
def search_max_neig(seed_node, scaler, par_inputs_fn):
    with open(par_inputs_fn, 'rb') as f:
        inputs = pickle_load(f)
    with open(inputs['modelfname'], 'rb') as f:
        model = pickle_load(f)
    logging_debug("Seed node is %s", seed_node)
    folNm = inputs['folNm']
    with open(folNm + "/" + seed_node, 'rb') as f:
        neig_list = pickle_load(f)
    folNm_out = inputs['folNm_out']

    if not neig_list:
        return

    # Largest weight neighbor - gives the most confident graphs
    imp_neig = max(neig_list.items(), key=lambda elem: elem[1]['weight'])[0]
    wt = neig_list[imp_neig]
    wt_edge = wt['weight']

    score_curr = 0
    g1 = nx_Graph()
    g1.add_edge(seed_node, imp_neig, weight=wt_edge)

    max_nodes = inputs["max_size"]

    while True:

        logging_debug("Adding next node")

        imp_neigs = dict()
        g1_nodes = g1.nodes()
        for node in g1_nodes:
            # Get its max-weight neighbor and weight and store in dict
            with open(folNm + "/" + node, 'rb') as f:
                neig_list = pickle_load(f)

            # Remove neighbors already in graph - one small computation to save memory
            neig_fin = set(neig_list) - set(g1_nodes)
            neig_list = dict([neig for neig in list(neig_list.items()) if neig[0] in neig_fin])

            if not neig_list:  # Checking if empty
                break
            imp_neig = max(neig_list.items(), key=lambda elem: elem[1]['weight'])[0]
            wt = neig_list[imp_neig]
            wt_edge = wt['weight']
            imp_neigs[imp_neig] = wt_edge

        if not imp_neigs:
            logging_debug("No more neighbors to add")
            break

        node_to_add = max(imp_neigs, key=imp_neigs.get)  # Highest-weight candidate
        # Add all edges of the new node to the original graph
        with open(folNm + "/" + node_to_add, 'rb') as f:
            its_neig_list = pickle_load(f)

        orig_nodes = list(g1.nodes())
        for node in orig_nodes:
            if node in its_neig_list:
                wt = its_neig_list[node]
                wt_edge = wt['weight']
                g1.add_edge(node_to_add, node, weight=wt_edge)

        if len(g1) > max_nodes:
            logging_debug("Max size exceeded")
            break

        score_prev = score_curr

        score_curr, comp_bool = get_score(g1, model, scaler, inputs['model_type'])

        if comp_bool == 0:
            logging_debug("Not a complex")

            # Remove the node last added
            g1.remove_node(node_to_add)
            score_curr = score_prev
            break
    with open(folNm_out + "/" + seed_node, 'wb') as f:
        pickle_dump((list(g1.nodes()), score_curr), f)
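Example #8 writes one pickled (node_list, score) pair per seed node into folNm_out, so collecting the predicted complexes afterwards is just a matter of reading those files back (the folder name below is a placeholder):

import os
import pickle

folNm_out = "out_complexes"  # placeholder for inputs['folNm_out']
results = {}
for fname in os.listdir(folNm_out):
    with open(folNm_out + "/" + fname, 'rb') as f:
        nodes, score = pickle.load(f)
    results[fname] = (nodes, score)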