def _start(self):
    """Load the named community and show it, logging progress in debug mode."""
    if self.debug:
        print("Just start app.")
    # Fetch the community object for the configured name.
    self.public = cm.Community(self.community_name)
    if self.debug:
        print("We got info of community")
    self.public.display()
def cut_last_closure(graph, order, cs, cand, closure_history):
    """Collapse the community back to the last time its closure was minimal.

    If the expansion already ended on the minimum closure value, the current
    community/candidate pair is returned untouched.  Otherwise the community
    is rebuilt from ``order`` truncated at the last occurrence of the minimum.
    """
    lowest = min(closure_history)
    if closure_history[-1] == lowest:
        # Ended on the minimum closure — nothing to trim.
        return cs, cand
    # Distance from the end of the *last* occurrence of the minimum.
    # (Reverse, index, reverse-back restores the caller's list.)
    closure_history.reverse()
    cut = closure_history.index(lowest)
    closure_history.reverse()
    # Rebuild community and candidate structures from the truncated order.
    cs = community.Community()
    external_nodes = cs.init(graph, order[:-cut])
    cand = candidates.Candidates(graph, external_nodes, cs)
    cs.init_bounds(cand)
    cand.rework_fringe()
    return cs, cand
def __init__(self, **kwargs):
    """Build one column per commune member, each listing the member's
    name/age followed by a block per relation."""
    super().__init__(**kwargs)
    self.commune = community.Community().generate()
    self.cols = len(self.commune)
    for member in self.commune:
        # NOTE(review): self.name / self.box are rebound every iteration, so
        # after the loop they refer only to the last member's widgets —
        # preserved as original behavior.
        self.name = Label(text=member.full_name + " (" + str(member.age) + ")\n\n")
        for rel in member.relations:
            self.name.text += (
                f"{rel.full_name} {rel.age}\n"
                f"{'-'*len(rel.full_name)}\n"
                f"{member.relations[rel]}\n\n"
            )
        self.name.halign = "center"
        self.box = BoxLayout()
        self.box.add_widget(self.name)
        self.add_widget(self.box)
def main(file): print strftime("%Y-%m-%d %H:%M:%S", gmtime()) ##############create a graph from txt file############# G=bg.make_graph(file) print nx.info(G) ################local centrality to identify meaningful modules of the graph############# #we set 3000 as a module size as discusssed in our paper local_centrality=bg.Bridge(size=3000) bigmod, normmod=local_centrality.bridge_function(G) for _,j in enumerate(bigmod.values()): G1=G.subgraph(j) G2=local_centrality.create_ap_points(G1) _,norm_modules=local_centrality.bridge_function(G2) for i,k in enumerate(norm_modules.values(), start=len(normmod)): normmod[i]=k ################leader identification and community spreading phase############### communities={} comm_counter=0 for j,i in enumerate(normmod.values()): #for very small modules, we search for its neighbors and merge them in the communities if(len(i)<=9): sma=set(i) for k in i: sma.update(nx.neighbors(G, k)) communities[comm_counter]=sma comm_counter+=1 else: #to avoid local heterogeneity we set epsilon (merging value) of 0.75 for big modules and 0.30 for small modules if(10<=len(i)<=500): heterogeneity=0.30 else: heterogeneity=0.75 extract_G=G.subgraph(i) lead=ls.Leader_Identification(leader_epsilon=0.60) leaders=lead.leader_finding(extract_G) cd = dcs.Community() comm_list=cd.execute(extract_G, leaders, epsilon=heterogeneity, depth=2) for c in comm_list.keys(): communities[comm_counter]=c comm_counter+=1 #########results are stored in a file################## print strftime("%Y-%m-%d %H:%M:%S", gmtime()) out_file_com = open("communities", "w") idc = 0 for c in communities.values(): out_file_com.write("%s\n" % (' '.join([str(x) for x in sorted(c)]))) idc += 1 out_file_com.flush() out_file_com.close()
def expand(graph, subset, maxit, forced=0): """Expands the given subset with the more likely determined by Parameters ---------- graph : the networkx graph subset : the subset to be expanded maxit : the maximum number of iterations or nodes to expand by Method ------ See paper, expands by best candidate Returns ------- order : the order in which the nodes were engulfed """ # set up community and candidate data structures cs = community.Community() external_nodes = cs.init(graph, subset) cand = candidates.Candidates(graph, external_nodes, cs) cs.init_bounds(cand) cand.rework_fringe() # set up accounting data structures for experimentation order = list(subset) m = order[-1] stat_hist = [(copy.copy(cs.nodes[m]), cand.stat_import(cs.nodes[m]), cand.stats_string())] sd_hist = [cand.stat_import(cs.nodes[m])[cs.nodes[m]['reason']]] closure_hist = [closure(cs, cand)] count = 0 while (forced or closure(cs, cand) > 0) and (count < maxit): if closure(cs, cand) == 0: forced -= 1 m = cand.get_best() if m == None: if not forced: break else: m = cand.get_forced() if m == None: break if forced or cs.is_candidate(cand.close[m]): order.append(m) changed = cs.add_node(graph, m, cand.fringe) cand.add_connectivity(changed, m) cand.remove_node(m) else: print "BUG (?) in EXPAND" stat_hist.append( (copy.copy(cs.nodes[m]), cand.stat_import(cs.nodes[m]), cand.stats_string())) sd_hist.append(cand.stat_import(cs.nodes[m])[cs.nodes[m]['reason']]) closure_hist.append(closure(cs, cand)) count += 1 cs, cand = cut_last_closure(graph, order, cs, cand, closure_hist) imp = cand.stat_import({'e': cs.bounds['min_e'], 'p': cs.bounds['min_p']}) """ print "Finished in ", count, " steps." print " With Closure: ", closure(cs, cand), " With ", len(cs.nodes), " nodes." print " The standard deviation away for e is: ", imp['e'], " and p: ", imp['p'] """ return cs, cand, order, stat_hist, sd_hist, closure_hist
if seed is '': if past_seed: seed = past_seed else: print("No seed was provided. Using random seed") seed = random.randint(0, 100000000) print(seed) try: seed = int(seed) except (ValueError): print("Please enter an integer") seed = past_seed continue random.seed(a=seed) commune = community.Community().generate() past_seed = seed seed = None for i, c in enumerate(commune): if i == 0: print(f"{c.first_name} is {c.age} and is ", end='') elif i < len(commune) - 1: print( f"{c.first_name}'s ({c.age}) {c.relations[commune[i-1]]}, who is ", end='') else: print( f"{c.first_name}'s ({c.age}) {c.relations[commune[i-1]]}.") commune = sorted(commune, key=lambda x: x.age, reverse=True)
def initializeCommunities(self):
    """Seed ``self.communities`` with one Community per identified leader."""
    self.communities.extend(COMM.Community(leader) for leader in self.leaders)