def build_network_tree(self, w, p_a, module_list, ql_init=9999):
    """ build up a network tree """
    # initialize quality functions
    QL = ql.Quality()

    # register the initial clustering result to the Tree object
    initial_parent_id = 0
    self.__Tree.add_one_level(module_list, initial_parent_id)

    # indicate the initial tree state
    print("initial state of tree")
    self.__Tree.tree_draw_with_ete3(0)

    # calculate the initial ql value
    self.ql_global_best = QL.get_hierarchical_quality_value(
        self.__Tree.get_tree_list(), self.glob_w, self.glob_pa)
    self.final_store = copy.deepcopy(self.__Tree.get_tree_list())
    print("initial global quality value: ", self.ql_global_best)

    # start the recursive extension of branches
    self.one_level_finer(w, p_a, initial_parent_id, ql_init)

    print("final state of tree")
    #self.__Tree.print_tree()
    self.__Tree.tree_draw_with_ete3(initial_parent_id, self.ql_global_best)
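# build_network_tree (and one_level_finer below) keep the best tree found so far by
# deep-copying the tree list before risky modifications and restoring that copy when
# the quality value does not improve. The stand-alone sketch below only illustrates
# this snapshot/restore pattern; the function name, the score_fn/mutate_fn callables
# and n_trials are illustrative, and a lower score is assumed to be better, as for a
# codelength.
import copy

def _keep_best_sketch(state, score_fn, mutate_fn, n_trials=10):
    """Illustrative only: adopt a mutated copy of `state` when its score improves."""
    best_state = copy.deepcopy(state)
    best_score = score_fn(best_state)
    for _ in range(n_trials):
        trial = copy.deepcopy(best_state)
        mutate_fn(trial)                      # modify the trial copy in place
        trial_score = score_fn(trial)
        if trial_score < best_score:          # lower is better (codelength-like)
            best_state, best_score = trial, trial_score
    return best_state, best_score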
def restart_clustering(self, w, p_a, parent_id):
    """ restart network division from the state after submodule movement,
        then resume the recursive clustering """
    QL = ql.Quality()
    #print("##### start re-clustering for node id", parent_id)
    node_list, module_list = self.__Tree.subtree2modulelist(parent_id)
    #print("##### module list before\n", module_list)
    w_part, pa_part, id_glo_loc = self.extract_partial_w_pa(
        w.tocsr(), p_a, module_list)
    #print("##### w_part pa_part check", w_part, pa_part, id_glo_loc)

    # restart clustering from this state
    restarted_cluster = cc.Cluster_Core(w_part, pa_part, node_list, module_list)
    # take the new state of the module list
    module_list = restarted_cluster.get_modules()
    restarted_cluster.set_nodes_global_id(id_glo_loc)
    #print("##### module list after\n", module_list)

    # modify the tree composition
    self.__Tree.replace_subtree(parent_id, module_list)

    ql_now = restarted_cluster.get_ql_final()
    #ql_now = QL.get_hierarchical_quality_value(self.__Tree.get_tree_list(), self.glob_w, self.glob_pa)
    return ql_now
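# restart_clustering (and one_level_finer below) delegate to extract_partial_w_pa to
# cut out the rows/columns of the global weight matrix w and the entries of p_a that
# belong to one subtree, together with a global-to-local id mapping. The SciPy sketch
# below only illustrates that kind of extraction; the helper name, the return layout
# and the assumption that node ids index w directly are not the actual
# extract_partial_w_pa implementation.
import numpy as np
from scipy.sparse import csr_matrix

def _extract_submatrix_sketch(w_csr, p_a, global_node_ids):
    """Illustrative only: restrict w and p_a to the given global node ids."""
    ids = np.asarray(sorted(global_node_ids))
    w_part = w_csr[ids, :][:, ids]               # keep only rows/cols of the subtree
    pa_part = np.asarray(p_a)[ids]               # visit probabilities of those nodes
    id_glo_loc = {int(g): l for l, g in enumerate(ids)}   # global id -> local index
    return w_part, pa_part, id_glo_loc

# toy usage: extract the subnetwork of nodes {0, 2, 3} from a 4-node chain
#_w = csr_matrix(np.array([[0, 1, 0, 0], [1, 0, 1, 0], [0, 1, 0, 1], [0, 0, 1, 0]], float))
#_extract_submatrix_sketch(_w, np.full(4, 0.25), [0, 2, 3])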
def one_level_finer(self, w, p_a, grand_parent_id, ql_init):
    """ this function tries to expand each branch of the tree
        by being called recursively

        tree_elements, level:
            .            ------------ grand parent
            | \
            .  .         -------- parent
            |\ |\
            . . . .      ----- child
            : : : :
    """
    # initiation
    QL = ql.Quality()
    loop_count = 0
    ql_best = ql_init
    ql_now = None
    store_tree = copy.deepcopy(self.__Tree.get_tree_list())

    while loop_count < cf.num_trial:
        # for fast convergence: subtrees other than the root are tried only once
        if grand_parent_id != 0:
            loop_count = cf.num_trial

        queue_ids = copy.deepcopy(
            self.__Tree.get_element_object(grand_parent_id).id_child)

        while queue_ids:
            # get the parent id of this branch
            parent_id = queue_ids[0]
            mod = self.__Tree.tree_ele2one_module(parent_id)
            num_nodes = mod.get_num_nodes()

            if num_nodes == 1:
                # a module with only one member cannot be divided any further
                pass
            else:
                # extract the partial w matrix and pa array
                w_part, pa_part, id_glo_loc = self.extract_partial_w_pa(
                    w.tocsr(), p_a, mod)
                sub_level = cc.Cluster_Core(w_part, pa_part)
                # set global node ids
                sub_level.set_nodes_global_id(id_glo_loc)
                sub_modules = sub_level.get_modules()

                if len(sub_modules) == 1 or len(sub_modules) == num_nodes:
                    pass
                else:
                    # get quality value
                    ql_temp = sub_level.get_ql_final()
                    # append a branch to the tree: register the new level
                    self.__Tree.add_one_level(sub_modules, parent_id)
                    erased_id = self.one_level_finer(
                        w, p_a, parent_id, ql_temp)
                    #ql_temp = sub_level.get_ql_final()
                    #ql_temp = QL.get_hierarchical_quality_value(self.__Tree.get_tree_list(), self.glob_w, self.glob_pa)

                    # modify the queue list: ids above the erased one shift down by one
                    if erased_id is not None:
                        for i in range(len(queue_ids)):
                            ele_id = queue_ids[i]
                            if ele_id >= erased_id:
                                queue_ids[i] -= 1

            # erase a queue entry already done
            queue_ids.pop(0)

        # reconstruct module_list from the subtree
        node_list, module_list = self.__Tree.subtree2modulelist(
            grand_parent_id)
        # restart clustering
        ql_now = self.restart_clustering(w, p_a, grand_parent_id)

        # if the quality of this subtree is improved
        if QL.check_network_got_better(ql_best, ql_now):
            ql_best = ql_now
            # store the state of the entire tree
            store_tree = copy.deepcopy(self.__Tree.get_tree_list())
            #self.final_store = copy.deepcopy(self.__Tree.get_tree_list())
        else:
            # go to the next loop
            pass

        loop_count += 1

        #if grand_parent_id == 1:  ######## for test
        ql_global_temp = QL.get_hierarchical_quality_value(
            self.__Tree.get_tree_list(), self.glob_w, self.glob_pa)
        if QL.check_network_got_better(self.ql_global_best, ql_global_temp):
            self.ql_global_best = ql_global_temp
            self.final_store = copy.deepcopy(self.__Tree.get_tree_list())

        if grand_parent_id == 0:
            #print("ql_best", ql_best)
            #print(store_tree)
            self.__Tree.tree_draw_with_ete3(0, ql_now)
            # erase the leading "#" to show the tree state at each step
            #print(self.__Tree.print_tree())
            #self.__Tree.tree_draw_with_ete3(0, ql_global_temp)
    ### end while loop

    # reload the best state of the tree
    self.__Tree.set_tree_list(store_tree)

    # submodule movement is invoked when the extension of one subtree has stopped
    if grand_parent_id != 0:
        #print("all branches of this subtree finished")
        #print("id", grand_parent_id, "will be erased")
        self.submodule_movement_onesubtree(grand_parent_id)
        erased_id = grand_parent_id
    else:
        #print("recursive tree branch extension finished")
        self.__Tree.set_tree_list(self.final_store)
        erased_id = None

    #print("ql initial ---> best", ql_init, " ---> ", ql_best)
    return erased_id
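# one_level_finer realises a recursive refinement: every child of a parent module is
# tentatively split, the split is kept only when the quality value improves, and
# accepted splits are refined again one level deeper. The stand-alone sketch below
# shows that control flow on plain lists of node ids; the split_fn/quality_fn
# callables are stand-ins, not the module's cc.Cluster_Core or ql.Quality, and a
# lower quality value is treated as better.
def _refine_sketch(nodes, split_fn, quality_fn):
    """Illustrative only: recursively split `nodes` while the quality improves."""
    parts = split_fn(nodes)
    # stop on trivial splits: everything in one part, or one node per part
    if len(parts) <= 1 or len(parts) == len(nodes):
        return [list(nodes)]
    if quality_fn(parts) >= quality_fn([list(nodes)]):
        return [list(nodes)]          # no improvement: keep this branch whole
    # recurse into every accepted part, building one subtree per part
    return [_refine_sketch(part, split_fn, quality_fn) for part in parts]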
import boundary
import solver
import mesh
import quality

if __name__ == '__main__':
    MESH_DIR = 'usMeshSmall/'
    BOUNDARY_DICT = boundary.BoundaryDict(MESH_DIR + 'boundary.JSON')
    mesh = mesh.Mesh(MESH_DIR, BOUNDARY_DICT)

    q = quality.Quality(mesh)
    q.print_stats()

    s = solver.UnstructuredSolver(mesh)
    s.solve(it=2500, e=0.00001)
    s.results_to_foam()
def __init__(self, w, p_a, *init_nods_mods):
    self.__nodes = []
    self.__modules = []
    # theoretical limit of the code length by Shannon's source coding theorem
    self.minimum_codelength = 0.
    keys_for_node_extract = [[-1]]

    if len(init_nods_mods) == 0:
        # initialize the node/module object lists from w and p_a
        self.init_nods_mods(p_a)
    else:
        # start clustering from already separated modules
        self.__nodes = init_nods_mods[0]
        self.__modules = init_nods_mods[1]

    # quality object
    QL = ql.Quality()
    ql_initial = QL.get_quality_value(self.__modules, w, p_a)
    # variable for following the change of the community quality
    ql_now = ql_initial
    # count how many times the 1st step has been attempted
    attempt_count = 0
    # initial number of modules
    num_modules = len(self.__modules)
    # total number of nodes
    total_num_nodes = 0
    for i, mod in enumerate(self.__modules):
        total_num_nodes += mod.get_num_nodes()
    # prepare the merged w matrix and p_a array for the aggregated network
    w_merged = w
    pa_merged = p_a

    ###-###-# first loop: continue node movement until the code length stops improving
    while True:
        #print("\nSearch algorithm: ", attempt_count, " attempt start\n")
        #print("### 1st step --- node movement ###")

        # (re-)generate the random order for picking a node to be moved
        random_sequence = np.arange(num_modules)
        np.random.shuffle(random_sequence)
        # array to store module ids without members
        module_id_to_be_erased = []
        # store the ql value for checking its change between passes
        ql_before = ql_now

        ###-###-###-# second loop: for each node movement
        for i in range(len(random_sequence)):
            # find a module for which the quality value becomes the best score;
            # mp_i is the position of the module to be moved in self.__modules[] (starts from 0)
            mp_i = random_sequence[i]
            num_nodes = self.__modules[mp_i].get_num_nodes()
            # skip the attempt for modules without member nodes
            if num_nodes == 0:
                break

            # get the list of node ids to be moved: all nodes of one module are moved
            # to a neighboring module, which is equivalent to moving the rebuilt nodes
            # after network re-construction in the Louvain method
            node_ids_to_be_moved = self.__modules[mp_i].get_node_list()

            for i_l, id_node_moved in enumerate(node_ids_to_be_moved):
                # get a list of neighboring module ids
                neighbor_list = self.__modules[mp_i].get_neighbor_list(
                    w_merged, self.__modules, id_node_moved)

                if len(neighbor_list) != 0:
                    # remove the node from its module
                    self.__modules[mp_i].remove_node(id_node_moved)
                    ql_min = ql_now  # dump ql value
                    # dump module id for the destination
                    dump_mod_id = -1

                    ###-###-###-###-###-###-# third loop: for moving a node to neighboring modules
                    for index, mod_id_neigh in enumerate(neighbor_list):
                        # dump the neighbor module object
                        #print("attempt: ", attempt_count, "n-th node: ", i, " move trial: ", index, "mod_id_neigh: ", mod_id_neigh)
                        if mod_id_neigh != self.__modules[mod_id_neigh - 1].get_module_id():
                            print("neighbor module id and list id not matched")
                            sys.exit(1)
                        dump_module = copy.deepcopy(self.__modules[mod_id_neigh - 1])
                        # add the node to one of the neighboring modules
                        self.__modules[mod_id_neigh - 1].add_node_temp(id_node_moved)
                        # calculate the code length
                        # (if all nodes are in the same module the map equation is not defined)
                        ql_trial = QL.get_quality_value(
                            self.__modules, w_merged, pa_merged)
                        #print("ql change, minimum_ql ---> this trial node move: ", ql_min, " ---> ", ql_trial)
                        if QL.check_network_got_better(ql_min, ql_trial):
                            # the clustering became better
                            ql_min = ql_trial
                            dump_mod_id = mod_id_neigh
                            success_dump = copy.deepcopy(self.__modules[mod_id_neigh - 1])
                            # revert the temporary node movement
                            self.__modules[mod_id_neigh - 1] = copy.deepcopy(dump_module)
                        else:
                            # revert the temporary node movement
                            self.__modules[mod_id_neigh - 1] = copy.deepcopy(dump_module)

                    # when any ql improvement happened
                    if QL.check_network_got_better(ql_now, ql_min):
                        # adopt the best neighbor module as the destination of the movement
                        self.__modules[dump_mod_id - 1] = copy.deepcopy(success_dump)
                        # update the ql value
                        ql_now = ql_min
                    else:
                        # no improvement found
                        #print("\n### destination not found\n\n")
                        # return the node to its former module
                        self.__modules[mp_i].add_node_temp(id_node_moved)

        # print to indicate the node movement of each step
        #print(self.__modules)
        #print("### attempt count:", attempt_count, ", 1st step end")

        module_id_to_be_erased = self.get_module_ids_without_node(self.__modules)
        #print("we are just removing modules: ", module_id_to_be_erased)

        # module id rename
        module_id_to_be_erased.sort()
        erase_count = 0
        for ind, mod_id in enumerate(module_id_to_be_erased):
            # erase __module objects which have no node member
            self.__modules.pop(mod_id - 1 - erase_count)
            erase_count += 1
        # rename and sort module ids
        self.rename_sort_module_id(self.__modules, self.__nodes)

        # build the summed module list
        keys_for_node_extract = self.compress_modules(
            self.__modules, keys_for_node_extract, total_num_nodes)
        # get the summed pa array and w matrix
        pa_merged, w_merged = self.get_merged_pa_w_array(
            w, p_a, self.__modules, keys_for_node_extract)

        # reset the number of modules
        num_modules = len(self.__modules)
        # reset the total number of nodes
        total_num_nodes = 0
        for i, mod in enumerate(self.__modules):
            total_num_nodes += mod.get_num_nodes()

        # exit the search algorithm when the change of the quality value falls below the threshold
        if QL.check_network_converged(ql_before, ql_now):
            #print("# clustering core algorithm converged")
            #print("# improved quality value: ", ql_now)
            #print("# difference %: ", ql_now / ql_initial * 100)
            self.ql_final = ql_now
            break

        attempt_count += 1

    # output the division result of this step with local node ids
    #print("modules divided:\n", self.__modules)

    # rebuild the module list
    del self.__modules[:]
    self.rebuild_module_list(keys_for_node_extract)
    # calculate enter/exit/internal link weights
    self.sum_link_weight_and_set(w, p_a, self.__modules)
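# The quality value minimised by the loops above is a codelength (see the Shannon
# source-coding comment on minimum_codelength and the "map equation is not defined"
# note in the third loop). The sketch below is one common form of the two-level map
# equation for a hard partition, written with numpy; it only illustrates the kind of
# value ql.Quality.get_quality_value could return and is not the actual
# implementation. The per-module exit probabilities q_exit are assumed to be given.
import numpy as np

def _plogp(x):
    """x * log2(x) with the 0 * log 0 = 0 convention."""
    x = np.asarray(x, dtype=float)
    safe = np.where(x > 0, x, 1.0)
    return np.where(x > 0, x * np.log2(safe), 0.0)

def _map_equation_sketch(p_a, modules, q_exit):
    """Two-level codelength: index codebook plus one codebook per module."""
    p_a = np.asarray(p_a, dtype=float)
    q_exit = np.asarray(q_exit, dtype=float)
    # total visit probability of each module plus its exit probability
    p_circ = np.array([q + p_a[list(mod)].sum() for mod, q in zip(modules, q_exit)])
    return float(_plogp(q_exit.sum())
                 - 2.0 * _plogp(q_exit).sum()
                 + _plogp(p_circ).sum()
                 - _plogp(p_a).sum())

# toy usage: four nodes with uniform visit probability, split into two modules
#_map_equation_sketch([0.25] * 4, [[0, 1], [2, 3]], q_exit=[0.05, 0.05])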