def test_accessibility_on_very_deep_graph():
    gr = graph()
    gr.add_nodes(range(0, 2001))
    for i in range(0, 2000):
        gr.add_edge((i, i + 1))
    recursionlimit = getrecursionlimit()
    accessibility(gr)
    assert getrecursionlimit() == recursionlimit
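For context, a minimal runnable sketch of what accessibility() computes on an undirected graph: each node maps to every node reachable from it, including itself (imports per the python-graph package used throughout; list order may vary):

from pygraph.classes.graph import graph
from pygraph.algorithms.accessibility import accessibility

gr = graph()
gr.add_nodes([1, 2, 3, 4])
gr.add_edge((1, 2))
gr.add_edge((3, 4))
print(accessibility(gr))  # {1: [1, 2], 2: [2, 1], 3: [3, 4], 4: [4, 3]}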
def schwartz_set_heuristic(self):
    # Iterate through using the Schwartz set heuristic
    self.actions = []
    while len(self.graph.edges()) > 0:
        access = accessibility(self.graph)
        mutual_access = mutual_accessibility(self.graph)
        candidates_to_remove = set()
        for candidate in self.graph.nodes():
            candidates_to_remove |= (set(access[candidate]) - set(mutual_access[candidate]))

        # Remove nodes at the end of non-cycle paths
        if len(candidates_to_remove) > 0:
            self.actions.append({'nodes': candidates_to_remove})
            for candidate in candidates_to_remove:
                self.graph.del_node(candidate)

        # If none exist, remove the weakest edges
        else:
            edge_weights = self.edge_weights(self.graph)
            self.actions.append({
                'edges': matching_keys(edge_weights, min(edge_weights.values()))
            })
            for edge in self.actions[-1]["edges"]:
                self.graph.del_edge(edge)

    self.graph_winner()
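The heuristic above removes candidates that are reachable but not mutually reachable. A small hedged sketch of that set difference on a toy digraph:

from pygraph.classes.digraph import digraph
from pygraph.algorithms.accessibility import accessibility, mutual_accessibility

dg = digraph()
dg.add_nodes(['a', 'b', 'c'])
dg.add_edge(('a', 'b'))
dg.add_edge(('b', 'a'))  # 'a' and 'b' are mutually reachable (a cycle)
dg.add_edge(('a', 'c'))  # 'c' is reachable from 'a' but cannot reach back
access = accessibility(dg)
mutual = mutual_accessibility(dg)
print(set(access['a']) - set(mutual['a']))  # set(['c']) -- the node the heuristic would remove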
def sort_nodes_topologically(graph, nodeLs):
    """
    Get a topological sort of a subset of the nodes of a graph

    @type graph: graph_wrapper.GraphWrapper
    @param graph: a graph in which the nodes reside

    @type nodeLs: list [node]
    @param nodeLs: a list of nodes from which to generate the sorting.
                   The nodes must not be mutually accessible!

    @rtype: list [node]
    @return: topological sort of the nodes
    """
    # uid_dic = dict([(node.uid, node) for node in nodeLs])
    # helperNodes = uid_dic.keys()
    helperGraph = graph.__class__(originalSentence="")  # TODO: efficiency - done this way to avoid a circular dependency
    helperGraph.add_nodes(nodeLs)
    acc = accessibility(graph)
    for node1 in nodeLs:
        for node2 in acc[node1]:
            if node2 in nodeLs and node1.uid != node2.uid:  # TODO: efficiency
                helperGraph.add_edge((node1, node2))
    sorted_nodes = topological_sorting(helperGraph)
    return sorted_nodes
def sort_out_covering_exons(cursor, exons):
    # havana is manually curated and gets priority
    is_ensembl = {}
    is_havana = {}
    for exon in exons:
        logic_name = get_logic_name(cursor, exon.analysis_id)
        is_ensembl[exon] = ('ensembl' in logic_name)
        is_havana[exon] = ('havana' in logic_name)

    dg = digraph()
    dg.add_nodes(exons)
    for exon1, exon2 in combinations(dg.nodes(), 2):
        master, covered = find_master(cursor, exon1, exon2, is_ensembl, is_havana)
        if master is not None and covered is not None:
            dg.add_edge((master, covered))
    assert not find_cycle(dg)

    clusters = dict((k, v) for k, v in accessibility(dg).iteritems() if not dg.incidents(k))
    for k in clusters:
        clusters[k].remove(k)

    for master_exon, covered_list in clusters.iteritems():
        master_exon.covering_exon = -1        # nobody's covering this guy
        master_exon.covering_exon_known = -1  # formal
        for covered_exon in covered_list:
            covered_exon.covering_exon = master_exon.exon_id
            covered_exon.covering_exon_known = master_exon.is_known
def is_accessible(self, node1, node2):
    """
    Checks whether two nodes of the blocks graph are connected.
    The connection is checked directionally: node1 -> node2
    """
    graph = self._make_flow_graph()
    acc = accessibility(graph)
    return weakref.ref(node2) in acc[weakref.ref(node1)]
def resolve_plugin_dependencies(self):
    graph = digraph()
    problems = defaultdict(list)

    def check_plugin_dependencies(plugin_id):
        result = [True]  # mutable cell: add_problem below cannot rebind a plain local of the enclosing scope

        def add_problem(problem_type, plugin_id, dependency):
            problems[plugin_id].append(problem_type(plugin_id, dependency))
            result[0] = False

        for dependency in self.plugin_dependencies(plugin_id):
            if dependency.id not in self.manifests:
                add_problem(MissingDependency, plugin_id, dependency)
            elif dependency.version:
                if self.manifests[dependency.id].version not in dependency.version:
                    add_problem(IncorrectVersion, plugin_id, dependency)
            elif dependency.id not in graph:
                if dependency.id in self.enabled_plugins:
                    add_problem(IndirectDependency, plugin_id, dependency)
                else:
                    add_problem(DisabledDependency, plugin_id, dependency)
        return result[0]

    def remove_dependents(plugin_id):
        for node in traversal(graph, plugin_id, 'pre'):
            for dependent in graph[node]:
                edge = node, dependent
                problems[dependent].append(
                    IndirectDependency(dependent, graph.get_edge_properties(edge)['dependency']))
            graph.del_node(node)

    graph.add_nodes(self.enabled_plugins)
    for plugin_id in self.enabled_plugins:
        if check_plugin_dependencies(plugin_id):
            for dependency in self.plugin_dependencies(plugin_id):
                edge = dependency.id, plugin_id
                graph.add_edge(edge)
                graph.set_edge_properties(edge, dependency=dependency)
        else:
            remove_dependents(plugin_id)

    transitive_deps = accessibility(graph)
    cycle_nodes = [node for node in graph
                   if any((node in transitive_deps[dependent])
                          for dependent in transitive_deps[node]
                          if dependent != node)]
    for node in cycle_nodes:
        problems[node].append(CyclicDependency(node))
        graph.del_node(node)

    self.dependency_graph = graph
    self._dependency_problems = problems
    self._load_order = topological_sorting(graph)
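The cycle detection at the end of the snippet above can be isolated into a sketch: a node lies on a cycle exactly when some node it reaches can also reach it. A toy example, assuming the same pygraph API:

from pygraph.classes.digraph import digraph
from pygraph.algorithms.accessibility import accessibility

dg = digraph()
dg.add_nodes([1, 2, 3])
dg.add_edge((1, 2))
dg.add_edge((2, 1))  # 1 and 2 form a cycle
dg.add_edge((2, 3))  # 3 is downstream, not on a cycle
closure = accessibility(dg)
cycle_nodes = [n for n in dg
               if any(n in closure[m] for m in closure[n] if m != n)]
print(cycle_nodes)  # [1, 2]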
def transitive_closure_from_pygraph_as_edge_list(pygraph):
    accessibility_matrix = accessibility(pygraph)
    result = set()
    for from_node, to_nodes in accessibility_matrix.iteritems():
        for to_node in to_nodes:
            if from_node != to_node:
                result.add((from_node, to_node))
    return result
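Hypothetical usage of the helper above (Python 2, matching its iteritems call):

from pygraph.classes.digraph import digraph

dg = digraph()
dg.add_nodes(['a', 'b', 'c'])
dg.add_edge(('a', 'b'))
dg.add_edge(('b', 'c'))
print(transitive_closure_from_pygraph_as_edge_list(dg))
# expected: set([('a', 'b'), ('a', 'c'), ('b', 'c')])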
def test_accessibility_in_digraph(self):
    gr = testlib.new_digraph()
    gr.add_nodes(['a', 'b', 'c'])
    gr.add_edge(('a', 'b'))
    gr.add_edge(('a', 'c'))
    ac = accessibility(gr)
    for n in gr:
        for m in gr:
            if (m in ac[n]):
                assert m in depth_first_search(gr, n)[0]
            else:
                assert m not in depth_first_search(gr, n)[0]
def FindParentNode(self, gr):
    transitive_closure = accessibility.accessibility(gr)
    most_accessible_node = None
    for node_in, nodes_out in transitive_closure.iteritems():
        if most_accessible_node is None:
            most_accessible_node = node_in
        max_value = len(transitive_closure[most_accessible_node])
        this_value = len(nodes_out)
        if this_value > max_value:
            most_accessible_node = node_in
    return most_accessible_node
def get_bad_bodies(self, obj_to_grasp):
    obj_name = obj_to_grasp.GetName()
    if doJointInterpretation:
        obstructions = accessibility(self.obstruction_digraph).get(obj_name, None)
    future_objects = []
    if doJointInterpretation and obstructions is not None:
        future_objects = obstructions
        future_objects.remove(obj_name)
    # print self.obstruction_digraph
    # print obj_name, future_objects
    # raw_input("!!")
    bad_body_filter = lambda b: (b.GetName() in future_objects) \
        or (b.GetName() in self.unMovableObjects)
    return set(filter(bad_body_filter, self.env.GetBodies()))
def accessibility(self):
    """
    Accessibility matrix (transitive closure).

    @rtype: dictionary
    @return: Accessibility information for each node.
    """
    access_ = accessibility.accessibility(self.graph)
    access = {}
    for each in list(access_.keys()):
        if (each[1] == 'n'):
            access[each[0]] = []
            for other in access_[each]:
                if (other[1] == 'n'):
                    access[each[0]].append(other[0])
    return access
def test_accessibility_hypergraph(self):
    gr = hypergraph()

    # Add some nodes / edges
    gr.add_nodes(range(8))
    gr.add_hyperedges(['a', 'b', 'c'])

    # Connect the 8 nodes with three size-3 hyperedges
    for node_set in [['a', 0, 1, 2], ['b', 2, 3, 4], ['c', 5, 6, 7]]:
        for node in node_set[1:]:
            gr.link(node, node_set[0])

    access = accessibility(gr)
    assert 8 == len(access)

    for i in range(5):
        assert set(access[i]) == set(range(5))

    for i in range(5, 8):
        assert set(access[i]) == set(range(5, 8))
def dependent_nodes(graph, start_nodes):
    """
    >>> graph = GraphConversions.edge_list_to_pygraph([('a', 'b'), ('b', 'c'), ('a', 'c')])
    >>> sorted(GraphAlgorithms.dependent_nodes(graph, ['c']))
    ['a', 'b', 'c']
    """
    if isinstance(graph, BasicGraph):
        accessibility_matrix = GraphAlgorithms.accessibility_matrix_from_graph(graph, inverse=True)
    elif isinstance(graph, digraph):
        accessibility_matrix = accessibility(graph.reverse())
    else:
        raise TypeError("%s is not a known graph type" % graph)

    # dependent_nodes = set()
    # for start_node in start_nodes:
    #     if start_node in accessibility_matrix:
    #         dependent_nodes.update(accessibility_matrix[start_node])
    # return dependent_nodes
    # TODO: is this more efficient than the commented-out version above?
    return CollectionTools.union_all(
        accessibility_matrix[start_node]
        for start_node in start_nodes
        if start_node in accessibility_matrix)
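The digraph branch above computes ancestors by running accessibility on the reversed graph; digraph.reverse() is also used elsewhere in this section. A hedged sketch (list order may vary):

from pygraph.classes.digraph import digraph
from pygraph.algorithms.accessibility import accessibility

dg = digraph()
dg.add_nodes(['a', 'b', 'c'])
dg.add_edge(('a', 'b'))
dg.add_edge(('b', 'c'))
ancestors = accessibility(dg.reverse())
print(ancestors['c'])  # ['c', 'b', 'a'] -- everything 'c' depends on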
def ccm_fast_export(releases, graphs):
    global acn_ancestors
    global users
    users = users()
    logger.basicConfig(filename='ccm_fast_export.log', level=logger.DEBUG)

    commit_lookup = {}

    # Get the initial release
    for k, v in releases.iteritems():
        if k == 'delimiter':
            continue
        if k == 'ccm_types':
            continue
        if v['previous'] is None:
            release = k
            break
    logger.info("Starting at %s as initial release" % release)

    if 'created' not in releases[release]:
        initial_release_time = 0.0  # epoch for now since releases[release] has no 'created' key :(
    else:
        initial_release_time = time.mktime(releases[release]['created'].timetuple())
    mark = 0

    files = []
    # Create the initial release
    # get all the file objects:
    file_objects = (ccm_cache.get_object(o) for o in releases[release]['objects'])
    project_obj = ccm_cache.get_object(releases[release]['fourpartname'])
    paths = project_obj.get_members()
    for o in file_objects:
        if o.get_type() != 'dir':
            object_mark, mark = create_blob(o, mark)
            for p in paths[o.get_object_name()]:
                files.append('M ' + releases['ccm_types']['permissions'][o.get_type()] + ' :' + str(object_mark) + ' ' + p)

    empty_dirs = releases[release]['empty_dirs']
    logger.info("Empty dirs for release %s\n%s" % (release, empty_dirs))
    mark = create_blob_for_empty_dir(get_mark(mark))
    #file_list = create_file_list(objects, object_lookup, releases['ccm_types'], empty_dirs=empty_dirs, empty_dir_mark=mark)
    if empty_dirs:
        for d in empty_dirs:
            if mark:
                path = d + '/.gitignore'
                files.append('M 100644 :' + str(mark) + ' ' + path)

    mark = get_mark(mark)
    commit_info = ['reset refs/tags/' + release,
                   'commit refs/tags/' + release,
                   'mark :' + str(mark),
                   'author Nokia <*****@*****.**> ' + str(int(initial_release_time)) + " +0000",
                   'committer Nokia <*****@*****.**> ' + str(int(initial_release_time)) + " +0000",
                   'data 15',
                   'Initial commit',
                   '\n'.join(files),
                   '']
    print '\n'.join(commit_info)
    logger.info("git-fast-import:\n%s" % ('\n'.join(commit_info)))

    tag_msg = 'Release: %s' % release
    annotated_tag = ['tag %s' % release,
                     'from :%s' % str(mark),
                     'tagger Nokia <*****@*****.**> ' + str(int(initial_release_time)) + " +0000",
                     'data %s' % len(tag_msg),
                     tag_msg]
    print '\n'.join(annotated_tag)

    commit_lookup[release] = mark

    # do the following releases (graphs)
    release_queue = deque(releases[release]['next'])
    while release_queue:
        release = release_queue.popleft()
        previous_release = releases[release]['previous']
        logger.info("Next release: %s" % release)
        commit_graph = graphs[release]['commit']
        commit_graph = fix_orphan_nodes(commit_graph, previous_release)
        commit_graph = ch.spaghettify_digraph(commit_graph, previous_release, release)
        #htg.commit_graph_to_image(commit_graph, releases[release], graphs[release]['task'], name=releases[release]['name']+'_after')

        # Find the cutting nodes
        logger.info("Finding the cutting nodes")
        undirected = graph()
        undirected.add_nodes(commit_graph.nodes())
        [undirected.add_edge(edge) for edge in commit_graph.edges()]
        cutting_nodes = cut_nodes(undirected)
        del undirected

        # Create the reverse commit graph
        logger.info("Building the reverse commit graph")
        reverse_commit_graph = commit_graph.reverse()

        # Compute the accessibility matrix of the reverse commit graph
        logger.info("Compute the ancestors")
        ancestors = accessibility(reverse_commit_graph)
        del reverse_commit_graph
        logger.info("Ancestors of the release: %s" % str(ancestors[release]))

        # Clean up the ancestors matrix
        for k, v in ancestors.iteritems():
            if k in v:
                v.remove(k)

        # Get the commits order
        commits = topological_sorting(commit_graph)

        # Fix the commits order list
        commits.remove(previous_release)
        commits.remove(release)

        last_cutting_node = None

        # Check if the release (Synergy project) has changed name; if it has,
        # the 'base' directory name needs to be renamed
        if releases.has_key('delimiter'):
            delim = releases['delimiter']
        else:
            delim = '-'
        previous_name = previous_release.split(delim)[0]
        current_name = release.split(delim)[0]
        if current_name != previous_name:
            logger.info("Name changed: %s -> %s" % (previous_name, current_name))
            from_mark = commit_lookup[previous_release]
            mark, commit = rename_toplevel_dir(previous_name, current_name, release, releases, mark, from_mark)
            print '\n'.join(commit)
            # adjust the commit lookup
            commit_lookup[previous_release] = mark

        for counter, commit in enumerate(commits):
            logger.info("Commit %i/%i" % (counter + 1, len(commits)))
            acn_ancestors = []
            if last_cutting_node is not None:
                acn_ancestors = ancestors[last_cutting_node]

            # Create the references list. It lists the parents of the commit
            #reference = [commit_lookup[parent] for parent in ancestors[commit] if parent not in acn_ancestors]
            reference = [commit_lookup[parent] for parent in commit_graph.incidents(commit)]
            if len(reference) > 1:
                # Merge commit
                mark = create_merge_commit(commit, release, releases, mark, reference, graphs, set(ancestors[commit]) - set(acn_ancestors))
            else:
                # Normal commit
                mark = create_commit(commit, release, releases, mark, reference, graphs)

            # Update the lookup table
            commit_lookup[commit] = mark

            # Update the last cutting node if necessary
            if commit in cutting_nodes:
                last_cutting_node = commit

        if last_cutting_node is not None:
            acn_ancestors = ancestors[last_cutting_node]

        reference = [commit_lookup[parent] for parent in ancestors[release] if parent not in acn_ancestors]
        logger.info("Reference %s" % str([parent for parent in ancestors[release] if parent not in acn_ancestors]))
        if not reference:
            logger.info("Reference previous %s, mark: %d" % (releases[release]['previous'], commit_lookup[releases[release]['previous']]))
            reference = [commit_lookup[releases[release]['previous']]]

        mark, merge_commit = create_release_merge_commit(releases, release, get_mark(mark), reference, graphs, set(ancestors[release]) - set(acn_ancestors))
        print '\n'.join(merge_commit)
        annotated_tag = create_annotated_tag(releases, release, mark)
        print '\n'.join(annotated_tag)

        commit_lookup[release] = mark
        release_queue.extend(releases[release]['next'])
        #release = releases[release]['next']
        #release = None

    # reset to master
    master = get_master_tag()
    reset = ['reset refs/heads/master',
             'from :' + str(commit_lookup[master])]
    logger.info("git-fast-import:\n%s" % ('\n'.join(reset)))
    print '\n'.join(reset)
def accessibility_wo_self(graph):
    ret = accessibility(graph)
    for k in ret:
        ret[k].remove(k)
    return ret
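A hedged usage note: pygraph's accessibility() reports every node as reaching itself (the tests earlier in this section rely on that), so the remove(k) above is safe. For example:

from pygraph.classes.digraph import digraph
from pygraph.algorithms.accessibility import accessibility

dg = digraph()
dg.add_nodes([1, 2])
dg.add_edge((1, 2))
print(accessibility(dg))          # {1: [1, 2], 2: [2]}
print(accessibility_wo_self(dg))  # {1: [2], 2: []}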
def accessibility_matrix_from_graph(graph, inverse=False):
    pygraph = GraphConversions.graph_to_pygraph(graph, inverse=inverse)
    accessibility_matrix = accessibility(pygraph)
    return accessibility_matrix
def get_components(self):
    graph_components = accessibility(self)
    return {self.nodesMap[key.uid]: [self.nodesMap[v.uid] for v in value]
            for key, value in graph_components.iteritems()}
(options, args) = parser.parse_args()
if args:
    if args[0] == "-":
        f = sys.stdin
    else:
        f = open(args[0])
elif not sys.stdin.isatty():
    f = sys.stdin
else:
    parser.error("Need input from file or stdin")

T = int(f.readline())

from pygraph.classes.digraph import digraph
from pygraph.algorithms.accessibility import accessibility

for i in range(1, T + 1):
    N = int(f.readline())
    fols = [int(x) for x in f.readline().split()]
    assert len(fols) == N
    G = digraph()
    for a in range(1, N + 1):
        G.add_node(a)
    for j, a in enumerate(G.nodes()):
        assert a != fols[j]  # a follows fols[j]
        G.add_edge((fols[j], a))  # edge (b, a) means b is followed by a
    # every vertex has in-degree exactly 1
    print "Case #%d:" % i
    A = accessibility(G)
    #print A
    for a in range(1, N + 1):
        print len(A[a])
def main(gr, n):
    recursionlimit = getrecursionlimit()
    for i in range(n):
        accessibility(gr)
    assert getrecursionlimit() == recursionlimit
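The recursion-limit tests in this section assert that accessibility() leaves sys.getrecursionlimit() unchanged after returning. A purely illustrative sketch of the save/restore pattern they guard; the function name here is hypothetical, not a pygraph internal:

import sys

def call_with_raised_recursion_limit(fn, needed_depth):
    saved = sys.getrecursionlimit()
    try:
        sys.setrecursionlimit(max(saved, needed_depth))
        return fn()
    finally:
        sys.setrecursionlimit(saved)  # restored, which is what the assertions check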
def group(setN, gr, write_mark, minimum_cluster, alpha):
    ## print gr
    ## print write_mark
    N = len(setN)
    ## print N
    density = zeros(N)
    for i in range(N):
        distances = 0.0
        neighbor_set = gr.neighbors(i)
        for each_node in neighbor_set:
            tmp_d = setN[i] - setN[each_node]
            distances += sqrt(inner(tmp_d, tmp_d))
        if distances == 0.0 or len(neighbor_set) == 0:
            ## print i, N, 'in the pool'
            ## raw_input('ISB..')
            distances = 0.0
        else:
            ## print distances, float(len(neighbor_set))
            distances = distances / float(len(neighbor_set))
        density[i] = 1.0 / pow(1 + distances, 2.0)
    density_copy = deepcopy(density)
    ## print density

    #* 1 *
    remain_set = set(range(N))
    clusters = {}
    for i in range(N):
        if is_max(i, gr, density):
            clusters[i] = i
            remain_set.remove(i)
            density_copy[i] = -1
    ## print 'remaining set', len(remain_set)

    #* 2 *
    while len(remain_set) > 0:
        unlabeled_max = argmax(density_copy)
        density_copy[unlabeled_max] = -1
        #* 3 *
        neighbor_set = gr.neighbors(unlabeled_max)
        tmp_density = density[neighbor_set]
        label_index = neighbor_set[argmax(tmp_density)]
        clusters[unlabeled_max] = clusters[label_index]
        remain_set.remove(unlabeled_max)

    # algorithm 2
    # find boundary edges
    cluster_centers = clusters.values()
    ## print len(cluster_centers), cluster_centers
    borders = []
    for each_edge in gr.edges():
        if clusters[each_edge[0]] != clusters[each_edge[1]]:
            if each_edge[0] < each_edge[1]:
                borders.append(each_edge)

    # find big clusters
    connected_groups = connected_components(gr)
    group_count = len(set(connected_groups.values()))
    big_cluster_head = []
    tmp_set_appeared = set([])
    for head, group_index in connected_groups.items():
        if group_index not in tmp_set_appeared:
            big_cluster_head.append(head)
            tmp_set_appeared.add(group_index)

    # construct edge set
    heads_tails = accessibility(gr)
    head_and_tail = {}
    for head, tail in heads_tails.items():
        if head in big_cluster_head:
            head_and_tail[connected_groups[head]] = tail

    # thresholds of the super clusters
    Gc = {}
    for head, tail in head_and_tail.items():
        tmp_tresh = 0.0
        count = 0.0
        for each_edge in itertools.combinations(tail, 2):
            if gr.has_edge(each_edge):
                count += 1.0
                tmp_tresh += abs(density[each_edge[0]] - density[each_edge[1]])
        if count == 0.0:
            ## print('singularity')
            Gc[head] = 0.0
        else:
            Gc[head] = alpha * tmp_tresh / count

    #* 2 *
    ## print 'group,tresh', borders, Gc
    while len(borders) > 0:
        ## print borders, 'borders'
        current_border = borders.pop()
        Dab = max(density[current_border[0]], density[current_border[1]])
        Dca = density[clusters[current_border[0]]]
        Dcb = density[clusters[current_border[1]]]
        Gtresh = Gc[connected_groups[current_border[0]]]
        ## print Dca - Dab, Dcb - Dab, Gtresh, 'x'
        if connected_groups[current_border[0]] != connected_groups[current_border[1]]:
            raw_input('there be a problem')
        if ((Dca - Dab < Gtresh) | (Dcb - Dab < Gtresh)) == False:
            clusterA = clusters[current_border[0]]
            clusterB = clusters[current_border[1]]
            ## print gr, 'sb'
            gr.del_edge(current_border)
            tmp_borders = deepcopy(borders)
            for each_edge in tmp_borders:
                if (clusters[each_edge[0]] == clusterA and clusters[each_edge[1]] == clusterB) or \
                   (clusters[each_edge[1]] == clusterA and clusters[each_edge[0]] == clusterB):
                    ## raw_input('del')
                    gr.del_edge(each_edge)
                    borders.remove(each_edge)

    ## out_cast = []
    ## for i in range(len(setN)):
    ##     if gr.neighbors(i) == 0:
    ##         out_cast.append(i)
    ## for isolation in out_cast:
    ##     gr.del_node(isolation)
    ##     setN.remove(isolation)
    ## print len(setN)
    ## print write_mark
    if write_mark:
        connected_groups = connected_components(gr)
        group_count = len(set(connected_groups.values()))
        if minimum_cluster > group_count:
            grade = minimum_cluster - group_count
            from pygraph.algorithms.minmax import cut_tree
            yourcut = cut_tree(gr)
            ## print 'cut tree', yourcut.values(), grade
            yourset = yourcut.values()
            for i in range(grade):
                print min(yourset)
                yourset.remove(min(yourset))
            max_degree = min(yourset)
            ## print max_degree, yourset
            for edge_name, cut_degree in yourcut.items():
                if (cut_degree < max_degree + 1) and (gr.has_edge(edge_name)):
                    ## print edge_name
                    gr.del_edge(edge_name)
                    print 'cluster break x 1', edge_name
print "Preordering" print pre print "Postordering" print post print "\n" bfs, bfsord= breadth_first_search(gr, root='A') print "Breadth first search" print "Spanning tree" print bfs print "level-based ordering" print bfsord print "\n" print "Accessibility" access= accessibility(gr) print access print "Mutual accessibility" maccess= mutual_accessibility(gr) print maccess print "\n" print "Traversal" trav= traversal(gr, 'A', 'pre') for t in trav: print t print "Transitive Edges" tredges= transitive_edges(gr) print tredges print "\n"