Example #1
def make_partition_list(graph, number_samples = 100, tree_algorithm = random_spanning_tree_wilson, equi = True):
    
    #Note -- currently this is configured only for partitions into 2 blocks
    total_number_trees_edges_pairs = np.exp(log_number_trees(graph))*(len(graph.nodes()) - 1)
    
    uniform_trees = []
    for i in range(number_samples):
        uniform_trees.append(tree_algorithm(graph))
        
    partitions = []
    for tree in uniform_trees:
        if not equi:
            e = random.choice(list(tree.edges()))
            blocks = remove_edges_map(graph, tree, [e])
            new_partition = partition_class(graph, blocks, tree, e, total_number_trees_edges_pairs)
            new_partition.set_likelihood()
            partitions.append(new_partition)
        else:
            out = almost_equi_split(tree, 2, .1)
            if out is not None:
                e = out[0]
                blocks = remove_edges_map(graph, tree, [e])
                new_partition = partition_class(graph, blocks, tree, e, total_number_trees_edges_pairs)
                new_partition.set_likelihood()
                partitions.append(new_partition)
    return partitions
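
A minimal usage sketch for make_partition_list, assuming the helpers it calls (random_spanning_tree_wilson, almost_equi_split, remove_edges_map, partition_class) are importable from the same module; the grid graph and sample count are illustrative.

import networkx as nx

# Sample 10 spanning trees of a small grid and keep the resulting 2-block partitions.
toy_graph = nx.grid_graph([4, 4])
sample = make_partition_list(toy_graph, number_samples=10)
# Trees that admit no almost-equi split are skipped, so len(sample) <= 10.
print(len(sample), "partitions kept")
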
def random_equi_partitions(graph, num_partitions, num_blocks, algorithm = "Wilson"):
    '''
    Generates exact equi partitions by repeatedly sampling random spanning trees
    and keeping those that admit an equi split.

    :graph: the graph to be partitioned
    :num_partitions: number of partitions to generate
    :num_blocks: number of blocks in each partition
    :algorithm: spanning-tree sampler to use -- "Broder", "Wilson", or "MST"
    '''
    found_partitions = []
    counter = 0
    while len(found_partitions) < num_partitions:
        counter += 1
        if algorithm == "Broder":
            tree = random_spanning_tree(graph)    
        if algorithm == "Wilson":
            tree = random_spanning_tree_wilson(graph)
        if algorithm == "MST":
            for edge in graph.edges():
                graph.edges[edge]["weight"] = np.random.uniform(0,1)
            #Do we need to reset this each time?
            tree = nx.minimum_spanning_tree(graph)
        edge_list = equi_split(tree, num_blocks)
        #equi_split returns None if the tree has no exact equi split
        if edge_list is not None:
            found_partitions.append(remove_edges_map(graph, tree, edge_list))
            print(len(found_partitions), "waiting time:", counter)
            counter = 0
            #keeps track of how many trees it went through to find the one
            #that could be equi split
    return found_partitions
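
A minimal usage sketch, with an illustrative grid graph; the return value is a list of partitions, each presumably the collection of block subgraphs produced by remove_edges_map.

import networkx as nx

# Draw 5 exact equi-partitions of a 6x6 grid into 2 blocks via Wilson's algorithm.
grid = nx.grid_graph([6, 6])
equi_partitions = random_equi_partitions(grid, num_partitions=5, num_blocks=2, algorithm="Wilson")
print(len(equi_partitions), "partitions found")
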
def random_almost_equi_partitions_with_walk(graph, num_partitions, num_blocks, delta, step = "Basis", jump_size = 50):
    '''Produces delta-almost-equi partitions via a random walk on spanning trees;
    it keeps looping until it has found the required number of partitions.
    '''
    found_partitions = []
    counter = 0
    tree = random_spanning_tree_wilson(graph)
    while len(found_partitions) < num_partitions:
        counter += 1
        if step == "Basis":
            for i in range(jump_size):
                tree, edge_to_remove, edge_to_add = propose_step(graph, tree)
        if step == "Broder":
            for i in range(jump_size):
                tree, edge_to_remove, edge_to_add = propose_Broder_step(graph, tree)
        edge_list = almost_equi_split(tree, num_blocks, delta)
        #almost_equi_split returns None if the tree has no delta-almost-equi split
        if edge_list is not None:
            blocks = remove_edges_map(graph, tree, edge_list)
            found_partitions.append(blocks)
            print(len(found_partitions), "waiting time:", counter)
            counter = 0
    return found_partitions
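
propose_step and propose_Broder_step are not shown in this listing. Below is a minimal sketch of a basis-exchange step on an undirected spanning tree (add a random non-tree edge, then delete a random edge of the unique cycle it closes); this is an assumption about what such a step can look like, not necessarily the implementation used above.

import random
import networkx as nx

def propose_step_sketch(graph, tree):
    # Assumes graph and tree are undirected nx.Graph objects and tree spans graph.
    non_tree_edges = [e for e in graph.edges() if not tree.has_edge(*e)]
    edge_to_add = random.choice(non_tree_edges)
    tree.add_edge(*edge_to_add)
    # The added edge closes exactly one cycle; break it by removing a random
    # cycle edge other than the one just added.
    cycle = nx.find_cycle(tree, source=edge_to_add[0])
    edge_to_remove = random.choice([e for e in cycle if set(e) != set(edge_to_add)])
    tree.remove_edge(*edge_to_remove)
    return tree, edge_to_remove, edge_to_add
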
def random_split_fast(graph, tree, num_blocks, delta, allowed_counts=100):
    pop = [tree.nodes[i]["POP10"] for i in tree.nodes()]
    total_population = np.sum(pop)

    ideal_weight = total_population / num_blocks

    label_weights(tree)

    acceptable_nodes = list(tree.nodes())
    acceptable_nodes.remove(tree.graph["root"])
    helper_list = copy.deepcopy(acceptable_nodes)
    acceptable_sizes = [[
        m * (ideal_weight * (1 - delta)), m * (ideal_weight * (1 + delta))
    ] for m in range(1, num_blocks)]
    '''
    A subtree weight is acceptable iff it lies in
    [m * ideal * (1 - delta), m * ideal * (1 + delta)] for some m.
    Heuristically, take delta small.
    '''

    for vertex in helper_list:
        if not acceptable(tree.nodes[vertex]["weight"], acceptable_sizes):
            acceptable_nodes.remove(vertex)

    sample_subforest = nx.subgraph(tree, acceptable_nodes)
    sample_subforest = sample_subforest.to_undirected()
    components = [
        sample_subforest.subgraph(c).copy()
        for c in nx.connected_components(sample_subforest)
    ]
    #test_tree(sample_subforest)
    if len(acceptable_nodes) < num_blocks - 1:
        print("bad tree")
        return False

    was_good = False
    counter = 0
    while (was_good == False) and (counter < allowed_counts):

        vertices = component_sampler(components, num_blocks)
        if vertices is False:
            return False
            #This is the case when the allowable set isn't big enough to support a
            #partition; it relies on the assumption that delta is small, so that no
            #allowable interval of nodes can contain a district.
        counter += 1
        was_good = checker(tree, vertices, ideal_weight, delta,
                           total_population)
    if counter >= allowed_counts:
        return "FailedToFind"
    print(checker(tree, vertices, ideal_weight, delta, total_population))
    print("counter:", counter)

    edges = [list(tree.out_edges(x))[0] for x in vertices]
    partition = remove_edges_map(graph, tree, edges)

    ratios = []
    for block in partition:
        block_pop = np.sum([tree.nodes[x]["POP10"] for x in block.nodes()])
        ratios.append(block_pop / ideal_weight)

    print(ratios)

    return ratios
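
The filter above calls acceptable, which does not appear in this listing. A minimal sketch, under the assumption that it simply tests whether the subtree weight falls in one of the precomputed intervals:

def acceptable(weight, acceptable_sizes):
    # acceptable_sizes holds [low, high] pairs of the form
    # [m * ideal * (1 - delta), m * ideal * (1 + delta)] for m = 1, ..., num_blocks - 1.
    return any(low <= weight <= high for low, high in acceptable_sizes)
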
Example #5
def equi_shadow_walk(graph, tree, num_steps, num_blocks):
    found_partitions = []
    counter = 0
    while len(found_partitions) < num_steps:
        counter += 1
        tree, edge_list = equi_shadow_step(graph, tree, num_blocks)
        if edge_list is not None:
            found_partitions.append(remove_edges_map(graph, tree, edge_list))
            print(len(found_partitions), "waiting time:", counter)
            counter = 0
    return found_partitions
Example #6
def likelihood_tree_edges_pair(graph, tree, edge_list):
    '''
    Computes the partition associated to (graph, tree, edge_list)
    and the log-likelihood that this partition is drawn via the
    uniform-tree / uniform-edge_list sampling method.

    graph = the graph to be partitioned
    tree = a chosen spanning tree of the graph
    edge_list = list of tree edges that determine the partition

    There are two terms in the log-likelihood:
        tree_term = from the number of spanning trees within each block
        connector_term = from the number of ways to pick a spanning tree to
        hook up the blocks

    How connector_term works:
        1. It builds a graph whose nodes are the blocks of the partition
        and whose multi-edges correspond to the sets of edges connecting those blocks.
        2. It uses the multigraph (or weighted-graph) version of Kirchhoff's theorem
        to compute the number of ways to connect the blocks into a single tree.

    How tree_term works:
        for each block of the partition, it computes the number of spanning trees
        that the induced subgraph has.

    Returns (tree_term + connector_term).
    TODO -- rewrite score to be 1 / this
    '''
    partition = remove_edges_map(graph, tree, edge_list)
    #this gets the list of subgraphs from (tree, edges) pair
    tree_term = np.sum([log_number_trees(g) for g in partition])

    #Building connector term:
    connector_graph = nx.Graph()
    connector_graph.add_nodes_from(partition)
    for subgraph_1 in partition:
        for subgraph_2 in partition:
            if subgraph_1 != subgraph_2:
                cutedges = cut_edges(graph, subgraph_1, subgraph_2)
                if cutedges:
                    connector_graph.add_edge(subgraph_1,
                                             subgraph_2,
                                             weight=len(cutedges))
    cut_weight = log_number_trees(connector_graph, True)
    return (tree_term + cut_weight)
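
Both terms rely on log_number_trees, which is not reproduced here. A minimal sketch via Kirchhoff's matrix-tree theorem (the spanning-tree count of a connected graph is the determinant of its Laplacian with one row and column removed); the weighted flag is meant to mirror the second argument used for connector_graph above, but the actual signature of log_number_trees is an assumption.

import numpy as np
import networkx as nx

def log_number_trees_sketch(graph, weighted=False):
    # Kirchhoff's matrix-tree theorem: delete one row and column of the
    # (optionally weighted) Laplacian and take the log-determinant.
    weight_key = "weight" if weighted else None
    laplacian = nx.laplacian_matrix(graph, weight=weight_key).toarray().astype(float)
    sign, logdet = np.linalg.slogdet(laplacian[1:, 1:])
    return logdet
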
def random_almost_equi_partitions(graph, num_partitions, num_blocks, delta):
    '''Produces delta-almost-equi partitions; it keeps looping until it has found
    the required number of partitions.
    '''
    found_partitions = []
    counter = 0
    while len(found_partitions) < num_partitions:
        counter += 1
        tree = random_spanning_tree_wilson(graph)
        edge_list = almost_equi_split(tree, num_blocks, delta)
        #almost_equi_split returns None if the tree has no delta-almost-equi split
        if edge_list is not None:
            blocks = remove_edges_map(graph, tree, edge_list)
            found_partitions.append(blocks)
            print(len(found_partitions), "waiting time:", counter)
            counter = 0
    return found_partitions
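
A minimal usage sketch; delta is read here as the allowed relative deviation of each block from the ideal size, which matches how the tolerance is used elsewhere in this listing, though the exact semantics belong to almost_equi_split.

import networkx as nx

# Draw 3 partitions of a 10x10 grid into 4 blocks, each within 5% of the ideal size.
grid = nx.grid_graph([10, 10])
partitions = random_almost_equi_partitions(grid, num_partitions=3, num_blocks=4, delta=0.05)
print(len(partitions), "partitions found")
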
Example #8
            p = np.exp(log_number_trees(H)) / np.exp(log_number_trees(graph))
            #Replace this with a computation of effective resistance!
            #You also don't need to compute this so many times!
            total_inverse += 1/p
        edge = choice(list(graph.edges()))
        H = nx.contracted_edge(graph, edge)
        q = np.exp(log_number_trees(H)) / np.exp(log_number_trees(graph))
        p = (1 / q) / total_inverse
        print(p, q, total_inverse)
        c = uniform(0,1)
        if c <= p:
            T.append(edge)
            graph = nx.contracted_edge(graph, edge, self_loops=False)
        else:
            if q < .999999:
                graph.remove_edge(edge[0], edge[1])
    tree = nx.Graph()
    tree.add_edges_from(T)
    edge = T[0]
    return (tree, edge)

m = 6
graph = nx.grid_graph([m,m])
tree, edge = kirkoff_inverse_sampler(graph)

nx.draw(tree)

partition = remove_edges_map(graph, graph, [edge])
visualize(graph, partition)
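
The comment inside the sampler suggests replacing the ratio of spanning-tree counts with an effective-resistance computation. For a connected unweighted graph, the probability that an edge belongs to a uniform spanning tree equals the effective resistance between its endpoints, so that ratio can be obtained directly; a sketch is below, with the wiring into kirkoff_inverse_sampler left open.

import networkx as nx

def edge_in_ust_probability(graph, edge):
    # P(edge in uniform spanning tree) = effective resistance between its endpoints,
    # i.e. the same quantity as tau(G contracted at edge) / tau(G) computed above.
    return nx.resistance_distance(graph, edge[0], edge[1])
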