Example #1
def main():
    graph = read_graph('sample_graph')
    print(graph)
    print(brute_force_color(graph, 3))
    print('greedy color of K_10', greedy_color(complete_graph(10)))
    star = read_graph('star_graph')
    print(brute_force_color(star), greedy_color(star),
          greedy_unordered_color(star))
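Note: the coloring helpers used above (brute_force_color, greedy_color, greedy_unordered_color, complete_graph) are not reproduced in this listing. As a rough, hypothetical sketch of the greedy strategy the example exercises, assuming the graph is a dict mapping each vertex to an iterable of its neighbours (the actual representation returned by read_graph is not shown):

def greedy_color_sketch(graph):
    # Hypothetical greedy coloring: give each vertex the smallest color
    # not already used by one of its colored neighbours.
    coloring = {}
    for v in graph:
        used = {coloring[u] for u in graph[v] if u in coloring}
        color = 0
        while color in used:
            color += 1
        coloring[v] = color
    return coloring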
Example #2
def main(argv):
    graph_g = graph.read_graph(argv[1])
    graph_h = graph.read_graph(argv[2])
    subgraph = isomorphic_subgraph(graph_g, graph_h)

    if subgraph is not None:
        print("Isomorphic vertices:")
        print(", ".join([str(v) for v in sorted(subgraph)]))
        return 0
    else:
        print("No isomorphic vertices.")
        return 1
Example #3
def write_index_from_graph(graph_input_file, index_output_file, num_cores = 7, verbose = True):
    graph = read_graph(graph_input_file)
    nodes = list(graph.keys())
    list_list_nodes = []
    for i in range(num_cores):
        list_list_nodes.append(list())
    num_nodes = len(nodes)
    num_nodes_for_thread = num_nodes // num_cores
    actual_node = 0
    for i in range(num_cores):
        for j in range(num_nodes_for_thread):
            list_list_nodes[i].append(nodes[actual_node])
            actual_node += 1
    actual_list = 0
    while actual_node < num_nodes:
        list_list_nodes[actual_list].append(nodes[actual_node])
        actual_node += 1
        actual_list += 1
        actual_list %= num_cores
    p = Pool(processes=num_cores)
    for i in range(num_cores):
        p.apply_async(generate_pages_contents, (list_list_nodes[i],))
    p.close()
    p.join()
    if verbose:
        print("Threads have finished their job")
Example #4
def extractMethods(fpath):
    config.log = open(fpath + "log", "w")

    config.init()  #	Initialize all the global variables
    adj = graph.read_graph(fpath)
    #	dotGenerator.generate_dot(adj,fpath+"Input")
    config.log.write("\n----- Main()\n")
    config.log.write("\t\t Input File Name : " + fpath + "\n")

    ctrList = pvh.read_CtrlLoc(fpath)
    pvh.InsertSource(adj)  # used in get_iter_p_index
    config.log.write("\t\tVertexHash Contents :\t" + str(config.vHash) + "\n")

    pc.precomputations(adj, ctrList)

    if config.flag['InGr'] != 0:
        config.log.write("\t\tInput Graph:\n")
        graph.write(adj, config.log)

    adj = sg.segmentation(adj, ctrList)

    #	commented the call to gp.generate(adj,fpath) method on June 10, 2019. Reason: Generates error.
    #	config.log.write("Calling argument generator\n")
    #	gp.generate(adj,fpath)	#Added on January 9, 2019

    #	output.dump_output(adj)

    #	remove_help_files(sys.argv[1])	#added on 4 July
    #	subprocess.call(["rm", "*.pyc"])
    config.log.write("This is last of log.\n")
    config.log.close()

    formattedOutput(sys.argv[1])
Example #5
    def get_gc(self, region):
        if region in self.gcs:
            return self.gcs[region]
        fname = os.path.join(graph_dir, region + '.graph')
        g = graph.read_graph(fname)
        gc = graph.GraphContainer(g)
        self.gcs[region] = gc
        return gc
Example #6
def compute_tps(filename):
    """ Write your implementation to create a topological sort here. 
    Store your answer in tps"""
    """ <filename> is the name of the input file containing graph information:
    you need to read it in and perform the topological sort, saving the results
    in tps, then use write_tps_to_file() to output it to a file called output_<filename>"""

    #gets the directed graph from a file
    fileDict = graph.read_graph(filename)

    #calls a recursive toposort and puts it in tps
    tps = recursive_topsort(fileDict)
    write_tps_to_file(tps, filename)
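recursive_topsort is not included in this snippet. A hypothetical DFS-based version, assuming the dictionary returned by graph.read_graph maps each node to a list of its successors and that the graph is acyclic:

def recursive_topsort_sketch(graph):
    # Depth-first search; a node is appended only after all of its
    # successors, so the reversed post-order is a topological order.
    visited = set()
    order = []

    def visit(node):
        if node in visited:
            return
        visited.add(node)
        for succ in graph.get(node, []):
            visit(succ)
        order.append(node)

    for node in graph:
        visit(node)
    order.reverse()
    return order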
Example #7
File: src.py  Project: kiram15/cs320
def compute_tps(filename):
    startGraph = graph.read_graph(filename)  #tracks keys and outDegrees
    startDict = {}
    # declaring them all as white to start
    for i in startGraph.keys():
        startDict[i] = 'W'

    stringTps = [] #backwards topological sort
    #while there are unvisited nodes
    while 'W' in startDict.values():
        for node in startDict:
            if (startDict[node] == 'W'):
                visit(node, startDict, startGraph, stringTps)

    #reverse sort because to add in at the beginning would increase complexity
    stringTps.reverse()
    write_tps_to_file(stringTps, filename)
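The visit helper is not part of this snippet. A sketch consistent with the white-marking scheme above (flip the node off 'W', recurse into still-white successors, append in post-order so the final reverse() yields a topological order); the 'B' marker and the successor-list representation are assumptions:

def visit(node, startDict, startGraph, stringTps):
    startDict[node] = 'B'  # assumed marker for "visited"
    for succ in startGraph.get(node, []):
        if startDict.get(succ) == 'W':
            visit(succ, startDict, startGraph, stringTps)
    stringTps.append(str(node))  # post-order: appended after its successors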
Example #8
def optimal_assign_n_condense(input_dir, input_file, out_dir, bij_file):
    # prepare the output directory
    small_cc_num = 0
    if os.path.isdir(out_dir) == False:
        os.mkdir(out_dir)
    # open the map file
    w_bij = open(bij_file, 'w')
    # read the graph into adj
    adj = read_graph(input_dir + "/" + input_file)
    # perform the optimal assignment
    adj = opt_assin(adj, 0, 1, w_bij)
    # condense the graph
    adj = condense(adj, 0, 1)
    adj = condense(adj, 1, 0)
    # perform the optimal assignment again
    adj = opt_assin(adj, 0, 1, w_bij)
    # vis_graph(adj, "after_condense.dot")
    return adj
Example #9
def main():
    graphs = [
        #        graph.create_ex_graph(),

        #        graph.create_random_graph(100, 0.05),
        #        graph.create_random_graph(500, 0.01),
        #        graph.create_random_graph(1000, 0.005),

        #        graph.create_scale_free_graph(100),
        #        graph.create_scale_free_graph(500),
        #        graph.create_scale_free_graph(1000),
        graph.read_graph('data/enron.txt', 'enron-5'),
        #        graph.read_graph('data/hepth.txt', 'hepth'),
    ]

    with open('out/times.csv', 'a') as f:
        for g in graphs:
            compute_graph(g, f)
Example #10
def compute_tps(filename):
    """ Write your implementation to create a topological sort here. 
    Store your answer in tps"""
    """ <filename> is the name of the input file containing graph information:
    you need to read it in and perform the topological sort, saving the results
    in tps, then use write_tps_to_file() to output it to a file called output_<filename>"""
    """ Read the file into a dictionary representation """
    g = graph.read_graph(filename)

    import time
    start_time = time.time()
    """ Find in degree for each node in the graph """
    in_deg = {a: 0 for a in g}
    for a in g:
        for b in g[a]:
            in_deg[b] += 1
    """ If a node in the graph has in degree of 0, add it to the queue """
    from collections import deque
    q = deque()
    for c in in_deg:
        if in_deg.get(c) == 0:
            q.appendleft(c)
    """ Add nodes that have 0 in degree to the tps """
    tps = []
    while q:
        node = q.pop()
        tps.append(str(node))
        """ For the nodes connected to the added node, subtract 1 from their in degree """
        for d in g[node]:
            in_deg[d] -= 1
            """ If in degree is now 0, add to the queue for processing """
            if in_deg.get(d) == 0:
                q.appendleft(d)

    end_time = time.time()

    write_tps_to_file(tps, filename)

    print("Ran in: {:.5f} secs".format(end_time - start_time))
Example #11
def clean_graph_and_index(graph_input_file, index_input_file):
    graph = read_graph(graph_input_file, integer = False)
    index = read_index(index_input_file, integer = False, also_inverted = False)
    page_to_remove = []
    
    #check for pages present in index but not in graph, if there are, remove those pages from the index
    for page in index:
        if page not in graph:
            page_to_remove.append(page)
    for page in page_to_remove:
        del index[page]

    #check for pages present in graph but not in index, if there are, remove those pages from the graph
    node_to_remove = []
    for node in graph:
        if node not in index:
            node_to_remove.append(node)

    for node in node_to_remove:
        delete_node(graph, node)

    write_index(index, index_input_file+'_cleaned')
    write_graph(graph, graph_input_file+'_cleaned')
Example #12
def clean_graph_and_index(graph_input_file, index_input_file):
    graph = read_graph(graph_input_file, integer=False)
    index = read_index(index_input_file, integer=False, also_inverted=False)
    page_to_remove = []

    #check for pages present in index but not in graph, if there are, remove those pages from the index
    for page in index:
        if page not in graph:
            page_to_remove.append(page)
    for page in page_to_remove:
        del index[page]

    #check for pages present in graph but not in index, if there are, remove those pages from the graph
    node_to_remove = []
    for node in graph:
        if node not in index:
            node_to_remove.append(node)

    for node in node_to_remove:
        delete_node(graph, node)

    write_index(index, index_input_file + '_cleaned')
    write_graph(graph, graph_input_file + '_cleaned')
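delete_node is not shown in either version of clean_graph_and_index. A hedged sketch of what such a helper could do for an adjacency-dict graph (drop the node's own entry, then drop any references to it in the remaining adjacency lists; the list-of-neighbours representation is an assumption):

def delete_node_sketch(graph, node):
    graph.pop(node, None)  # remove the node's own adjacency entry
    for other in graph:
        # drop any edges that still point at the removed node
        graph[other] = [n for n in graph[other] if n != node]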
Example #13
    def __init__(self,
                 height: float = 500,
                 width: float = 500,
                 v_diameter: float = 15,
                 output_file: str = None,
                 input_file: str = None,
                 verbose: bool = True,
                 submit_fn: Callable[[str, any], any] = None,
                 submit_button_text: str = 'Submit',
                 submit_button_tooltip: str = ''):
        self.output_file = output_file or 'graph.txt'
        self.verbose = verbose
        self.submit_fn = submit_fn
        self.submit_button_text = submit_button_text
        self.submit_button_tooltip = submit_button_tooltip
        self.w = tk.Tk()
        self.c = tk.Canvas(height=height, width=width, background='white')
        self.height = height
        self.width = width
        self.v_diameter = v_diameter
        self.remove_mode = False
        self.selected = None
        self.vertex_id = dict()
        self.label_id = dict()
        self.edge_id = dict()

        self.background = None
        self.moving_edge = None
        self.clear()
        self.g = Graph() if not input_file else read_graph(input_file)
        self.init_bindings()
        self.init_buttons()

        self.c.pack(fill="both", expand=True)
        if input_file:
            self.random_paint_graph()
        self.w.mainloop()
Example #14
    # Some agents spontaneously want cars
    if random.random() < PROB_WANT_CAR:
        this_agent.desire_car()

    this_agent.execute_sched(graph)
    

parser = argparse.ArgumentParser(description="process graph and station function")
parser.add_argument('nodes', metavar='NODES', type=str, help='Path to node file')
parser.add_argument('edges', metavar='EDGES', type=str, help='Path to edge file')
parser.add_argument('cities', metavar='CITIES', type=str, help='Path to cities file')
args = parser.parse_args()

p = Pool(200)

graph, cities = graph.read_graph(args.nodes, args.edges, args.cities)

neighborhoods = {}
work_areas = {}
city_dict = {}
num_nodes = len(graph.nodes)

for node in graph.nodes:
    neighborhoods[node] = []
    work_areas[node] = []
    if node.city in city_dict:
        city_dict[node.city].append(node)
    else:
        city_dict[node.city] = [node]

#create agents
Example #15
            line = line.rstrip()
            rank = float(line)
            rankings.append(rank)
    return rankings

def write_rankings(rankings, rankfile):
    with open(rankfile, 'w') as f:
        for i in range(len(rankings)):
            f.write(str(rankings[i])+"\n")

if __name__ == "__main__":
    from graph import read_graph, get_transition_matrix, get_inverse_transition_matrix, get_reverse_graph
    from sys import argv
    from numpy import matrix
    from time import clock
    graph = read_graph(argv[1], integer = True)
    print("Finished Read graph")

    step = 1000
    confidence = 0.00000001

    # transition_matrix = matrix(get_transition_matrix(graph))
    # print("Finished Get transition_matrix")
    start = clock()
    prTime, prRank = pageRank(graph,  step = step, confidence = confidence, verbose = True)
    print("Finished classical pageRank")
    end = clock()
    print("Tempo: " + str(end-start))
    write_rankings(prRank, argv[1]+'_pagerank')
    reverse_graph = get_reverse_graph(graph)
    inv_step, inv_scores = pageRank(reverse_graph, step = step, confidence = confidence, verbose = True)
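pageRank itself is not reproduced in this listing. A minimal power-iteration sketch of the classical computation it presumably performs, with parameters named after the arguments used above (step, confidence, damping factor s); this assumes the graph dict maps every node to a list of successors and that every successor also appears as a key, and the real function's signature and return value (a pair, above) may differ:

def pagerank_sketch(graph, s=0.85, step=1000, confidence=1e-8):
    n = len(graph)
    rank = {v: 1.0 / n for v in graph}
    for _ in range(step):
        new_rank = {v: (1.0 - s) / n for v in graph}
        for v, out in graph.items():
            if out:
                share = s * rank[v] / len(out)
                for w in out:
                    new_rank[w] += share
            else:
                # dangling node: spread its mass uniformly over all nodes
                for w in new_rank:
                    new_rank[w] += s * rank[v] / n
        converged = sum(abs(new_rank[v] - rank[v]) for v in graph) < confidence
        rank = new_rank
        if converged:
            break
    return rank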
Example #16
    def assertCycle(self, out, graph_fname, msg):
        try:
            return parse_cycle(out, graph.read_graph(graph_fname))
        except Exception as e:
            raise AssertionError(msg) from e
Example #17
            tile_data['gc'] = None
            path1 = model_utils.Path(tile_data['gc'],
                                     tile_data,
                                     start_loc=start_loc)

            compute_targets = SAVE_EXAMPLES or FOLLOW_TARGETS
            result = eval([path1],
                          m,
                          session,
                          start_points,
                          save=SAVE_EXAMPLES,
                          compute_targets=compute_targets,
                          follow_targets=FOLLOW_TARGETS)

            save_path = output_fname + "{}.out.graph".format(REGION)
            path1.graph.save(save_path)

        else:
            g = graph.read_graph(EXISTING_GRAPH_FNAME)
            r = g.bounds()
            tile_data = {
                'region': REGION,
                'rect': r.add_tol(WINDOW_SIZE / 2),
                'search_rect': r,
                'cache': cache,
                'starting_locations': [],
            }
            path = model_utils.Path(None, tile_data, g=g)
            for vertex in g.vertices:
                path.prepend_search_vertex(vertex)
Example #18
            q.appendleft(c)
    """ Add nodes that have 0 in degree to the tps """
    tps = []
    while q:
        node = q.pop()
        tps.append(str(node))
        """ For the nodes connected to the added node, subtract 1 from their in degree """
        for d in g[node]:
            in_deg[d] -= 1
            """ If in degree is now 0, add to the queue for processing """
            if in_deg.get(d) == 0:
                q.appendleft(d)

    end_time = time.time()

    write_tps_to_file(tps, filename)

    print("Ran in: {:.5f} secs".format(end_time - start_time))


if __name__ == '__main__':
    """ Write code here to run compute_tps for your testing purposes"""
    import sys
    filename = sys.argv[1]
    compute_tps(filename)
    f = "output_" + filename
    s = read_topo_sort_from_file(f)
    tps = map(int, s.split())
    g = graph.read_graph(filename)
    """ print(check_TPS(g, tps)) """
Example #19
import sys
from graph import read_graph

adj = read_graph(sys.argv[1])

found_disconnected = False
for key in adj[0]:
    idx = int(key) - 1 if int(key) % 2 == 1 else int(key) + 1
    if len(adj[0][key]) > 1 and str(idx) not in adj[0] and str(idx) not in adj[1]:
        print "yes not in the same component", key
        found_disconnected = True
if not found_disconnected:
    print "no, they are all in the same component"
Example #20
import os
import sys
from graph import read_graph
from graph import vis_graph
from graph import delete_reg_CC

#construct the graph
#it's a bi-direction graph


####output number of reg CC to a specific file
def write_interim_result(num_reg_CC, interim_file):
    with open(interim_file, "w") as writer:
        writer.write("num_reg_CC: " + str(num_reg_CC) + "\n")


####main entrance####
#####################
input_file = sys.argv[1]
out_dir = sys.argv[2]
interim_file = sys.argv[3]
vis_file = sys.argv[4] + ".dot"

adj = read_graph(input_file)
#vis_graph(adj, vis_file)
num_reg_CC, count = delete_reg_CC(adj, out_dir, "")
write_interim_result(num_reg_CC, interim_file)

##additional debug info
#adj = read_graph("/Users/zhaomingyin/Dropbox/research/code/optec-code/data/dist/irrCC/200_0.1_0.1_0.1/1.0_0.1_0.1_0/0.gr")
#vis_graph(adj, "/Users/zhaomingyin/Dropbox/research/code/optec-code/vis0.dot")
Example #21
from dataset_building import generate_pages_contents
from index import write_index
from graph import read_graph
import sys

if __name__ == "__main__":
    input_file = sys.argv[1]
    graph = read_graph(input_file, integer=False)
    nodes = list(graph.keys())
    index = generate_pages_contents(nodes, sys.argv[1])
    write_index(index, sys.argv[1] + '_index')
    #write_index_from_graph('toy_graph', 'toy_index', num_cores = 4)
Example #22
from dataset_building import generate_pages_contents
from index import write_index
from graph import read_graph
import sys

if __name__ == "__main__":
    input_file = sys.argv[1]
    graph = read_graph(input_file, integer = False)
    nodes = list(graph.keys())
    index = generate_pages_contents(nodes, sys.argv[1])
    write_index(index, sys.argv[1]+'_index')
    #write_index_from_graph('toy_graph', 'toy_index', num_cores = 4)
Example #23
out_dir = sys.argv[2]
opt_tmp_file = sys.argv[3]
algo = sys.argv[4]
vis_folder = sys.argv[5]
bij_file = sys.argv[6]
dict_dir = sys.argv[7]

print opt_tmp_file

if os.path.isdir(out_dir) == False:
    os.mkdir(out_dir)
# list all files in the input folder
files = [f for f in listdir(input_dir) if isfile(join(input_dir, f))]
print files
small_cc_num = 0
for f in files:
    adj = optimal_assign_n_condense(input_dir, f, out_dir, bij_file)
    small_cc_num += compute_dis_n_output(adj, algo, f, out_dir)

files = [f for f in listdir(out_dir) if isfile(join(out_dir, f))]
print files
for f in files:
    adj = read_graph(out_dir + "/" + f)
    # when rename, there should be a map file stored
    new_adj = rename_adj(adj, out_dir + "/" + f, dict_dir + "/" + f)
    # vis_graph(new_adj, "after_rename.dot")

# append the number of small irregular components to the end of the file
with open(opt_tmp_file, 'a') as writer:
    writer.write("num_small_irr_CC: " + str(small_cc_num) + "\n")
Example #24
from ranking import read_rankings, write_rankings, pageRank
from graph import read_graph
from sys import argv
from time import clock

if __name__ == "__main__":
    graph = read_graph(argv[1])
    s = float(argv[2])
    step = int(argv[3])
    confidence = float(argv[4])
    start = clock()
    prTime, prRank = pageRank(graph, step = step, confidence = confidence, s = s, verbose = True)
    output_file_name = argv[1]+'_pagerank_s_'+str(s)+'_step_'+str(step)+'_confidence_'+str(confidence)
    write_rankings(prRank, output_file_name)
    end = clock()
    print("Time: " + str(end-start))
Example #25
import geom, graph

graph_dir = "/home/ck/data/cities/out/graph_infer/roadtracer-M/"
graph_name_list = [
    "amsterdam", "chicago", "denver", "la", "montreal", "paris", "pittsburgh",
    "saltlakecity", "san diego", "tokyo", "toronto", "vancouver"
]
for graph_name in graph_name_list:
    print("current graph:" + graph_name)
    ori_graph = graph.read_graph(graph_dir + graph_name + ".infer.graph")
    total_number = 0
    total_connect = 0
    new_edge_list = []
    seg_pair_list = []
    for i, edge in enumerate(ori_graph.edges):
        if edge.src.id == 0:
            length = edge.segment().length()
            if length > 70:
                seg_pair_list.append(edge.dst)
                if len(seg_pair_list) == 2:
                    edge1 = graph.Edge(len(new_edge_list), seg_pair_list[0],
                                       seg_pair_list[1])
                    if edge1.segment().length() < 61:
                        print("connect with length {}".format(
                            edge1.segment().length()))
                        total_connect += 1
                        new_edge_list.append(edge1)
                        new_edge_list.append(
                            graph.Edge(
                                len(new_edge_list) + 1, seg_pair_list[1],
                                seg_pair_list[0]))
Example #26
File: search.py  Project: egorps/progfun
    return result


def dfs(g, v, result=None):
    if not result:
        result = SearchResult(v)

    result.discover(v)

    p = g.edges[v]
    while p:
        y = p.y
        if not result.is_discovered(y):
            result.set_parent(y, v)
            result = dfs(g, y, result)

        p = p.next
    result.process(v)

    return result

if __name__ == '__main__':
    g = graph.read_graph('./graph.txt')
    print "Graph from file:\n", g
    bfs_result = bfs(g, 0)
    print "BFS Path to 7:", bfs_result.get_path(7)

    dfs_result = dfs(g, 0)
    print "DFS Path to 7:", dfs_result.get_path(7)
Example #27
from ranking import read_rankings, write_rankings, spamMass
from graph import read_graph
from sys import argv
from time import clock

if __name__ == "__main__":
    graph = read_graph(argv[1])
    pagerank = read_rankings(argv[2])
    trustRank = read_rankings(argv[3])
    spamMass_ranks = spamMass(graph, prRank=pagerank, trRank=trustRank)
    write_rankings(spamMass_ranks, argv[1] + '_spamMass')
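spamMass is not shown in this listing. In the usual formulation, a page's relative spam mass is the share of its PageRank that its trust-based score cannot account for. A hedged sketch along those lines, operating only on the two rank lists and ignoring the graph argument the real function also receives (its actual semantics may differ):

def spam_mass_sketch(prRank, trRank):
    # Relative spam mass: (PageRank - TrustRank) / PageRank, or 0.0 when
    # the PageRank itself is 0.
    return [(pr - tr) / pr if pr > 0 else 0.0
            for pr, tr in zip(prRank, trRank)]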
Example #28
            rankings.append(rank)
    return rankings


def write_rankings(rankings, rankfile):
    with open(rankfile, 'w') as f:
        for i in range(len(rankings)):
            f.write(str(rankings[i]) + "\n")


if __name__ == "__main__":
    from graph import read_graph, get_transition_matrix, get_inverse_transition_matrix, get_reverse_graph
    from sys import argv
    from numpy import matrix
    from time import clock
    graph = read_graph(argv[1], integer=True)
    print("Finished Read graph")

    step = 1000
    confidence = 0.00000001

    # transition_matrix = matrix(get_transition_matrix(graph))
    # print("Finished Get transition_matrix")
    start = clock()
    prTime, prRank = pageRank(graph,
                              step=step,
                              confidence=confidence,
                              verbose=True)
    print("Finished classical pageRank")
    end = clock()
    print("Tempo: " + str(end - start))