Example 1
def load_qrep(fn):
    assert ".pkl" in fn
    with open(fn, "rb") as f:
        query = pickle.load(f)

    query["subset_graph"] = \
            nx.OrderedDiGraph(json_graph.adjacency_graph(query["subset_graph"]))
    query["join_graph"] = json_graph.adjacency_graph(query["join_graph"])

    return query
def update_bad_qrep(qrep):
    from sql_rep.query import parse_sql
    qrep = parse_sql(qrep["sql"],
                     None,
                     None,
                     None,
                     None,
                     None,
                     compute_ground_truth=False)
    qrep["subset_graph"] = \
            nx.OrderedDiGraph(json_graph.adjacency_graph(qrep["subset_graph"]))
    qrep["join_graph"] = \
            nx.OrderedDiGraph(json_graph.adjacency_graph(qrep["join_graph"]))
    return qrep
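These loaders assume `pickle`, `networkx as nx`, and `from networkx.readwrite import json_graph` are imported at module level. Note also that `nx.OrderedDiGraph` was removed in NetworkX 3.0. A minimal sketch of an equivalent loader under those assumptions (the function name is illustrative), using a plain `nx.DiGraph`, which preserves insertion order on modern Python:

import pickle

import networkx as nx
from networkx.readwrite import json_graph

def load_qrep_compat(fn):
    """Variant of load_qrep for NetworkX >= 3.0, where OrderedDiGraph no longer exists."""
    assert ".pkl" in fn
    with open(fn, "rb") as f:
        query = pickle.load(f)

    # a plain DiGraph keeps insertion order, which is what OrderedDiGraph provided
    query["subset_graph"] = nx.DiGraph(json_graph.adjacency_graph(query["subset_graph"]))
    query["join_graph"] = json_graph.adjacency_graph(query["join_graph"])
    return query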
Example 3
def construct_graph_from_json(jsonData):
    """Construct initial graph from networkx.json_graph adjacency json format

    :jsonData: adjacency_graph data in json format
    :returns: networkx graph
    """
    return json_graph.adjacency_graph(jsonData)
def clean_json_adj_load(file_name):
    with open(file_name) as d:
        json_data = json.load(d)
    H = json_graph.adjacency_graph(json_data)
    for edge_here in H.edges():
        del (H[edge_here[0]][edge_here[1]]["id"])
    return H
def json_graph_list_load(file):
    with open_filename(file, 'r') as f:
        js_graph_list = json.load(f)
    return [
        remove_id_from_json_graphs(json_graph.adjacency_graph(js_graph))
        for js_graph in js_graph_list
    ]
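The clean-up helpers above delete an "id" attribute that their source files attach to every edge. A slightly more defensive variant of the same idea (the function name and file argument are illustrative), using `edges(data=True)` so a missing attribute does not raise:

import json

from networkx.readwrite import json_graph

def load_adjacency_without_edge_ids(file_name):
    with open(file_name) as f:
        H = json_graph.adjacency_graph(json.load(f))
    # drop the per-edge "id" field if present, leaving other edge data intact
    for u, v, data in H.edges(data=True):
        data.pop("id", None)
    return H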
Example 6
def get_graph():

    with open("sectorGraph/sectorGraphProcessed.json") as data_file:
       data = json.load(data_file)

    sectorGraph = json_graph.adjacency_graph(data)
    return sectorGraph
def test_multigraph(self):
    G = nx.MultiGraph()
    G.add_edge(1, 2, key='first')
    G.add_edge(1, 2, key='second', color='blue')
    H = adjacency_graph(adjacency_data(G))
    nx.is_isomorphic(G, H)
    assert H[1][2]['second']['color'] == 'blue'
Example 8
def construct_graph_from_json(json_file, pop_col=None, area_col=None, district_col=None):
    """Construct initial graph from networkx.json_graph adjacency JSON format.

    :json_file: Path to JSON file.
    :returns: NetworkX graph.

    """
    with open(json_file) as f:
        data = json.load(f)

    g = json_graph.adjacency_graph(data)

    # NetworkX 2.x reordered set_node_attributes to (G, values, name); the
    # keyword form below is unambiguous in both the 1.x and 2.x APIs.
    networkx.set_node_attributes(g, name='population', values=0)
    networkx.set_node_attributes(g, name='areas', values=0)
    networkx.set_node_attributes(g, name='CD', values=0)

    # add specific values for column names as specified.
    if pop_col:
        p_col = networkx.get_node_attributes(g, pop_col)
        networkx.set_node_attributes(g, name='population', values=p_col)

    if area_col:
        a_col = networkx.get_node_attributes(g, area_col)
        networkx.set_node_attributes(g, name='areas', values=a_col)

    if district_col:
        cd_col = networkx.get_node_attributes(g, district_col)
        networkx.set_node_attributes(g, name='CD', values=cd_col)

    return g
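A hedged usage sketch for the loader above; the file path and the column names (`TOTPOP`, `ALAND`, `CD`) are illustrative assumptions, not values taken from the original source:

# hypothetical call: maps the file's TOTPOP/ALAND/CD columns onto the
# population/areas/CD attributes the rest of the pipeline expects
g = construct_graph_from_json(
    "dual_graphs/nc_counties.json",   # assumed path
    pop_col="TOTPOP",
    area_col="ALAND",
    district_col="CD",
)
print(g.number_of_nodes(), g.number_of_edges())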
def clean_json_adj_load(file_name):
    with open(file_name) as d:
        json_data = json.load(d)
    H = json_graph.adjacency_graph(json_data)
    for edge_here in H.edges():
        del(H[edge_here[0]][edge_here[1]]["id"])
    return H
def load_graphs_from_mongo(cls, project_name, session_uid):
    mdb = pymongo.MongoClient('mongodb://127.0.0.1:27017')
    docs = mdb.config['graph'].find({'project_name': project_name, 'session_uid': session_uid})
    for doc in docs:
        doc['graph'] = json_graph.adjacency_graph(pickle.loads(bz2.decompress(doc['graph'])))
        cls.ADJ_GRAPHS[doc['scene']] = doc
    mdb.close()
Example 11
def get_graph():

    with open("sectorGraph/sectorGraphProcessed.json") as data_file:
        data = json.load(data_file)

    sectorGraph = json_graph.adjacency_graph(data)
    return sectorGraph
def lambda_handler(event, context):
    if 'body' in event.keys():
        event = json.loads(event["body"])
    bucket = "districtr"
    state = event["state"].lower().replace(" ", "_")
    units = event["units"].lower().replace(" ", "")
    # district = event["dist_id"]
    plan_assignment = event["assignment"]
    keys = set(plan_assignment.keys())
    key = "dual_graphs/{}_{}.json".format(state, units)

    try:
        data = s3.get_object(Bucket=bucket, Key=key)
        g = json_graph.adjacency_graph(json.load(data['Body']))
        graph = Graph(g)
        assignment = {
            n: plan_assignment[n] if n in keys else -1
            for n in graph.nodes()
        }
        part = GeographicPartition(graph, assignment)
        return {'statusCode': 200, 'body': json.dumps(plan_evaluation(part))}

    except Exception as e:
        print(e)
        return {
            "error":
            "This state/units ({}, {}) is not supported".format(
                event["state"], event["units"])
        }
def lambda_handler(event, context):
    # TODO implement

    if 'body' in event.keys():
        event = json.loads(event["body"])
    bucket = "districtr"
    state = event["state"].lower().replace(" ", "_")
    units = event["units"].lower().replace(" ", "")
    # district = event["dist_id"]
    plan_assignment = event["assignment"]
    parts = set(plan_assignment.values())
    key = "dual_graphs/{}_{}.json".format(state, units)

    try:
        data = s3.get_object(Bucket=bucket, Key=key)
        graph = json_graph.adjacency_graph(json.load(data['Body']))
        return {
            'statusCode':
            200,
            'body':
            json.dumps({
                part: district_contiguity(
                    [n for n, p in plan_assignment.items() if p == part],
                    graph)
                for part in parts
            }),
        }

    except Exception as e:
        print(e)
        raise e
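Both handlers lean on names the snippets never define: a module-level S3 client, the `json_graph` import, GerryChain's `Graph` and `GeographicPartition`, and the `plan_evaluation`/`district_contiguity` helpers. A minimal sketch of the preamble they appear to assume (the helper functions are left out, since their implementations are not shown):

import json

import boto3
from networkx.readwrite import json_graph
from gerrychain import Graph, GeographicPartition

# client created at module scope so warm Lambda invocations reuse the connection
s3 = boto3.client("s3")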
def set_up_graph(link):
    """
    The function used to convert online json file to adjacency graph
    Parameters:
        link (string): the link for online json file
    Returns:
        Graph: graph with the information
        mean: the mean value used for partition
    """
    link = "https://people.csail.mit.edu/ddeford//COUNTY/COUNTY_13.json"
    r = requests.get(link)
    data = json.loads(r.content)
    g = json_graph.adjacency_graph(data)
    graph = Graph(g)
    graph.issue_warnings()

    horizontal = []
    node_degree = []

    # find the nodes with degree 1 or 2 and record them for removal
    for node in graph.nodes():
        graph.nodes[node]["pos"] = [
            graph.nodes[node]['C_X'], graph.nodes[node]['C_Y']
        ]
        horizontal.append(graph.nodes[node]['C_X'])
        if graph.degree(node) == 1 or graph.degree(node) == 2:
            node_degree.append(node)

    # remove nodes with degree 1 or 2 since they distort the partition outcome
    for i in node_degree:
        graph.remove_node(i)

    # calculate mean value for partition
    mean = sum(horizontal) / len(horizontal)
    return graph, mean
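A hedged usage sketch for `set_up_graph`: splitting the remaining nodes by their `C_X` coordinate against the returned mean reflects the docstring's stated purpose, and the link passed in is the one hard-coded inside the function:

graph, mean = set_up_graph("https://people.csail.mit.edu/ddeford//COUNTY/COUNTY_13.json")

# assign each remaining county to one of two parts by its x-coordinate
assignment = {
    node: 0 if graph.nodes[node]["C_X"] <= mean else 1
    for node in graph.nodes()
}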
Example 15
def test_multigraph(self):
    G = nx.MultiGraph()
    G.add_edge(1, 2, key="first")
    G.add_edge(1, 2, key="second", color="blue")
    H = adjacency_graph(adjacency_data(G))
    nx.is_isomorphic(G, H)
    assert H[1][2]["second"]["color"] == "blue"
Example 16
def get_flows(self):
    res = self.conn.get('flows')
    if res is None:
        return
    res = json.loads(res)
    for name, flow in res.iteritems():
        self.flows[name] = json_graph.adjacency_graph(flow, directed=True)
Example 17
def find_connection(graph_file, name1, name2):
    # initialize the result up front so the final return cannot raise a NameError
    connection_res = {}
    try:
        with open(graph_file, 'r') as json_file:
            restore_graph = json_graph.adjacency_graph(json.load(json_file))
            connection = nx.Graph()

            relation_path = nx.shortest_path(restore_graph,
                                             source=name1,
                                             target=name2)
            #name right to left
            relation_rtl = [bidialg.get_display(y) for y in relation_path]
            nx.add_path(connection, relation_rtl)

            # add movie which connect
            for x in range(0, len(relation_path) - 1):
                movie_name = bidialg.get_display(restore_graph[
                    relation_path[x]][relation_path[x + 1]]["שם הסרט"])
                connection[relation_rtl[x]][relation_rtl[x + 1]][
                    bidialg.get_display("שם הסרט")] = movie_name

            draw_graph(connection, True, relation_rtl[0],
                       relation_rtl[len(relation_rtl) - 1])

            connection_res = {}
            for x in range(0, len(relation_path) - 1):
                e = (relation_path[x], relation_path[x + 1])
                connection_res[e] = restore_graph.get_edge_data(*e)["שם הסרט"]

    except FileNotFoundError:
        print("Failed open json file")
    except:
        print("There is no connection between ", name1, " and ", name2)

    return connection_res
Example 18
def load_sql_rep(fn, dummy=None):
    assert ".pkl" in fn
    try:
        with open(fn, "rb") as f:
            query = pickle.load(f)
    except:
        print(fn + " failed to load...")
        exit(-1)

    query["subset_graph"] = \
            nx.OrderedDiGraph(json_graph.adjacency_graph(query["subset_graph"]))
    query["join_graph"] = json_graph.adjacency_graph(query["join_graph"])
    if "subset_graph_paths" in query:
        query["subset_graph_paths"] = \
                nx.OrderedDiGraph(json_graph.adjacency_graph(query["subset_graph_paths"]))

    return query
def readGraph(self):
    with open('graph.json', 'r') as f:
        data = json.load(f)
        G = json_graph.adjacency_graph(data)
        for i in range(self.n):
            G.nodes[i]["payoff"] = 0
            G.nodes[i]["adopted"] = False
        self.G = G
Example 20
def readGraph(self):
    with open('graph.json', 'r') as f:
        data = json.load(f)
        G = json_graph.adjacency_graph(data)
        for i in range(self.n):
            G.nodes[i]["payoff"] = 0
            G.nodes[i]["products"] = set([0, 1])
        self.G = G
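Both `readGraph` variants expect a local `graph.json` in the adjacency format, with integer node labels 0..n-1. A minimal sketch of writing such a file with `adjacency_data`, using a small random graph as a stand-in for the real network:

import json

import networkx as nx
from networkx.readwrite import json_graph

# stand-in network; the real simulations presumably use their own topology
G = nx.erdos_renyi_graph(n=50, p=0.1, seed=42)

with open("graph.json", "w") as f:
    json.dump(json_graph.adjacency_data(G), f)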
Example 21
    def test_graph_attributes(self):
        G = nx.path_graph(4)
        G.add_node(1, color="red")
        G.add_edge(1, 2, width=7)
        G.graph["foo"] = "bar"
        G.graph[1] = "one"

        H = adjacency_graph(adjacency_data(G))
        assert H.graph["foo"] == "bar"
        assert H.nodes[1]["color"] == "red"
        assert H[1][2]["width"] == 7

        d = json.dumps(adjacency_data(G))
        H = adjacency_graph(json.loads(d))
        assert H.graph["foo"] == "bar"
        assert H.graph[1] == "one"
        assert H.nodes[1]["color"] == "red"
        assert H[1][2]["width"] == 7
Example 22
def from_json(cls, json_file):
    """Load a graph from a JSON file in the NetworkX json_graph format.

    :param json_file: Path to JSON file.
    :return: Graph
    """
    with open(json_file) as f:
        data = json.load(f)
    g = json_graph.adjacency_graph(data)
    return cls(g)
def graph_from_url():
    link = input("Put graph link: ")
    r = requests.get(link)
    data = json.loads(r.content)
    g = json_graph.adjacency_graph(data)
    graph = Graph(g)
    graph.issue_warnings()

    return graph
    def test_graph_attributes(self):
        G = nx.path_graph(4)
        G.add_node(1, color='red')
        G.add_edge(1, 2, width=7)
        G.graph['foo'] = 'bar'
        G.graph[1] = 'one'

        H = adjacency_graph(adjacency_data(G))
        assert H.graph['foo'] == 'bar'
        assert H.nodes[1]['color'] == 'red'
        assert H[1][2]['width'] == 7

        d = json.dumps(adjacency_data(G))
        H = adjacency_graph(json.loads(d))
        assert H.graph['foo'] == 'bar'
        assert H.graph[1] == 'one'
        assert H.nodes[1]['color'] == 'red'
        assert H[1][2]['width'] == 7
Example 25
def create_subgraph(poi_input_path: str, graph_input_path: str, subgraph_output_path: str):
    """
    Read csv in poi_input_path containing point of interests (POIs) {name, lat, lon, id}
    Read json file in graph_input_path containing the complete graph (where the POIs are some of the nodes)
    Create subgraph for given POIs and save it as JSON in subgraph_output_path

    Args:
        poi_input_path (str): path of the csv containing POI having rows {name, lat, lon, id}
        graph_input_path (str): path to networkx json_graph.adjacency_data
        subgraph_output_path (str): path to write networkx subgraph as json_graph.adjacency_data
    """
    # read input
    node_ids = []
    print(f'reading POI file {poi_input_path}')
    with open(poi_input_path) as f:
        reader = csv.reader(f)
        for row in reader:
            assert len(row) >= 5
            node_id = row[4]
            node_ids.append(int(node_id))
    
    node_ids = list(set(node_ids))
    node_ids.sort()

    data = {}
    print(f'reading graph file {graph_input_path}') 
    with open(graph_input_path) as f:
        data = json.load(f)
    
    # create subgraph
    print(f'creating subgraph')
    G = json_graph.adjacency_graph(data)    
    len_node_ids = len(node_ids)
    # H = nx.to_undirected(G)    
    # num_combinations = math.factorial(len_node_ids) / (2*math.factorial((len_node_ids-2)))
    # print(f'* getting the shortest paths of {num_combinations} combinations')
    # futures = []
    # with concurrent.futures.ThreadPoolExecutor(max_workers=mp.cpu_count()) as executor:
    #     for nodes in combinations(node_ids, r=2):
    #         futures.append(executor.submit(nx.shortest_path, H, *nodes))
    num_permutations = math.factorial(len_node_ids) / math.factorial((len_node_ids-2))
    print(f'* getting the shortest paths of {num_permutations} permutations')
    futures = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=mp.cpu_count()) as executor:
        for nodes in permutations(node_ids, r=2):
            futures.append(executor.submit(nx.shortest_path, G, *nodes))
    min_nodes = set()
    for future in futures:
        nodes = future.result()
        min_nodes.update(nodes)
    print('* create subgraph induced by the nodes on those shortest paths')
    I = nx.subgraph(G, min_nodes)
    subgraph_data = json_graph.adjacency_data(I)
    with open(subgraph_output_path, 'w+') as f:
        json.dump(subgraph_data, f)
    print(f'subgraph saved in {subgraph_output_path}')
def graph_from_url(link):
    r = requests.get(url=link)
    data = json.loads(r.content)
    g = json_graph.adjacency_graph(data)
    graph = Graph(g)
    graph.issue_warnings()
    pos = {}
    for node in graph.nodes():
        pos[node] = [graph.nodes[node]['C_X'], graph.nodes[node]['C_Y']]
    return graph
Example 27
def _user_crowds(crowds):
    "create a mapping from user ids to cluster ids given cluster_crowds"
    crowds = (
        json_graph.adjacency_graph(c)
        for c in crowds
    )
    return {
        user:crowd.graph['id']
        for crowd in crowds
        for user in crowd
    }
Example 28
def construct_graph_from_json(json_file):
    """Construct initial graph from networkx.json_graph adjacency JSON format.

    :json_file: Path to JSON file.
    :returns: NetworkX graph.

    """
    with open(json_file) as f:
        data = json.load(f)

    return json_graph.adjacency_graph(data)
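One caveat for simple loaders like the one above: `json_graph.adjacency_graph` defaults to `multigraph=True`, and any `directed`/`multigraph` flags stored in the JSON override the keyword arguments. A small sketch of the default behaviour:

import networkx as nx
from networkx.readwrite import json_graph

data = json_graph.adjacency_data(nx.path_graph(3))
H = json_graph.adjacency_graph(data)          # data says multigraph=False -> plain Graph
print(H.is_multigraph())                      # False

data.pop("multigraph")                        # without the flag the default kicks in
H = json_graph.adjacency_graph(data)
print(H.is_multigraph())                      # True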
Example 29
def graph_from_url():
    # link = input("Put graph link: ")
    link = "https://people.csail.mit.edu/ddeford//COUNTY/COUNTY_37.json"  # County
    # link = "https://people.csail.mit.edu/ddeford//COUSUB/COUSUB_37.json"  # COUNTY SUB
    r = requests.get(link)
    data = json.loads(r.content)
    g = json_graph.adjacency_graph(data)
    graph = Graph(g)
    graph.issue_warnings()

    return graph
Example 30
def from_json(cls, json_file):
    """Load a graph from a JSON file in the NetworkX json_graph format.

    :param json_file: Path to JSON file.
    :return: Graph
    """
    with open(json_file) as f:
        data = json.load(f)
    g = json_graph.adjacency_graph(data)
    graph = cls.from_networkx(g)
    graph.issue_warnings()
    return graph
Example 31
def get_graphs():
	graphs = {}
	fs = (os.walk('Graphs').next())[2]
	for f in fs:
		if '.json' in f:
			with open(('./Graphs/' + f)) as fo:
				try:
					g = json_graph.adjacency_graph(json.load(fo), multigraph=True)
					graphs[(f)] = g
				except IOError:
					print 'not a Lobster graph: ', f
	return graphs
Example 32
    def get_graph(self, name):

        result = self.conn.get(name)
        print result
        if result is None:
            graph = nx.Graph()
        else:
            graph = json_graph.adjacency_graph(json.loads(result))
        print graph.edges()
        if name == 'main':
            self.graph = graph
        return graph
Example 33
def find_all_connections(graph_file, name1, name2, maximal_length):
    # initialize the result up front so the final return cannot raise a NameError
    connections_res = []

    try:
        with open(graph_file, 'r') as json_file:
            restore_graph = json_graph.adjacency_graph(json.load(json_file))
            path_iterator = nx.all_simple_paths(restore_graph,
                                                source=name1,
                                                target=name2,
                                                cutoff=maximal_length)

            while True:
                connection = nx.Graph()

                relation_path = next(path_iterator)

                #name right to left
                relation_rtl = [bidialg.get_display(y) for y in relation_path]
                nx.add_path(connection, relation_rtl)

                # add movie which connect
                for x in range(0, len(relation_path) - 1):
                    movie_name = bidialg.get_display(restore_graph[
                        relation_path[x]][relation_path[x + 1]]["שם הסרט"])
                    connection[relation_rtl[x]][relation_rtl[x + 1]][
                        bidialg.get_display("שם הסרט")] = movie_name

                draw_graph(connection, True, relation_rtl[0],
                           relation_rtl[len(relation_rtl) - 1])

                #append connection information
                temp_res = {}
                for x in range(0, len(relation_path) - 1):
                    e = (relation_path[x], relation_path[x + 1])
                    temp_res[e] = restore_graph.get_edge_data(*e)["שם הסרט"]

                connections_res.append(temp_res)

                txt = input(
                    "To finish write 0, to get next path write 1 (or anything else) \n"
                )
                if txt == "0":
                    break

    except FileNotFoundError:
        print("Failed open json file")
    except StopIteration:
        print("No more connections")
    except:
        print("There is no connection in length ", maximal_length, " between ",
              name1, " and ", name2)

    return connections_res
Example 34
def find_crowds(weak_comps):
    """
    break up big connected components using crowd detection algorithm, add
    details to crowds
    """
    crowds = []
    for crowd,weak_comp in enumerate(weak_comps):
        g = json_graph.adjacency_graph(weak_comp)
        dendo = community.generate_dendogram(nx.Graph(g))

        if len(dendo)>=2:
            partition = community.partition_at_level(dendo, 1 )
            crowd_ids = collections.defaultdict(list)
            for uid,subcrowd in partition.iteritems():
                crowd_ids[subcrowd].append(uid)
            for subcrowd,uids in sorted(crowd_ids.iteritems()):
                subg = nx.DiGraph(nx.subgraph(g,uids))
                if len(subg)>2:
                    crowds.append(subg)
        else:
            crowds.append(g)

    def _blur(angle):
        return random.triangular(angle-.02,angle+.02)

    big_spots = collections.defaultdict(list)
    lil_spots = collections.defaultdict(list)
    for index,g in enumerate(crowds):
        # location is location of user with greatest degree
        uid,degree = max(g.degree_iter(),key=operator.itemgetter(1))
        lng,lat = g.node[uid]['loc']
        big_spots[int(lng/2),int(lat/2)].append(g)
        lil_spots[int(lng*5),int(lat*5)].append(g)
        g.graph['loc'] = lng,lat
        g.graph['id'] = index

    # add noise to each location based on how popular that area is.
    for lng_lat,graphs in lil_spots.iteritems():
        graphs.sort(key=len,reverse=True)
        for index,g in enumerate(graphs):
            lng,lat = g.graph['loc']
            ang = random.random()*2*np.pi
            dist = .001 * math.sqrt(index)
            g.graph['loc'] = lng+1.2*dist*math.cos(ang), lat+dist*math.sin(ang)

    # pick crowds to show on map based on size
    for lng_lat,graphs in big_spots.iteritems():
        graphs.sort(key=len,reverse=True)
        for index,g in enumerate(graphs):
            g.graph['zoom'] = int(math.floor(1+math.log(index,3))) if index else 0

    return (json_graph.adjacency_data(g) for g in crowds)
Example 35
	def get_graphs(self):
	#'''walks through the directory where eurolobster stores scraped data, retrieving the graphs and inflating them'''
		graphs = {}
		filelist = os.walk(self.user_input['dir']).next()[2]
		for f in filelist:
			if '.json' in f:
				with open(('./' + (self.user_input['dir']) + '/' +  f)) as fo:
					try:
						g = json_graph.adjacency_graph(load(fo), multigraph=True)
						graphs[(f.strip('.json'))] = g
					except IOError:
						print 'not a Lobster graph: ', f
		return graphs
Example 36
def save_crowds(crowds):
    """
    save crowds to mongo
    """
    for crowd_ in crowds:
        crowd = json_graph.adjacency_graph(crowd_)
        c = models.Crowd(
                _id = crowd.graph['id'],
                loc = crowd.graph['loc'],
                zoom = crowd.graph['zoom'],
                edges = crowd.edges(),
                uids = crowd.nodes(),
            )
        c.save()
    crowd_col = models.Crowd.database.Crowd
    crowd_col.ensure_index([('mloc','2d'),('zoom',1)],bits=20)
    crowd_col.ensure_index('zoom')
Example 37
    def deserialize(cls, data):
        g = json_graph.adjacency_graph(data, directed=True, multigraph=True)

        return cls(graph=g)
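The `directed=True, multigraph=True` arguments here act only as fallbacks: data produced by `adjacency_data` carries its own `directed` and `multigraph` flags, and those take precedence. A hedged sketch of the matching serializer, assuming the class keeps its graph on `self.graph`:

def serialize(self):
    # adjacency_data embeds the directed/multigraph flags, so deserialize()
    # rebuilds the same graph type regardless of its keyword defaults
    return json_graph.adjacency_data(self.graph)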
Example 38
def from_json(cls, json):
    state = cls()
    state.network = json_graph.adjacency_graph(json, directed=True)
    return state
def clean_json_adj_loads(json_str):
    json_data = json.loads(json_str)
    H = json_graph.adjacency_graph(json_data)
    for edge_here in H.edges():
        del(H[edge_here[0]][edge_here[1]]["id"])
    return H
def json_graph_list_loads(json_string):
    js_graph_list = json.loads(json_string)
    return [remove_id_from_json_graphs(json_graph.adjacency_graph(js_graph)) for js_graph in js_graph_list]
def json_graph_list_load(file):
    with open_filename(file,'r') as f:
        js_graph_list = json.load(f)
    return [remove_id_from_json_graphs(json_graph.adjacency_graph(js_graph)) for js_graph in js_graph_list]
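`open_filename` and `remove_id_from_json_graphs` are project helpers that the last three snippets rely on but do not define; a minimal sketch of plausible implementations, offered purely as an assumption about their behaviour:

from contextlib import contextmanager

@contextmanager
def open_filename(file, *args, **kwargs):
    # accept either an already-open file object or a path (assumed behaviour)
    if hasattr(file, "read"):
        yield file
    else:
        with open(file, *args, **kwargs) as f:
            yield f

def remove_id_from_json_graphs(graph):
    # strip the leftover "id" edge attribute from a loaded graph (assumed behaviour)
    for u, v, data in graph.edges(data=True):
        data.pop("id", None)
    return graph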