예제 #1
0
 def test_data_types(self):
     """Round-trip many Python data types through GML generate/parse.

     Serializes with ``literal_stringizer`` and parses back with
     ``literal_destringizer``; every value must compare equal after the
     round trip.  Python 2/3 transition era test: ``unichr`` and
     ``unicode`` are the Py2 builtins (presumably aliased for Py3 at
     module import -- confirm).
     """
     data = [
         True, False, 10**20, -2e33, "'", '"&&&""',
         [{
             (b'\xfd', ): '\x7f',
             unichr(0x4444): (1, 2)
         }, (2, "3")]
     ]
     try:
         data.append(unichr(0x14444))  # fails under IronPython
     except ValueError:
         data.append(unichr(0x1444))
     try:
         data.append(
             literal_eval('{2.3j, 1 - 2.3j, ()}'))  # fails under Python 2.7
     except ValueError:
         data.append([2.3j, 1 - 2.3j, ()])
     G = nx.Graph()
     G.name = data
     G.graph['data'] = data
     G.add_node(0, int=-1, data=dict(data=data))
     G.add_edge(0, 0, float=-2.5, data=data)
     # Serialize and parse back; graph/node/edge data must survive intact.
     gml = '\n'.join(nx.generate_gml(G, stringizer=literal_stringizer))
     G = nx.parse_gml(gml, destringizer=literal_destringizer)
     assert_equal(data, G.name)
     assert_equal({'name': data, unicode('data'): data}, G.graph)
     assert_equal(list(G.nodes(data=True)),
                  [(0, dict(int=-1, data=dict(data=data)))])
     assert_equal(list(G.edges(data=True)),
                  [(0, 0, dict(float=-2.5, data=data))])
     # A repr-like string stays a string when the destringizer
     # (literal_eval) cannot parse it.
     G = nx.Graph()
     G.graph['data'] = 'frozenset([1, 2, 3])'
     G = nx.parse_gml(nx.generate_gml(G), destringizer=literal_eval)
     assert_equal(G.graph['data'], 'frozenset([1, 2, 3])')
예제 #2
0
 def test_data_types(self):
     """Round-trip many Python data types through GML generate/parse.

     Py2-era variant: ``unichr``/``unicode`` are the Python 2 builtins
     (presumably aliased for Py3 elsewhere in the module -- confirm).
     """
     data = [True, False, 10 ** 20, -2e33, "'", '"&&&""',
             [{(b'\xfd',): '\x7f', unichr(0x4444): (1, 2)}, (2, "3")]]
     try:
         data.append(unichr(0x14444))  # fails under IronPython
     except ValueError:
         data.append(unichr(0x1444))
     try:
         data.append(literal_eval('{2.3j, 1 - 2.3j, ()}'))  # fails under Python 2.7
     except ValueError:
         data.append([2.3j, 1 - 2.3j, ()])
     G = nx.Graph()
     G.name = data
     G.graph['data'] = data
     G.add_node(0, int=-1, data=dict(data=data))
     G.add_edge(0, 0, float=-2.5, data=data)
     # Serialize with literal_stringizer and parse back; everything must
     # compare equal after the round trip.
     gml = '\n'.join(nx.generate_gml(G, stringizer=literal_stringizer))
     G = nx.parse_gml(gml, destringizer=literal_destringizer)
     assert_equal(data, G.name)
     assert_equal({'name': data, unicode('data'): data}, G.graph)
     assert_equal(list(G.nodes(data=True)),
                  [(0, dict(int=-1, data=dict(data=data)))])
     assert_equal(list(G.edges(data=True)), [(0, 0, dict(float=-2.5, data=data))])
     # A repr-like string stays a string when literal_eval cannot parse it.
     G = nx.Graph()
     G.graph['data'] = 'frozenset([1, 2, 3])'
     G = nx.parse_gml(nx.generate_gml(G), destringizer=literal_eval)
     assert_equal(G.graph['data'], 'frozenset([1, 2, 3])')
예제 #3
0
 def test_data_types(self):
     """Round-trip assorted Python data types through GML (Py3 version).

     Serializes with ``literal_stringizer`` and parses back with
     ``literal_destringizer``; all containers and scalars must compare
     equal after the round trip.
     """
     data = [
         True,
         False,
         10 ** 20,
         -2e33,
         "'",
         '"&&&""',
         [{(b"\xfd",): "\x7f", chr(0x4444): (1, 2)}, (2, "3")],
     ]
     try:  # fails under IronPython
         data.append(chr(0x14444))
     except ValueError:
         data.append(chr(0x1444))
     data.append(literal_eval("{2.3j, 1 - 2.3j, ()}"))
     G = nx.Graph()
     G.name = data
     G.graph["data"] = data
     G.add_node(0, int=-1, data=dict(data=data))
     G.add_edge(0, 0, float=-2.5, data=data)
     # Serialize and parse back; graph/node/edge data must survive intact.
     gml = "\n".join(nx.generate_gml(G, stringizer=literal_stringizer))
     G = nx.parse_gml(gml, destringizer=literal_destringizer)
     assert data == G.name
     assert {"name": data, str("data"): data} == G.graph
     assert list(G.nodes(data=True)) == [(0, dict(int=-1, data=dict(data=data)))]
     assert list(G.edges(data=True)) == [(0, 0, dict(float=-2.5, data=data))]
     # A repr-like string stays a string when the destringizer
     # (literal_eval) cannot parse it.
     G = nx.Graph()
     G.graph["data"] = "frozenset([1, 2, 3])"
     G = nx.parse_gml(nx.generate_gml(G), destringizer=literal_eval)
     assert G.graph["data"] == "frozenset([1, 2, 3])"
예제 #4
0
    def test_label_kwarg(self):
        """Both label='id' and label=None keep integer ids as node keys
        while the original labels remain node attributes."""
        for label_arg in ('id', None):
            G = nx.parse_gml(self.simple_data, label=label_arg)
            ordered = sorted(G.nodes)
            assert_equals(ordered, [1, 2, 3])
            labels = [G.nodes[n]['label'] for n in ordered]
            assert_equals(labels, ['Node 1', 'Node 2', 'Node 3'])
예제 #5
0
    def test_label_kwarg(self):
        """parse_gml with label="id" or label=None keeps the integer ids
        as node keys; labels stay available as node attributes."""
        expected_ids = [1, 2, 3]
        expected_labels = ["Node 1", "Node 2", "Node 3"]
        for label_choice in ("id", None):
            G = nx.parse_gml(self.simple_data, label=label_choice)
            ids = sorted(G.nodes)
            assert ids == expected_ids
            assert [G.nodes[n]["label"] for n in ids] == expected_labels
예제 #6
0
    def test_label_kwarg(self):
        """Check that label='id' and label=None behave identically here."""

        def check(parsed):
            # Ids become the node keys; labels are kept as attributes.
            ids = sorted(parsed.nodes)
            assert ids == [1, 2, 3]
            found = [parsed.nodes[n]['label'] for n in ids]
            assert found == ['Node 1', 'Node 2', 'Node 3']

        check(nx.parse_gml(self.simple_data, label='id'))
        check(nx.parse_gml(self.simple_data, label=None))
예제 #7
0
    def test_label_kwarg(self):
        """label='id' and label=None both expose ids as node keys."""
        for choice in ('id', None):
            graph = nx.parse_gml(self.simple_data, label=choice)
            node_ids = sorted(graph.nodes)
            assert_equals(node_ids, [1, 2, 3])
            assert_equals([graph.nodes[i]['label'] for i in node_ids],
                          ['Node 1', 'Node 2', 'Node 3'])
예제 #8
0
def get_football_graph_test():
    """Download and parse the 'football' network from M. Newman's site.

    Adapted from:
    https://networkx.github.io/documentation/stable/auto_examples/graph/plot_football.html

    Returns a ``Test`` record wrapping the parsed graph.
    """
    try:  # Python 3.x
        import urllib.request as urllib
    except ImportError:  # Python 2.x
        import urllib
    import io
    import zipfile

    import networkx as nx

    url = "http://www-personal.umich.edu/~mejn/netdata/football.zip"

    sock = urllib.urlopen(url)  # open URL
    try:
        s = io.BytesIO(sock.read())  # read into BytesIO "file"
    finally:
        # Close the socket even if the read fails (the original leaked it).
        sock.close()

    # Only the GML member is needed; the original also read football.txt
    # into an unused variable and imported matplotlib without using it.
    with zipfile.ZipFile(s) as zf:
        gml = zf.read('football.gml').decode()
    # Throw away bogus first line with '#' from mejn files.
    gml = gml.split('\n')[1:]
    G = nx.parse_gml(gml)  # parse gml data

    return Test(G, 'football', 10, G.number_of_nodes(), -1)
예제 #9
0
    def test_graph_types(self):
        """Round-trip all directed/multigraph flag combinations.

        Phase 1: parse a compact one-line GML and check the resulting
        graph class matches the flags (absent flags default to False).
        Phase 2: regenerate GML and check the canonical multi-line form
        (flags emitted only when True, edge keys only for multigraphs).
        """
        for directed in [None, False, True]:
            for multigraph in [None, False, True]:
                # Minimal graph: one node with a self-loop.
                gml = "graph ["
                if directed is not None:
                    gml += " directed " + str(int(directed))
                if multigraph is not None:
                    gml += " multigraph " + str(int(multigraph))
                gml += ' node [ id 0 label "0" ]'
                gml += " edge [ source 0 target 0 ]"
                gml += " ]"
                G = nx.parse_gml(gml)
                assert bool(directed) == G.is_directed()
                assert bool(multigraph) == G.is_multigraph()
                # Expected canonical output of generate_gml.
                gml = "graph [\n"
                if directed is True:
                    gml += "  directed 1\n"
                if multigraph is True:
                    gml += "  multigraph 1\n"
                gml += """  node [
    id 0
    label "0"
  ]
  edge [
    source 0
    target 0
"""
                if multigraph:
                    gml += "    key 0\n"
                gml += "  ]\n]"
                assert gml == "\n".join(nx.generate_gml(G))
예제 #10
0
    def test_graph_types(self):
        """Round-trip all directed/multigraph flag combinations.

        First parses a compact one-line GML and checks the parsed graph's
        type matches the flags (absent flags default to False); then
        checks generate_gml emits the canonical multi-line form (flags
        only when True, edge keys only for multigraphs).
        """
        for directed in [None, False, True]:
            for multigraph in [None, False, True]:
                # Minimal graph: one node with a self-loop.
                gml = 'graph ['
                if directed is not None:
                    gml += ' directed ' + str(int(directed))
                if multigraph is not None:
                    gml += ' multigraph ' + str(int(multigraph))
                gml += ' node [ id 0 label "0" ]'
                gml += ' edge [ source 0 target 0 ]'
                gml += ' ]'
                G = nx.parse_gml(gml)
                assert_equal(bool(directed), G.is_directed())
                assert_equal(bool(multigraph), G.is_multigraph())
                # Expected canonical output of generate_gml.
                gml = 'graph [\n'
                if directed is True:
                    gml += '  directed 1\n'
                if multigraph is True:
                    gml += '  multigraph 1\n'
                gml += """  node [
    id 0
    label "0"
  ]
  edge [
    source 0
    target 0
"""
                if multigraph:
                    gml += '    key 0\n'
                gml += '  ]\n]'
                assert_equal(gml, '\n'.join(nx.generate_gml(G)))
예제 #11
0
def get_G(community='football'):
    """Build (adjacency matrix, one-hot labels, label dict) for a community graph.

    Parameters
    ----------
    community : str
        'karate' for the built-in karate-club graph, or 'football' for
        the network downloaded from M. Newman's network-data page.

    Returns
    -------
    (A, labels, label_dict)

    Raises
    ------
    ValueError
        For any other community name.  (The original printed a message
        and then crashed with UnboundLocalError at the return statement.)
    """
    if community == 'karate':
        G = karate_club_graph()
        nodes = sorted(list(G.nodes()))
        A = to_numpy_matrix(G, nodelist=nodes)
        # 'Officer' -> [1, 0], everyone else ('Mr. Hi') -> [0, 1]
        labels = [[1, 0] if G.nodes[i]['club'] == 'Officer' else [0, 1]
                  for i in nodes]
        label_dict = {0: 'Officer', 1: 'Mr. Hi'}
    elif community == 'football':
        url = "http://www-personal.umich.edu/~mejn/netdata/football.zip"
        sock = urllib.urlopen(url)
        s = io.BytesIO(sock.read())
        sock.close()
        zf = zipfile.ZipFile(s)
        gml = zf.read("football.gml").decode()
        # Drop the bogus first '#' comment line of the mejn file.
        gml = gml.split("\n")[1:]
        G = nx.parse_gml(gml)
        nodes = np.unique(G.nodes)
        L_nodes = len(nodes)
        A_matrix = pd.DataFrame(np.zeros([L_nodes, L_nodes]),
                                columns=nodes,
                                index=nodes)
        for ri, ci in G.edges:
            A_matrix.loc[ri, ci] += 1
        # Symmetrize: the edges are undirected.
        A = (A_matrix + A_matrix.T).values
        Label = [G.nodes[ni]['value'] for ni in nodes]
        labels = tf.one_hot(Label, np.unique(Label).shape[0])
        label_dict = {li: ni for li, ni in zip(Label, nodes)}
    else:
        # Fail loudly instead of falling through to an UnboundLocalError.
        raise ValueError("Unknown community: {!r}".format(community))
    return A, labels, label_dict
예제 #12
0
    def test_parse_gml(self):
        """Parsing with label="label" relabels nodes by their GML labels
        and preserves edge attributes."""
        G = nx.parse_gml(self.simple_data, label="label")
        assert sorted(G.nodes()) == ["Node 1", "Node 2", "Node 3"]

        expected_edges = [
            ("Node 1", "Node 2"),
            ("Node 2", "Node 3"),
            ("Node 3", "Node 1"),
        ]
        assert [e for e in sorted(G.edges())] == expected_edges

        expected_with_data = [
            ("Node 1", "Node 2",
             {"color": {"line": "blue", "thickness": 3},
              "label": "Edge from node 1 to node 2"}),
            ("Node 2", "Node 3", {"label": "Edge from node 2 to node 3"}),
            ("Node 3", "Node 1", {"label": "Edge from node 3 to node 1"}),
        ]
        assert [e for e in sorted(G.edges(data=True))] == expected_with_data
예제 #13
0
def read_graphs(filename, path=""):
    """Read '#'-separated GML graphs from *filename* under *path*.

    Returns (graphs, node_labels, edge_labels): the label lists are
    sorted and padded via cm.fill_label_set, and each graph gets a
    'node_map' attribute mapping its sorted node names to indices.
    """
    # The file holds several GML documents separated by '#'; the element
    # after the trailing '#' is empty and discarded.
    with cm.cd(path):
        with open(filename, "r") as input_graphs:
            str_graphs = input_graphs.read().split("#")[:-1]

    graphs = [nx.parse_gml(chunk) for chunk in str_graphs]

    # Collect the distinct node/edge 'type' attribute values over all graphs.
    node_labels = set()
    edge_labels = set()
    for g in graphs:
        node_labels.update(nx.get_node_attributes(g, 'type').values())
        edge_labels.update(nx.get_edge_attributes(g, 'type').values())

    # Same call order as before: edges first, then nodes.
    edge_labels = cm.fill_label_set(sorted(edge_labels))
    node_labels = cm.fill_label_set(sorted(node_labels))

    for g in graphs:
        g.graph['node_map'] = {name: idx
                               for idx, name in enumerate(sorted(g.nodes()))}

    return graphs, node_labels, edge_labels
예제 #14
0
def predict_veracity(ego_net, ego, strategy='katz'):
    """Predict the veracity of *ego* from its ego-network GML string.

    Parameters (NOTE(review): inferred from usage -- confirm):
    - ego_net: GML text describing the ego graph.
    - ego: node name of the ego.
    - strategy: 'katz' for truncated-Katz scoring; anything else uses
      collective regression.

    Relies on module-level globals (`evidence_graph`, `evidence_nodes`),
    the two predict_* helpers, and Flask's `jsonify`.
    """
    query_graph = nx.parse_gml(ego_net).to_undirected()
    # Check if evidence is available
    common_nodes = np.intersect1d(np.array(query_graph.nodes),
                                  np.array(evidence_graph.nodes))
    if np.isin(ego, evidence_nodes.name):
        # Direct evidence about the ego itself.
        # NOTE(review): this result is overwritten below whenever the
        # graphs overlap -- confirm that precedence is intended.
        result = {ego: evidence_nodes.value[evidence_nodes.name == ego]}

    if common_nodes.size > 0:
        # Merge the query graph with the evidence graph, restricted to
        # their shared nodes, and score the ego on the combined graph.
        combined_graph = nx.compose(query_graph,
                                    evidence_graph).subgraph(common_nodes)

        if strategy == 'katz':
            veracity_predicted = predict_veracity_truncated_katz(
                combined_graph, evidence_nodes)
        else:
            veracity_predicted = predict_veracity_collective_regression(
                combined_graph, evidence_nodes, 1)

        result = veracity_predicted[ego]
    else:
        result = "No connection with evidence graph."

    return jsonify(result)
예제 #15
0
    def test_graph_types(self):
        """Round-trip the four directed/multigraph combinations.

        Parses a compact one-line GML and verifies the graph type, then
        verifies generate_gml emits the canonical multi-line form (flags
        only when True, edge keys only for multigraphs).
        """
        for directed in [None, False, True]:
            for multigraph in [None, False, True]:
                # Minimal graph: one node with a self-loop.
                gml = 'graph ['
                if directed is not None:
                    gml += ' directed ' + str(int(directed))
                if multigraph is not None:
                    gml += ' multigraph ' + str(int(multigraph))
                gml += ' node [ id 0 label "0" ]'
                gml += ' edge [ source 0 target 0 ]'
                gml += ' ]'
                G = nx.parse_gml(gml)
                # Absent flags default to False.
                assert_equal(bool(directed), G.is_directed())
                assert_equal(bool(multigraph), G.is_multigraph())
                gml = 'graph [\n'
                if directed is True:
                    gml += '  directed 1\n'
                if multigraph is True:
                    gml += '  multigraph 1\n'
                gml += """  node [
    id 0
    label "0"
  ]
  edge [
    source 0
    target 0
"""
                if multigraph:
                    gml += '    key 0\n'
                gml += '  ]\n]'
                assert_equal(gml, '\n'.join(nx.generate_gml(G)))
예제 #16
0
def main():
    global G
    slave_ip = "172.31.13.227"
    slave_port = 8200
    slave = servernode(slave_ip,slave_port)
    slave.register_function(updateMin)
    slave.start()
    

    master_ip = "172.31.13.224"
    master_port = 8100
    url = "http://{}:{}".format(master_ip, master_port)
    master = xmlrpclib.ServerProxy(url,verbose =True)
    try:
        strGraph = master.getWholeGraph()
        G = nx.parse_gml(strGraph)
        print "Made it"
        x = master.getGraph()

        while (x != None):
            print "in While"
            minTour(G,x)    
            print "First subgraph complete"
            x = master.getGraph
        slave.stop_server()
    except KeyboardInterrupt:
        print "Blah"
        slave.stop_server()
예제 #17
0
 def parse(str_repr):
     """Reconstruct a NeuralNetwork from its string representation.

     Line 0 holds the input count, line 1 the output count (each as the
     second whitespace-separated token); the remaining lines are the
     GML-encoded graph.
     """
     lines = str_repr.splitlines()
     n_in, n_out = (int(line.split()[1]) for line in lines[:2])
     return NeuralNetwork.from_graph(ins=n_in, outs=n_out,
                                     graph=nx.parse_gml(lines[2:]))
예제 #18
0
def open_file_and_parse():
    """Load 'datasets/power.gml', drop its first (comment) line, and
    return the parsed graph with integer ids as node keys.

    Fixes vs. original: removed the redundant `file.close()` issued
    after the `with` block (the handle is already closed there) and the
    shadowing of the `file` builtin.
    """
    with open('datasets/power.gml') as fh:
        gml = fh.read()
    # The first line of the file is a non-GML comment header.
    gml = gml.split('\n')[1:]
    return nx.parse_gml(gml, label='id')
예제 #19
0
    def __init__(self,
                 treatment_name, outcome_name,
                 graph=None,
                 common_cause_names=None,
                 instrument_names=None,
                 effect_modifier_names=None,
                 observed_node_names=None,
                 missing_nodes_as_confounders=False):
        """Build the internal causal DiGraph from one of several formats.

        `graph` may be:
          * None -- construct from the common-cause / instrument /
            effect-modifier name lists,
          * a path ending in .dot or .gml,
          * a literal DOT string matching "graph {...}",
          * a literal GML string matching "graph [...]".

        Raises ValueError for any other format; re-raises the import
        error when neither pygraphviz nor pydot is available for DOT.
        """
        self.treatment_name = parse_state(treatment_name)
        self.outcome_name = parse_state(outcome_name)
        instrument_names = parse_state(instrument_names)
        common_cause_names = parse_state(common_cause_names)
        self.logger = logging.getLogger(__name__)

        if graph is None:
            self._graph = nx.DiGraph()
            self._graph = self.build_graph(common_cause_names,
                                           instrument_names, effect_modifier_names)
        elif re.match(r".*\.dot", graph):
            # load dot file
            try:
                import pygraphviz as pgv
                self._graph = nx.DiGraph(nx.drawing.nx_agraph.read_dot(graph))
            except Exception as e:
                self.logger.error("Pygraphviz cannot be loaded. " + str(e) + "\nTrying pydot...")
                try:
                    import pydot
                    self._graph = nx.DiGraph(nx.drawing.nx_pydot.read_dot(graph))
                except Exception as e:
                    self.logger.error("Error: Pydot cannot be loaded. " + str(e))
                    raise e
        elif re.match(r".*\.gml", graph):
            # load gml file
            self._graph = nx.DiGraph(nx.read_gml(graph))
        elif re.match(r".*graph\s*\{.*\}\s*", graph):
            # inline DOT string; prefer pygraphviz, fall back to pydot
            try:
                import pygraphviz as pgv
                self._graph = pgv.AGraph(graph, strict=True, directed=True)
                self._graph = nx.drawing.nx_agraph.from_agraph(self._graph)
            except Exception as e:
                self.logger.error("Error: Pygraphviz cannot be loaded. " + str(e) + "\nTrying pydot ...")
                try:
                    import pydot
                    P_list = pydot.graph_from_dot_data(graph)
                    self._graph = nx.drawing.nx_pydot.from_pydot(P_list[0])
                except Exception as e:
                    self.logger.error("Error: Pydot cannot be loaded. " + str(e))
                    raise e
        elif re.match(".*graph\s*\[.*\]\s*", graph):
            # inline GML string
            self._graph = nx.DiGraph(nx.parse_gml(graph))
        else:
            self.logger.error("Error: Please provide graph (as string or text file) in dot or gml format.")
            self.logger.error("Error: Incorrect graph format")
            raise ValueError
        if missing_nodes_as_confounders:
            self._graph = self.add_missing_nodes_as_common_causes(observed_node_names)
        # Adding node attributes
        self._graph = self.add_node_attributes(observed_node_names)
        #TODO do not add it here. CausalIdentifier should call causal_graph to add an unobserved common cause if needed. This also ensures that we do not need get_common_causes in this class.
        self._graph = self.add_unobserved_common_cause(observed_node_names)
 def convert_to_nx(graph_):
     # NOTE(review): the parameter `graph_` is unused; the body reads the
     # free variables `graph_str_lines` and `undirected` from an enclosing
     # scope instead -- confirm this is intentional.
     try:
         graph = nx.parse_gml('\n'.join(graph_str_lines))
         if undirected:
             graph = graph.to_undirected()
         return graph
     except nx.NetworkXError as e:
         # Parsing failures are returned (not raised) to the caller.
         return e
def get_pol_data():
    """Load the polblogs graph bundled in ./data/polblogs.zip."""
    archive = zipfile.ZipFile('./data/polblogs.zip')
    gml_text = archive.read('polblogs.gml').decode()
    # The first line is a bogus '#' comment from the mejn files; skip it.
    lines = gml_text.split('\n')[1:]
    return networkx.parse_gml(lines)
예제 #22
0
def gml2json(filename):
    """Convert a GML topology to the project's JSON format and upload it.

    Nodes are renumbered 0..N-1; for every edge, each endpoint records
    the neighbor's id together with matching local/remote port numbers
    (ports are assigned by simply counting a node's edges so far).
    Returns the uploaded file's name ("<base>.gml.json").
    """
    file = seekFile.seekFile(filename)
    g = nx.parse_gml(file)

    newFilename = filename.split('.')[0] + ".gml.json"

    newFile = {}
    newFile["topo"] = [{"timeSlice": 0, "describe": []}]
    # Node records, keyed by stringified node id.
    nodes = {}
    # Running port counter per node (one port per incident edge).
    allPorts = {}

    # Walk the GML graph and build `edges`: a list of dicts with
    # 'source'/'target' holding stringified integer node ids.
    edges = []
    nodes_id = dict()
    for id, label in enumerate(g.nodes()):
        nodes_id[label] = id
    for (v0, v1) in g.edges():
        edges.append({
            'source': str(nodes_id[v1]),
            'target': str(nodes_id[v0])
        })

    # Create an empty record for every node that appears in any edge.
    for i in edges:
        if i['source'] not in nodes:
            nodes[i['source']] = {"LeoID": int(i['source']), "neighbor": []}
            allPorts[i['source']] = 0
        if i['target'] not in nodes:
            nodes[i['target']] = {"LeoID": int(i['target']), "neighbor": []}
            allPorts[i['target']] = 0
    # For each edge, allocate the next free port on both endpoints and
    # record the neighbor entry symmetrically.
    for i in edges:
        allPorts[i['source']] = allPorts[i['source']] + 1
        allPorts[i['target']] = allPorts[i['target']] + 1
        nodes[i['source']]["neighbor"].append({
            "LocalPort":
            allPorts[i['source']],
            "NbID":
            int(i['target']),
            "NbPort":
            allPorts[i['target']]
        })
        nodes[i['target']]["neighbor"].append({
            "LocalPort":
            allPorts[i['target']],
            "NbID":
            int(i['source']),
            "NbPort":
            allPorts[i['source']]
        })
    for i in nodes.values():
        newFile["topo"][0]["describe"].append(i)

    seekFile.uploadFileWithName(newFile, newFilename)
    return newFilename
예제 #23
0
    def __init__(self, filename, trackid):
        """Open the pickledb track database, lay out the first snapshot,
        and precompute the list of snapshot indices."""
        self._outdb = pickledb.load(filename, False)
        self._trackid = trackid
        first_graph = nx.parse_gml(self._outdb.get(trackid + '_0'))
        self._pos = nx.random_layout(first_graph)

        key_count = self._outdb.totalkeys() - 1
        rate = self._outdb.get('_params')['snapshot_rate']
        self._snapshots = range(0, key_count * rate, rate)
예제 #24
0
def __gml_to_nx(obj: str) -> nx.Graph:
    """Parse a GML string into a normalized nx.Graph.

    Node ids are relabeled to consecutive integers starting at 1 and
    attribute names are normalized (atomtype -> iacm/atom_type,
    partialcharge -> partial_charge, chargegroup -> charge_group,
    bondtype -> bond_type); per-group charges are collected into
    graph['group_charges'].

    Raises ValueError on non-integer ids, missing atom types, or atom
    types not in IACM_ELEMENTS.
    """
    el_count = defaultdict(int)
    group_charges = dict()

    graph = nx.parse_gml(obj)
    cp = nx.convert_node_labels_to_integers(graph, first_label=1, label_attribute='label')
    if 'name' in graph.graph:
        cp.graph['name'] = graph.graph['name']
    else:
        del cp.graph['name']

    for v, data in cp.nodes.data():
        if not isinstance(v, int):
            raise ValueError('id {0} is not int.'.format(v))
        if 'partialcharge' in data:
            data['partial_charge'] = data.pop('partialcharge')
        if not 'atomtype' in data:
            raise ValueError('Missing attribute "atomtype" for atom {0}'.format(v))
        data['iacm'] = data.pop('atomtype')
        if not data['iacm'] in IACM_ELEMENTS:
            # BUG FIX: the original formatted data['atom_type'], which is
            # not set yet at this point and raised KeyError instead of
            # the intended ValueError.
            raise ValueError('Unknown "atom_type" for atom {0}: {1}'.format(v, data['iacm']))
        element = IACM_MAP[data['iacm']]
        data['atom_type'] = element
        el_count[element] += 1
        # BUG FIX: the original tested isinstance('label', str), which is
        # always True; validate the actual label value instead.
        if 'label' not in data or not isinstance(data['label'], str):
            data['label'] = '%s%d' % (element, el_count[element])
        if not 'chargegroup' in data:
            data['charge_group'] = 0
        else:
            data['charge_group'] = data.pop('chargegroup')
        group_charges[data['charge_group']] = 0.0

    for _, _, data in cp.edges.data():
        if not 'bondtype' in data:
            data['bond_type'] = BondType.UNKNOWN
        else:
            try:
                data['bond_type'] = BondType(data.pop('bondtype'))
            except ValueError:
                # BUG FIX: 'bondtype' was already popped above, so the
                # original `del data['bondtype']` raised KeyError here;
                # also narrowed the bare except to ValueError.
                data['bond_type'] = BondType.UNKNOWN

    # Pull 'groupcharge<N>' graph attributes into the group_charges map.
    for k in list(cp.graph.keys()):
        m = re.fullmatch(r'groupcharge(\d+)', k)
        if m:
            group_idx = int(m.group(1))
            if group_idx in group_charges:
                group_charges[group_idx] = float(cp.graph[k])
            del cp.graph[k]

    # Ensure every referenced charge group has an entry (default 0.0).
    for _, data in cp.nodes.data():
        if not data['charge_group'] in group_charges:
            group_charges[data['charge_group']] = 0.0
    cp.graph['group_charges'] = group_charges

    return cp
예제 #25
0
    def __init__(self,
                 treatment_name, outcome_name,
                 graph=None,
                 common_cause_names=None,
                 instrument_names=None,
                 observed_node_names=None):
        """Build the internal causal DiGraph from one of several formats.

        `graph` may be:
          * None -- construct from the common-cause / instrument lists,
          * a path ending in .dot or .gml,
          * a literal DOT string matching "graph {...}",
          * a literal GML string matching "graph [...]".

        Raises ValueError for any other format; re-raises the import
        error when neither pygraphviz nor pydot is available for DOT.
        """
        self.treatment_name = treatment_name
        self.outcome_name = outcome_name
        self.fullname = "_".join([self.treatment_name,
                                  self.outcome_name,
                                  str(common_cause_names),
                                  str(instrument_names)])
        if graph is None:
            self._graph = nx.DiGraph()
            self._graph = self.build_graph(common_cause_names,
                                           instrument_names)
        elif re.match(r".*\.dot", graph):
            # load dot file
            try:
                import pygraphviz as pgv
                self._graph = nx.DiGraph(nx.drawing.nx_agraph.read_dot(graph))
            except Exception as e:
                print("Pygraphviz cannot be loaded. " + str(e) + "\nTrying pydot...")
                try:
                    import pydot
                    self._graph = nx.DiGraph(nx.drawing.nx_pydot.read_dot(graph))
                except Exception as e:
                    print("Error: Pydot cannot be loaded. " + str(e))
                    raise e
        elif re.match(r".*\.gml", graph):
            # load gml file
            self._graph = nx.DiGraph(nx.read_gml(graph))
        elif re.match(r".*graph\s*\{.*\}\s*", graph):
            # inline DOT string; prefer pygraphviz, fall back to pydot
            try:
                import pygraphviz as pgv
                self._graph = pgv.AGraph(graph, strict=True, directed=True)
                self._graph = nx.drawing.nx_agraph.from_agraph(self._graph)
            except Exception as e:
                print("Error: Pygraphviz cannot be loaded. " + str(e) + "\nTrying pydot ...")
                try:
                    import pydot
                    P_list = pydot.graph_from_dot_data(graph)
                    self._graph = nx.drawing.nx_pydot.from_pydot(P_list[0])
                except Exception as e:
                    print("Error: Pydot cannot be loaded. " + str(e))
                    raise e
        elif re.match(".*graph\s*\[.*\]\s*", graph):
            # inline GML string
            self._graph = nx.DiGraph(nx.parse_gml(graph))
        else:
            print("Error: Please provide graph (as string or text file) in dot or gml format.")
            print("Error: Incorrect graph format")
            raise ValueError

        self._graph = self.add_node_attributes(observed_node_names)
        self._graph = self.add_unobserved_common_cause(observed_node_names)
        self.logger = logging.getLogger(__name__)
예제 #26
0
 def test_read_gml(self):
     """read_gml on a temp file must match parse_gml on the same text."""
     (fd, fname) = tempfile.mkstemp()
     with open(fname, 'w') as fh:
         fh.write(self.simple_data)
     Gin = nx.read_gml(fname, label='label')
     G = nx.parse_gml(self.simple_data, label='label')
     assert_equals(sorted(G.nodes(data=True)), sorted(Gin.nodes(data=True)))
     assert_equals(sorted(G.edges(data=True)), sorted(Gin.edges(data=True)))
     os.close(fd)
     os.unlink(fname)
예제 #27
0
 def test_read_gml(self):
     """A graph read from a file agrees with one parsed from the string."""
     fd, fname = tempfile.mkstemp()
     with open(fname, "w") as handle:
         handle.write(self.simple_data)
     from_file = nx.read_gml(fname, label="label")
     from_string = nx.parse_gml(self.simple_data, label="label")
     assert sorted(from_string.nodes(data=True)) == sorted(from_file.nodes(data=True))
     assert sorted(from_string.edges(data=True)) == sorted(from_file.edges(data=True))
     os.close(fd)
     os.unlink(fname)
예제 #28
0
 def test_parse_gml(self):
     """parse_gml keeps labels as node names and edge attributes intact."""
     G = networkx.parse_gml(self.simple_data)
     assert_equals(sorted(G.nodes()), ['Node 1', 'Node 2', 'Node 3'])
     expected = [
         ('Node 1', 'Node 2',
          {'color': {'line': 'blue', 'thickness': 3},
           'label': 'Edge from node 1 to node 2'}),
         ('Node 2', 'Node 3', {'label': 'Edge from node 2 to node 3'}),
         ('Node 3', 'Node 1', {'label': 'Edge from node 3 to node 1'}),
     ]
     assert_equals([e for e in sorted(G.edges(data=True))], expected)
예제 #29
0
 def test_read_gml(self):
     """A graph read back from disk equals one parsed from the string."""
     fd, fname = tempfile.mkstemp()
     fh = open(fname, 'w')
     fh.write(self.simple_data)
     fh.close()
     disk_graph = nx.read_gml(fname, label='label')
     mem_graph = nx.parse_gml(self.simple_data, label='label')
     # Compare both node and edge views, with attributes.
     for extract in (lambda g: sorted(g.nodes(data=True)),
                     lambda g: sorted(g.edges(data=True))):
         assert_equals(extract(mem_graph), extract(disk_graph))
     os.close(fd)
     os.unlink(fname)
def convert_to_json(data):
    """Parse GML text, clean it, and return a node-link JSON string."""
    graph = nx.parse_gml(data, label='id')
    payload = json_graph.node_link_data(graph)
    # clean_json mutates the payload in place.
    clean_json(payload)
    return json.dumps({'nodes': payload['nodes'],
                       'links': payload['links']},
                      indent=2)
예제 #31
0
def load_clics():
    """Load CLICS as networkx Graph.

    Lexedata packages the CLICS colexification graph in GML format from
    https://zenodo.org/record/3687530/files/clics/clics3-v1.1.zip?download=1

    """
    with zipfile.ZipFile(
            pkg_resources.resource_stream(
                "lexedata", "data/clics3-network.gml.zip")) as zf:
        with zf.open("graphs/network-3-families.gml", "r") as gml:
            # Parse inside the `with` blocks: parse_gml consumes the line
            # generator while the member file is still open, and both
            # handles are closed afterwards (the original leaked them).
            return networkx.parse_gml(line.decode("utf-8") for line in gml)
예제 #32
0
    def test_escape_unescape(self):
        gml = """graph [
  name "&"䑄��&unknown;"
]"""
        G = nx.parse_gml(gml)
        assert_equal(
            '&"\x0f' + unichr(0x4444) + '��&unknown;',
            G.name)
        gml = '\n'.join(nx.generate_gml(G))
        assert_equal("""graph [
  name "&"䑄��&unknown;"
]""", gml)
def parse_complexes(labels, path_input_graphs, prefix_for_output_gmls,
                    output_file):
    """ Parse the complexes for each label and write a single gml file as well
        as some stats.

        Each label encodes a source filename (first four '_'-separated
        fields) and a graph index (last field) into that file's
        concatenated GML documents.
    """
    # Group requested graph numbers by their source filename.
    filenames_to_numbers = defaultdict(list)
    for l in labels:
        filename = "_".join(l.split("_")[:4])
        graph_number = int(l.split("_")[-1])
        filenames_to_numbers[filename].append(graph_number)

    output = open(output_file, "w")

    # NOTE(review): neither `current_file` nor (on some paths) `output`
    # handles are closed on error; consider `with` blocks.
    for filename in filenames_to_numbers:
        current_file = open(
            path_input_graphs + filename[:-4] + ".nx.gml",
            "r")  # .nx.gml because of duplication for renaming, see below
        # `count` is the index of the graph we are currently inside; a new
        # top-level graph starts at each bare "graph [" line.
        count = -1
        lines = []
        current_graphs = sorted(filenames_to_numbers[filename])
        i = 0
        current_graph = current_graphs[i]
        for line in current_file:
            if line.strip("\n") == "graph [":
                count += 1
            if count == current_graph:
                # Accumulate the lines of the currently wanted graph.
                lines.append(line)
            else:
                if lines != []:
                    # We just left the wanted graph: parse it, write the
                    # .nx.gml copy, and derive a renamed .gml via sed
                    # (drop 'label' lines, rename 'name' to 'label').
                    graph = nx.parse_gml(lines)
                    path = prefix_for_output_gmls + "{}_{}".format(
                        filename, current_graph)
                    nx.write_gml(graph, path + ".nx.gml")
                    os.system(
                        "sed '/label/d' {0}.nx.gml | sed \"s/name/label/\" > {0}.gml"
                        .format(path))
                    proteinnames = sorted(
                        list(nx.get_node_attributes(graph, 'name').values()))
                    # One stats row per extracted complex.
                    print("{}_{}".format(filename, current_graph),
                          graph.number_of_nodes(),
                          graph.number_of_edges(),
                          proteinnames,
                          sep="\t",
                          file=output)
                    lines = []
                    i += 1
                    if i < len(current_graphs):
                        current_graph = current_graphs[i]
                        # The current line may itself open the next
                        # wanted graph.
                        if count == current_graph:
                            lines.append(line)
                    else:
                        # No more wanted graphs in this file.
                        break
    output.close()
예제 #34
0
    def __init__(self, filename, trackid):
        """Open the track database and lay out the final stored snapshot."""
        self._outdb = pickledb.load(filename, False)
        self._trackid = str(trackid)

        key_count = self._outdb.totalkeys() - 1
        rate = self._outdb.get('_params')['snapshot_rate']
        last_snapshot = key_count * rate
        self._snapshots = range(0, last_snapshot, rate)

        # Seed node positions from the most recent stored snapshot.
        final_key = trackid + '_' + str(last_snapshot - rate)
        tmpgraph = nx.parse_gml(self._outdb.get(final_key))
        self._pos = nx.spring_layout(tmpgraph)
예제 #35
0
    def test_escape_unescape(self):
        """Entity escapes in GML strings decode on parse and re-encode
        numerically on generation.

        Py2-era test: ``unichr`` is the Python 2 builtin.
        """
        gml = """graph [
  name "&amp;&#34;&#xf;&#x4444;&#1234567890;&#x1234567890abcdef;&unknown;"
]"""
        G = nx.parse_gml(gml)
        # Valid entities decode; malformed/unknown ones pass through verbatim.
        assert_equal(
            '&"\x0f' + unichr(0x4444) + '&#1234567890;&#x1234567890abcdef;&unknown;',
            G.name)
        gml = '\n'.join(nx.generate_gml(G))
        # On output, every special character is written as a decimal escape.
        assert_equal("""graph [
  name "&#38;&#34;&#15;&#17476;&#38;#1234567890;&#38;#x1234567890abcdef;&#38;unknown;"
]""", gml)
예제 #36
0
    def test_read_gml(self):
        """read_gml(relabel=True) agrees with parse_gml(relabel=True)."""
        import os, tempfile

        fd, fname = tempfile.mkstemp()
        with open(fname, "w") as handle:
            handle.write(self.simple_data)
        from_file = networkx.read_gml(fname, relabel=True)
        from_text = networkx.parse_gml(self.simple_data, relabel=True)
        assert_equals(sorted(from_text.nodes(data=True)),
                      sorted(from_file.nodes(data=True)))
        assert_equals(sorted(from_text.edges(data=True)),
                      sorted(from_file.edges(data=True)))
        os.close(fd)
        os.unlink(fname)
예제 #37
0
    def test_escape_unescape(self):
        """Entity escapes decode on parse and re-encode numerically on
        generation; malformed/unknown escapes pass through verbatim.
        """
        gml = """graph [
  name "&amp;&#34;&#xf;&#x4444;&#1234567890;&#x1234567890abcdef;&unknown;"
]"""
        G = nx.parse_gml(gml)
        assert ('&"\x0f' + chr(0x4444) +
                "&#1234567890;&#x1234567890abcdef;&unknown;" == G.name)
        gml = "\n".join(nx.generate_gml(G))
        # Expected literal split in two to keep the line length down.
        alnu = "#1234567890;&#38;#x1234567890abcdef"
        answer = ("""graph [
  name "&#38;&#34;&#15;&#17476;&#38;""" + alnu + """;&#38;unknown;"
]""")
        assert answer == gml
def _parse_networkmodel(path):
    """Load every numbered ``*.gml`` member of the zip archive at *path*.

    Returns a dict mapping the integer file number (the trailing digits of
    the member name, before ``.gml``) to the graph parsed from that member.
    """
    network_slices = {}

    # Context manager guarantees the archive handle is closed even when
    # parsing raises; the original leaked the ZipFile on error.
    with zipfile.ZipFile(path, 'r') as zf:
        for member in zf.namelist():
            if not member.endswith(".gml"):
                continue
            # Raw string avoids invalid-escape warnings for '\-' and '\d'.
            m = re.search(r'(?!\-)(\d+)\.gml', member)
            if m is None:
                # A .gml member without a numeric suffix would have raised
                # AttributeError on m.group(1); skip it instead.
                continue
            file_number = m.group(1)
            log.debug("Parsing GML file %s:  file number %s", member, file_number)

            network_slices[int(file_number)] = nx.parse_gml(zf.read(member))

    return network_slices
예제 #39
0
    def test_parse_gml(self):
        """parse_gml with relabel=True exposes node labels as node ids and
        keeps nested attribute records on the edges."""
        graph = networkx.parse_gml(self.simple_data, relabel=True)

        assert_equals(sorted(graph.nodes()), ["Node 1", "Node 2", "Node 3"])

        expected_edges = [
            ("Node 1", "Node 2"),
            ("Node 2", "Node 3"),
            ("Node 3", "Node 1"),
        ]
        assert_equals(sorted(graph.edges()), expected_edges)

        expected_edge_data = [
            (
                "Node 1",
                "Node 2",
                {"color": {"line": "blue", "thickness": 3}, "label": "Edge from node 1 to node 2"},
            ),
            ("Node 2", "Node 3", {"label": "Edge from node 2 to node 3"}),
            ("Node 3", "Node 1", {"label": "Edge from node 3 to node 1"}),
        ]
        assert_equals(sorted(graph.edges(data=True)), expected_edge_data)
예제 #40
0
def parse_gml_and_normalize_floats(slice_lines):
    """
    Read a slice GML line by line, looking for scientific notation
    and when found, normalize it using the Decimal library.
    Then pass the lines of text to networkx parse_gml to
    parse it.  This is a drop-in replacement for read_gml()
    """
    exp_regex = compile(r"(\d+(\.\d+)?)[Ee](\+|-)(\d+)")

    def _normalize(match):
        # Decimal parses scientific notation directly; going through
        # float() (as the original did) introduced binary-float noise,
        # e.g. Decimal(float("1.0E-1")) -> 0.1000000000000000055...
        return str(remove_exponent(Decimal(match.group(0))))

    # re.sub rewrites every scientific-notation token on a line; the
    # original only handled the first match per line, and its
    # str.replace could also clobber unrelated equal substrings.
    return nx.parse_gml([exp_regex.sub(_normalize, line)
                         for line in slice_lines])
예제 #41
0
    def test_parse_gml_cytoscape_bug(self):
        """parse_gml must accept Cytoscape-exported GML, which uses a
        Creator/Version preamble, negative node ids, and empty nested
        lists (the ``Line [ ]`` records) — regression test; the only
        assertion is that parsing does not raise."""
        # example from issue #321, originally #324 in trac
        cytoscape_example = """
Creator "Cytoscape"
Version 1.0
graph   [
    node    [
        root_index  -3
        id  -3
        graphics    [
            x   -96.0
            y   -67.0
            w   40.0
            h   40.0
            fill    "#ff9999"
            type    "ellipse"
            outline "#666666"
            outline_width   1.5
        ]
        label   "node2"
    ]
    node    [
        root_index  -2
        id  -2
        graphics    [
            x   63.0
            y   37.0
            w   40.0
            h   40.0
            fill    "#ff9999"
            type    "ellipse"
            outline "#666666"
            outline_width   1.5
        ]
        label   "node1"
    ]
    node    [
        root_index  -1
        id  -1
        graphics    [
            x   -31.0
            y   -17.0
            w   40.0
            h   40.0
            fill    "#ff9999"
            type    "ellipse"
            outline "#666666"
            outline_width   1.5
        ]
        label   "node0"
    ]
    edge    [
        root_index  -2
        target  -2
        source  -1
        graphics    [
            width   1.5
            fill    "#0000ff"
            type    "line"
            Line    [
            ]
            source_arrow    0
            target_arrow    3
        ]
        label   "DirectedEdge"
    ]
    edge    [
        root_index  -1
        target  -1
        source  -3
        graphics    [
            width   1.5
            fill    "#0000ff"
            type    "line"
            Line    [
            ]
            source_arrow    0
            target_arrow    3
        ]
        label   "DirectedEdge"
    ]
]
"""
        nx.parse_gml(cytoscape_example)
예제 #42
0
    def test_exceptions(self):
        """Exercise the error paths of the GML stringizer/destringizer,
        the BOM check in read_gml, malformed-GML rejection in parse_gml,
        and unserializable-data rejection in generate_gml."""
        # Destringizer rejects unparsable or non-literal input; stringizer
        # rejects values it cannot express as a Python literal.
        assert_raises(ValueError, literal_destringizer, '(')
        assert_raises(ValueError, literal_destringizer, 'frozenset([1, 2, 3])')
        assert_raises(ValueError, literal_destringizer, literal_destringizer)
        assert_raises(ValueError, literal_stringizer, frozenset([1, 2, 3]))
        assert_raises(ValueError, literal_stringizer, literal_stringizer)
        # A UTF-8 BOM at the start of the stream must be rejected.
        with tempfile.TemporaryFile() as f:
            f.write(codecs.BOM_UTF8 + 'graph[]'.encode('ascii'))
            f.seek(0)
            assert_raises(nx.NetworkXError, nx.read_gml, f)

        def assert_parse_error(gml):
            # Helper: the given GML text must fail to parse.
            assert_raises(nx.NetworkXError, nx.parse_gml, gml)

        # NOTE(review): unicode() is Python 2-only (NameError on Python 3);
        # presumably this file targeted 2/3 via six or similar — confirm.
        assert_parse_error(['graph [\n\n', unicode(']')])
        # Structurally invalid documents: empty input, missing graph key,
        # bad tokens, unbalanced brackets, duplicate graphs, bad values.
        assert_parse_error('')
        assert_parse_error('Creator ""')
        assert_parse_error('0')
        assert_parse_error('graph ]')
        assert_parse_error('graph [ 1 ]')
        assert_parse_error('graph [ 1.E+2 ]')
        assert_parse_error('graph [ "A" ]')
        assert_parse_error('graph [ ] graph ]')
        assert_parse_error('graph [ ] graph [ ]')
        assert_parse_error('graph [ data [1, 2, 3] ]')
        # Node records need an id and (under the default label mode) a label.
        assert_parse_error('graph [ node [ ] ]')
        assert_parse_error('graph [ node [ id 0 ] ]')
        nx.parse_gml('graph [ node [ id "a" ] ]', label='id')
        # Duplicate ids or duplicate labels are rejected.
        assert_parse_error(
            'graph [ node [ id 0 label 0 ] node [ id 0 label 1 ] ]')
        assert_parse_error(
            'graph [ node [ id 0 label 0 ] node [ id 1 label 0 ] ]')
        # Edges need source and target referring to declared nodes
        # (declaration order does not matter).
        assert_parse_error('graph [ node [ id 0 label 0 ] edge [ ] ]')
        assert_parse_error('graph [ node [ id 0 label 0 ] edge [ source 0 ] ]')
        nx.parse_gml(
            'graph [edge [ source 0 target 0 ] node [ id 0 label 0 ] ]')
        assert_parse_error(
            'graph [ node [ id 0 label 0 ] edge [ source 1 target 0 ] ]')
        assert_parse_error(
            'graph [ node [ id 0 label 0 ] edge [ source 0 target 1 ] ]')
        # Anti-parallel edges are only legal when the graph is directed;
        # parallel edges (and duplicate keys) only when it is a multigraph.
        assert_parse_error(
            'graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] '
            'edge [ source 0 target 1 ] edge [ source 1 target 0 ] ]')
        nx.parse_gml(
            'graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] '
            'edge [ source 0 target 1 ] edge [ source 1 target 0 ] '
            'directed 1 ]')
        nx.parse_gml(
            'graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] '
            'edge [ source 0 target 1 ] edge [ source 0 target 1 ]'
            'multigraph 1 ]')
        nx.parse_gml(
            'graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] '
            'edge [ source 0 target 1 key 0 ] edge [ source 0 target 1 ]'
            'multigraph 1 ]')
        assert_parse_error(
            'graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] '
            'edge [ source 0 target 1 key 0 ] edge [ source 0 target 1 key 0 ]'
            'multigraph 1 ]')
        nx.parse_gml(
            'graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] '
            'edge [ source 0 target 1 key 0 ] edge [ source 1 target 0 key 0 ]'
            'directed 1 multigraph 1 ]')

        def assert_generate_error(*args, **kwargs):
            # Helper: generate_gml must raise when asked to serialize
            # the given graph (generation is lazy, so force the generator).
            assert_raises(nx.NetworkXError,
                          lambda: list(nx.generate_gml(*args, **kwargs)))

        # Non-string keys, keys that are not valid GML names, and values
        # the (optional) stringizer cannot handle are all rejected.
        G = nx.Graph()
        G.graph[3] = 3
        assert_generate_error(G)
        G = nx.Graph()
        G.graph['3'] = 3
        assert_generate_error(G)
        G = nx.Graph()
        G.graph['data'] = frozenset([1, 2, 3])
        assert_generate_error(G, stringizer=literal_stringizer)
        G = nx.Graph()
        G.graph['data'] = []
        assert_generate_error(G)
        assert_generate_error(G, stringizer=len)
예제 #43
0
파일: util.py 프로젝트: zaktan8/GCP
def get_graph_from_gml(file_name, label='label'):
    """Parse the GML file at *file_name* into a networkx graph.

    ``label`` selects which node attribute networkx uses as the node label.
    Whitespace (including newlines) before an opening bracket is collapsed
    to a single space, since parse_gml expects ``name [`` on one line.
    """
    # Context manager closes the file; the original open(...).read()
    # leaked the handle until garbage collection.
    with open(file_name) as fh:
        correct_gml = re.sub(r'\s+\[', ' [', fh.read())
    return networkx.parse_gml(correct_gml, label)
예제 #44
0
#remove additional route_type infos
string = re.sub(r'\s+route\_type [0-9]+', '', string)
string = re.sub(r'\s+agency None', '', string)
string = re.sub(r'\s+weight None', '', string)

##XML read string by xmlcharrefreplace -> unicode, replace all non-ascii characters
#from HTMLParser import HTMLParser
#parser = HTMLParser()
#string = parser.unescape(string)
#string = string.encode('ascii', 'ignore')

#replace labels by unique id
lbl_counter = count()
string = re.sub(r'label ".+"', lambda x: 'label "a%s"' %next(lbl_counter), string)

G = nx.parse_gml(string)
stream = stdout

stream.write('p tw %s %s\n' %(G.number_of_nodes(), G.number_of_edges()))
G = nx.convert_node_labels_to_integers(G, first_label=1)

text = r'''c
c Graphs generated from publicly available GTFS transit feeds by Johannes K. Fichte
c
c References
c [1] https://en.wikipedia.org/wiki/General_Transit_Feed_Specification or
c [2] https://developers.google.com/transit/gtfs/
c [3] https://github.com/daajoe/transit_graphs/blob/master/transitfeeds-tw.pdf
c [4] https://github.com/daajoe/gtfs2graphs
c
c GTFS feeds extracted using gtfs2graphs [4]
예제 #45
0
 def test_name(self):
     """A GML 'name' key becomes G.graph['name']; without one, G.name is
     the empty string and 'name' is absent from the graph attributes."""
     named = nx.parse_gml('graph [ name "x" node [ id 0 label "x" ] ]')
     assert_equal('x', named.graph['name'])
     unnamed = nx.parse_gml('graph [ node [ id 0 label "x" ] ]')
     assert_equal('', unnamed.name)
     assert_not_in('name', unnamed.graph)
  
  # Patch a Gephi-produced GML file so networkx.parse_gml accepts it,
  # then record the graph's node ids as "authors" in a JSON document.
  input_json_filename= sys.argv[1]
  gml_filename= sys.argv[2]
  output_json_filename= sys.argv[3]

  json_object= json.loads(open(input_json_filename, "r").read())
  
  gml_string= open(gml_filename, "r").read()
  # parse_gml needs a space between a key and the newline preceding its
  # value block, so insert one after graph/node/edge/graphics.
  gml_string= gml_string.replace("graph\n", "graph \n")
  gml_string= gml_string.replace("node\n", "node \n")
  gml_string= gml_string.replace("edge\n", "edge \n")
  # Drop Gephi's "Creator Gephi" header line before parsing.
  gml_string= re.sub("Creator Gephi\n", "", gml_string)
  gml_string= gml_string.replace("graphics\n", "graphics \n")
  
  # Write the cleaned GML back over the input file, then parse the
  # in-memory copy.
  open(gml_filename, "w").write(gml_string)
  gml_object= nx.parse_gml(gml_string)
  
  # NOTE(review): on Python 3, map() returns an iterator that json.dumps
  # cannot serialize — presumably this ran on Python 2; confirm, or wrap
  # the map() in list(...).
  json_object["authors"]= map(int, gml_object.nodes())
  open(output_json_filename, "w").write(json.dumps(json_object, indent= 4))

  
  
    
    
        
    
      
    
  
    
    
예제 #47
0
# Download Newman's college-football network, parse its GML, print each
# team's degree (number of games), and draw the graph.
import matplotlib.pyplot as plt
import networkx as nx

url = "http://www-personal.umich.edu/~mejn/netdata/football.zip"

# NOTE(review): urllib.urlopen is the Python 2 spelling; Python 3 needs
# urllib.request.urlopen. The urllib/io/zipfile imports are above this
# excerpt — confirm which interpreter this targets.
sock = urllib.urlopen(url)  # open URL
s = io.BytesIO(sock.read())  # read into BytesIO "file"
sock.close()

zf = zipfile.ZipFile(s)  # zipfile object
txt = zf.read('football.txt').decode()  # read info file
gml = zf.read('football.gml').decode()  # read gml data
# throw away bogus first line with # from mejn files
gml = gml.split('\n')[1:]
G = nx.parse_gml(gml)  # parse gml data

print(txt)
# print degree for each team - number of games
for n, d in G.degree():
    print('%s %d' % (n, d))

# Styling passed straight through to nx.draw below.
options = {
    'node_color': 'black',
    'node_size': 50,
    'line_color': 'grey',
    'linewidths': 0,
    'width': 0.1,
}
nx.draw(G, **options)
plt.show()
예제 #48
0
파일: models.py 프로젝트: clics/clics-data
 def graph(self):
     """Parse this record's GML file, first forcing every line to ASCII
     by replacing non-ASCII characters with XML character references."""
     ascii_lines = (raw.encode('ascii', 'xmlcharrefreplace').decode('utf-8')
                    for raw in self.fname.open())
     return nx.parse_gml(''.join(ascii_lines))