示例#1
0
 def test_data_types(self):
     """Round-trip a nested mix of Python data types through generate_gml/parse_gml."""
     data = [
         True,
         False,
         10 ** 20,
         -2e33,
         "'",
         '"&&&""',
         [{(b"\xfd",): "\x7f", chr(0x4444): (1, 2)}, (2, "3")],
     ]
     try:  # fails under IronPython
         data.append(chr(0x14444))
     except ValueError:
         data.append(chr(0x1444))
     data.append(literal_eval("{2.3j, 1 - 2.3j, ()}"))
     G = nx.Graph()
     G.name = data
     G.graph["data"] = data
     G.add_node(0, int=-1, data=dict(data=data))
     G.add_edge(0, 0, float=-2.5, data=data)
     gml = "\n".join(nx.generate_gml(G, stringizer=literal_stringizer))
     G = nx.parse_gml(gml, destringizer=literal_destringizer)
     assert data == G.name
     assert {"name": data, str("data"): data} == G.graph
     assert list(G.nodes(data=True)) == [(0, dict(int=-1, data=dict(data=data)))]
     assert list(G.edges(data=True)) == [(0, 0, dict(float=-2.5, data=data))]
     # A plain string that merely looks like a repr must survive the destringizer.
     G = nx.Graph()
     G.graph["data"] = "frozenset([1, 2, 3])"
     G = nx.parse_gml(nx.generate_gml(G), destringizer=literal_eval)
     assert G.graph["data"] == "frozenset([1, 2, 3])"
示例#2
0
 def test_data_types(self):
     """Round-trip mixed data types through GML (Python 2 variant: unichr/unicode)."""
     data = [
         True, False, 10**20, -2e33, "'", '"&&&""',
         [{
             (b'\xfd', ): '\x7f',
             unichr(0x4444): (1, 2)
         }, (2, "3")]
     ]
     try:
         data.append(unichr(0x14444))  # fails under IronPython
     except ValueError:
         data.append(unichr(0x1444))
     try:
         data.append(
             literal_eval('{2.3j, 1 - 2.3j, ()}'))  # fails under Python 2.7
     except ValueError:
         data.append([2.3j, 1 - 2.3j, ()])
     G = nx.Graph()
     G.name = data
     G.graph['data'] = data
     G.add_node(0, int=-1, data=dict(data=data))
     G.add_edge(0, 0, float=-2.5, data=data)
     gml = '\n'.join(nx.generate_gml(G, stringizer=literal_stringizer))
     G = nx.parse_gml(gml, destringizer=literal_destringizer)
     assert_equal(data, G.name)
     assert_equal({'name': data, unicode('data'): data}, G.graph)
     assert_equal(list(G.nodes(data=True)),
                  [(0, dict(int=-1, data=dict(data=data)))])
     assert_equal(list(G.edges(data=True)),
                  [(0, 0, dict(float=-2.5, data=data))])
     # A plain string that merely looks like a repr must survive the destringizer.
     G = nx.Graph()
     G.graph['data'] = 'frozenset([1, 2, 3])'
     G = nx.parse_gml(nx.generate_gml(G), destringizer=literal_eval)
     assert_equal(G.graph['data'], 'frozenset([1, 2, 3])')
示例#3
0
 def test_data_types(self):
     """Round-trip mixed data types through GML (Python 2 variant)."""
     data = [True, False, 10 ** 20, -2e33, "'", '"&&&""',
             [{(b'\xfd',): '\x7f', unichr(0x4444): (1, 2)}, (2, "3")]]
     try:
         data.append(unichr(0x14444))  # fails under IronPython
     except ValueError:
         data.append(unichr(0x1444))
     try:
         data.append(literal_eval('{2.3j, 1 - 2.3j, ()}'))  # fails under Python 2.7
     except ValueError:
         data.append([2.3j, 1 - 2.3j, ()])
     G = nx.Graph()
     G.name = data
     G.graph['data'] = data
     G.add_node(0, int=-1, data=dict(data=data))
     G.add_edge(0, 0, float=-2.5, data=data)
     gml = '\n'.join(nx.generate_gml(G, stringizer=literal_stringizer))
     G = nx.parse_gml(gml, destringizer=literal_destringizer)
     assert_equal(data, G.name)
     assert_equal({'name': data, unicode('data'): data}, G.graph)
     assert_equal(list(G.nodes(data=True)),
                  [(0, dict(int=-1, data=dict(data=data)))])
     assert_equal(list(G.edges(data=True)), [(0, 0, dict(float=-2.5, data=data))])
     # A plain string that merely looks like a repr must survive the destringizer.
     G = nx.Graph()
     G.graph['data'] = 'frozenset([1, 2, 3])'
     G = nx.parse_gml(nx.generate_gml(G), destringizer=literal_eval)
     assert_equal(G.graph['data'], 'frozenset([1, 2, 3])')
示例#4
0
def __nx_to_gml(graph: nx.Graph) -> str:
    """Serialize a molecule graph to a GML string.

    Works on a copy of *graph*: nodes without a usable string label get a
    generated element label (e.g. ``C1``, ``H2``), nodes are relabeled by
    that label, attribute names are flattened to GML-friendly forms
    (``bond_type`` -> ``bondtype``, ``iacm``/``atom_type`` -> ``atomtype``,
    etc.), and per-group charges become ``groupcharge<N>`` graph attributes.
    """
    cp = graph.copy()
    el_count = defaultdict(int)

    for _, data in cp.nodes(data=True):
        element = IACM_MAP[data['iacm']]
        el_count[element] += 1
        # BUG FIX: the original tested `isinstance('label', str)` (always
        # True), so `not isinstance(...)` never fired and a non-string label
        # was never replaced. Test the stored value instead.
        if 'label' not in data or not isinstance(data['label'], str):
            data['label'] = '%s%d' % (element, el_count[element])

    nx.relabel_nodes(cp, mapping={v: data['label'] for v, data in cp.nodes.data()}, copy=False)

    for u, v, data in cp.edges(data=True):
        if 'rdkit_bond_type' in data:
            del data['rdkit_bond_type']
        if 'bond_type' in data and isinstance(data['bond_type'], BondType):
            data['bondtype'] = data.pop('bond_type').value

    for v, data in cp.nodes(data=True):
        if 'iacm' in data:
            data['atomtype'] = data.pop('iacm')
            # NOTE(review): assumes 'atom_type' always accompanies 'iacm';
            # raises KeyError otherwise — confirm the upstream invariant.
            del data['atom_type']
        else:
            data['atomtype'] = data.pop('atom_type')
        if 'charge_group' in data:
            data['chargegroup'] = data.pop('charge_group')
        if 'partial_charge' in data:
            data['partialcharge'] = data.pop('partial_charge')

    if 'group_charges' in cp.graph and isinstance(cp.graph['group_charges'], dict):
        for k, v in cp.graph['group_charges'].items():
            cp.graph['groupcharge%d' % k] = v
        del cp.graph['group_charges']

    return "\n".join(nx.generate_gml(cp))
示例#5
0
def gen_graphs(contacts, typeCode, gfilename="graphs.txt", path=""):
    """Build one graph per PDB entry, write them all to a single '#'-separated
    GML file, and collect the node/edge label sets.

    :param contacts: mapping of pdbid -> contact data
    :param typeCode: type code forwarded to gen_graph
    :param gfilename: name of the output graphs file
    :param path: directory the graphs file is written into
    :return: (graphs, node_labels, edge_labels)
    """
    graphs = []
    # Do not shadow the `contacts` mapping with the per-PDB value
    # (the original rebound the parameter inside the loop).
    for pdbid, pdb_contacts in sorted(contacts.items()):
        graphs += gen_graph(pdb_contacts, pdbid, typeCode)

    # Give every graph a sequential id.
    for i, g in enumerate(graphs):
        g.graph["id"] = i

    # Each graph's GML text, terminated by '#'.
    graphs_string = "".join(
        "".join(nx.generate_gml(g)) + "#" for g in graphs)

    # Write graphs file
    with (Path(path) / gfilename).open(mode="w") as gfile:
        gfile.write(graphs_string)

    # Generate node and edge label sets
    node_labels = set()
    edge_labels = set()
    for g in graphs:
        node_labels |= set(nx.get_node_attributes(g, 'type').values())
        edge_labels |= set(nx.get_edge_attributes(g, 'type').values())

    edge_labels = cm.fill_label_set(sorted(edge_labels))
    node_labels = cm.fill_label_set(sorted(node_labels))

    return graphs, node_labels, edge_labels
示例#6
0
    def generateGraph(self, ticket, bnglContents, graphtype):
        """Render a BNGL model as a graph of the requested type (Python 2 service worker).

        Writes *bnglContents* to a temp .bngl file, runs the BNG tooling for
        *graphtype* ('regulatory', 'contactmap', 'sbgn_er' or 'std'), stores
        the result under *ticket* via self.addToDict, and always re-queues the
        ticket for cleanup after 600 s.
        """
        print ticket
        pointer = tempfile.mkstemp(suffix='.bngl', text=True)
        with open(pointer[1], 'w') as f:
            f.write(bnglContents)
        try:
            if graphtype in ['regulatory', 'contactmap']:
                consoleCommands.setBngExecutable(bngDistro)
                consoleCommands.generateGraph(pointer[1], graphtype)
                # BNG writes '<name>_<graphtype>.gml' next to the input file.
                name = pointer[1].split('.')[0].split('/')[-1]
                with open('{0}_{1}.gml'.format(name, graphtype), 'r') as f:
                    graphContent = f.read()

                gml = networkx.read_gml('{0}_{1}.gml'.format(name, graphtype))
                result = gml2cyjson(gml, graphtype=graphtype)
                jsonStr = json.dumps(result, indent=1, separators=(',', ': '))

                # Both the Cytoscape JSON and raw GML text are returned.
                result = {'jsonStr': jsonStr, 'gmlStr': graphContent}
                self.addToDict(ticket, result)
                os.remove('{0}_{1}.gml'.format(name, graphtype))
                print 'success', ticket

            elif graphtype in ['sbgn_er']:
                # SBGN-ER is derived from the contact map output.
                consoleCommands.setBngExecutable(bngDistro)
                consoleCommands.generateGraph(pointer[1], 'contactmap')
                name = pointer[1].split('.')[0].split('/')[-1]
                # with open('{0}_{1}.gml'.format(name,'contactmap'),'r') as f:
                #   graphContent = f.read()
                graphContent = networkx.read_gml(
                    '{0}_{1}.gml'.format(name, 'contactmap'))
                sbgn = libsbgn.createSBNG_ER_gml(graphContent)
                self.addToDict(ticket, sbgn)
                os.remove('{0}_{1}.gml'.format(name, 'contactmap'))
                print 'success', ticket
            elif graphtype in ['std']:
                # 'std' goes through BNG-XML and a locally generated GML graph.
                consoleCommands.setBngExecutable(bngDistro)
                consoleCommands.bngl2xml(pointer[1])
                xmlFileName = pointer[1].split('.')[0] + '.xml'
                xmlFileName = xmlFileName.split(os.sep)[-1]

                graph = stdgraph.generateSTDGML(xmlFileName)
                gmlGraph = networkx.generate_gml(graph)


                #os.remove('{0}.gml'.format(xmlFileName))
                result = gml2cyjson(graph, graphtype=graphtype)
                jsonStr = json.dumps(result, indent=1, separators=(',', ': '))

                result = {'jsonStr': jsonStr, 'gmlStr': ''.join(gmlGraph)}

                #self.addToDict(ticket, ''.join(gmlGraph))
                self.addToDict(ticket, result)
                print 'success', ticket
        except:
            # NOTE(review): bare except deliberately reports failure (-5) for
            # any error; the traceback is printed for diagnosis.
            import traceback
            traceback.print_exc()
            self.addToDict(ticket,-5)
            print 'failure',ticket
        finally:
            task.deferLater(reactor, 600,  freeQueue, ticket)
示例#7
0
    def test_graph_types(self):
        """All combinations of directed/multigraph flags must survive a
        parse_gml -> generate_gml round trip (Python 2 assert_equal style)."""
        for directed in [None, False, True]:
            for multigraph in [None, False, True]:
                # Build a compact one-line GML document with optional flags.
                gml = 'graph ['
                if directed is not None:
                    gml += ' directed ' + str(int(directed))
                if multigraph is not None:
                    gml += ' multigraph ' + str(int(multigraph))
                gml += ' node [ id 0 label "0" ]'
                gml += ' edge [ source 0 target 0 ]'
                gml += ' ]'
                G = nx.parse_gml(gml)
                assert_equal(bool(directed), G.is_directed())
                assert_equal(bool(multigraph), G.is_multigraph())
                # Reconstruct the canonical pretty-printed form generate_gml emits.
                gml = 'graph [\n'
                if directed is True:
                    gml += '  directed 1\n'
                if multigraph is True:
                    gml += '  multigraph 1\n'
                gml += """  node [
    id 0
    label "0"
  ]
  edge [
    source 0
    target 0
"""
                if multigraph:
                    gml += '    key 0\n'
                gml += '  ]\n]'
                assert_equal(gml, '\n'.join(nx.generate_gml(G)))
示例#8
0
def ato_write_gml(graph, fileName, labelGraphics):
    """Write *graph* to *fileName* as GML (Python 2: StringIO module).

    Node blocks are emitted by hand so extra LabelGraphics sections can be
    injected; the edge section is copied from networkx's own GML rendering.
    The file is re-read at the end as a validity check.
    """
    def writeDict(gml, key, label, contents, space, labelGraphics=None):
        # Emit one "key [ ... ]" GML block; recurses into nested dicts.
        gml.write('{1}{0} [\n'.format(key, space))
        for subKey in contents:
            if type(contents[subKey]) in [str]:
                gml.write('{2}\t{0} "{1}"\n'.format(subKey, contents[subKey], space))
            elif type(contents[subKey]) in [int]:
                gml.write('{2}\t{0} {1}\n'.format(subKey, contents[subKey], space))
            elif type(contents[subKey]) in [dict]:
                writeDict(gml, subKey, subKey, contents[subKey], space + '\t')
            # NOTE(review): this sits inside the per-subKey loop, so the
            # LabelGraphics blocks are written once per key of `contents`;
            # it looks like it was meant to run once per node — confirm.
            if labelGraphics and label in labelGraphics:
                for labelInstance in labelGraphics[label]:
                    writeDict(gml, 'LabelGraphics', 'LabelGraphics', labelInstance, space + '\t')
        gml.write('{0}]\n'.format(space))

    gml = StringIO.StringIO()
    gml.write('graph [\n')
    gml.write('\tdirected 1\n')
    for node in graph.node:

        writeDict(gml, 'node', node, graph.node[node], '\t', labelGraphics)

    # Copy everything from the first "edge" line onward (including the
    # trailing "]") out of networkx's GML output.
    flag = False
    for x in nx.generate_gml(graph):
        if 'edge' in x and not flag:
            flag = True
        if flag:
            gml.write(x + '\n')
            

    #gml.write(']\n')
    with open(fileName, 'w') as f:
        f.write(gml.getvalue())
    nx.read_gml(fileName)
示例#9
0
    def test_graph_types(self):
        """All combinations of directed/multigraph flags must survive a
        parse_gml -> generate_gml round trip."""
        for directed in [None, False, True]:
            for multigraph in [None, False, True]:
                # Build a compact one-line GML document with optional flags.
                gml = "graph ["
                if directed is not None:
                    gml += " directed " + str(int(directed))
                if multigraph is not None:
                    gml += " multigraph " + str(int(multigraph))
                gml += ' node [ id 0 label "0" ]'
                gml += " edge [ source 0 target 0 ]"
                gml += " ]"
                G = nx.parse_gml(gml)
                assert bool(directed) == G.is_directed()
                assert bool(multigraph) == G.is_multigraph()
                # Reconstruct the canonical pretty-printed form generate_gml emits.
                gml = "graph [\n"
                if directed is True:
                    gml += "  directed 1\n"
                if multigraph is True:
                    gml += "  multigraph 1\n"
                gml += """  node [
    id 0
    label "0"
  ]
  edge [
    source 0
    target 0
"""
                if multigraph:
                    gml += "    key 0\n"
                gml += "  ]\n]"
                assert gml == "\n".join(nx.generate_gml(G))
示例#10
0
 def _write_model_file(self):
     """Serialize ``self.graph`` to GML and upload it through ``AWSWriter``."""
     lines = nx.generate_gml(self.graph,
                             stringizer=nx.readwrite.gml.literal_stringizer)
     serialized = "\n".join(lines)
     destination = "models/network/" + self.topic + "_communities.gml"
     AWSWriter().write_model(serialized, destination)
示例#11
0
    def test_graph_types(self):
        """All combinations of directed/multigraph flags must survive a
        parse_gml -> generate_gml round trip (Python 2 assert_equal style)."""
        for directed in [None, False, True]:
            for multigraph in [None, False, True]:
                # Build a compact one-line GML document with optional flags.
                gml = 'graph ['
                if directed is not None:
                    gml += ' directed ' + str(int(directed))
                if multigraph is not None:
                    gml += ' multigraph ' + str(int(multigraph))
                gml += ' node [ id 0 label "0" ]'
                gml += ' edge [ source 0 target 0 ]'
                gml += ' ]'
                G = nx.parse_gml(gml)
                assert_equal(bool(directed), G.is_directed())
                assert_equal(bool(multigraph), G.is_multigraph())
                # Reconstruct the canonical pretty-printed form generate_gml emits.
                gml = 'graph [\n'
                if directed is True:
                    gml += '  directed 1\n'
                if multigraph is True:
                    gml += '  multigraph 1\n'
                gml += """  node [
    id 0
    label "0"
  ]
  edge [
    source 0
    target 0
"""
                if multigraph:
                    gml += '    key 0\n'
                gml += '  ]\n]'
                assert_equal(gml, '\n'.join(nx.generate_gml(G)))
示例#12
0
def write_graph_list(name, graph_list, data_root):
    """Given a list of graphs in networkx format, write each of them
    in its own little gml file in a folder named name in the data_root folder.
    Create the folder, if necessary.

    :param name: subfolder name inside data_root
    :param graph_list: list of networkx graphs
    :param data_root: root directory for output
    """
    data_path = os.path.join(data_root, name)
    os.makedirs(data_path, exist_ok=True)

    # Robustness fix: an empty list would make log10(0) raise below.
    if not graph_list:
        return

    # compute right number of trailing zeros for file names
    format_positions = ceil(log10(len(graph_list)))

    # stupid networkx requires labels to be equal to node ids.
    # we need to fix this (defined once, not per loop iteration)
    def sanitize_labels(x):
        if x.find('label') == -1:
            return x + '\n'
        return '    label "1"\n'

    for i, g in enumerate(graph_list):
        lines = nx.generate_gml(g)
        fixed_lines = map(sanitize_labels, lines)
        # Context manager guarantees the file is closed even on error.
        with open(os.path.join(data_path, f'{i:0{format_positions}d}.gml'), 'w') as f:
            f.writelines(fixed_lines)
示例#13
0
 def exportGML(self, filename):
     """
     Export the network to GML format.

     Args:
         filename: path of the new file
     """
     # NOTE(review): nx.generate_gml yields lines lazily; assumes writeFile
     # accepts an iterable of lines — confirm against its implementation.
     self.writeFile(filename, nx.generate_gml(self.__G))
示例#14
0
 def __repr__(self):
     """Return the cached repr if one is set; otherwise a textual dump of
     the in/out counts followed by the graph rendered as GML lines."""
     if self._repr:
         return self._repr
     parts = ["ins %d" % self.ins, "outs %d" % self.outs]
     parts.extend(nx.generate_gml(self._g))
     return "\n".join(parts) + "\n"
示例#15
0
def main():
    """CLI entry point: read flavornet data and write the derived graph as GML.

    Usage: prog /path/to/flavornet.txt output.gml
    """
    if len(sys.argv) != 3:
        # BUG FIX: the original mixed a printf-style '%s' placeholder with
        # str.format(), so a literal '%s' was printed instead of the program name.
        sys.stderr.write('Usage: {} /path/to/flavornet.txt output.gml\n'.format(sys.argv[0]))
        sys.exit(1)

    ingredient2compounds, compound2ingredients = load_data(sys.argv[1])
    graph = generate_graph(ingredient2compounds, compound2ingredients)

    with open(sys.argv[2], 'w') as f:
        for line in nx.generate_gml(graph):
            f.write('{}\n'.format(line))
示例#16
0
    def test_labels_are_strings(self):
        # GML requires labels to be strings (i.e., in quotes)
        # An int node id (1203) must be emitted as the quoted label "1203".
        answer = """graph [
  node [
    id 0
    label "1203"
  ]
]"""
        G = nx.Graph()
        G.add_node(1203)
        data = '\n'.join(nx.generate_gml(G, stringizer=literal_stringizer))
        assert_equal(data, answer)
示例#17
0
    def test_escape_unescape(self):
        # Entities in GML input must be unescaped on parse and re-escaped on
        # output so the generated document matches the original (Python 2).
        gml = """graph [
  name "&"䑄��&unknown;"
]"""
        G = nx.parse_gml(gml)
        assert_equal(
            '&"\x0f' + unichr(0x4444) + '��&unknown;',
            G.name)
        gml = '\n'.join(nx.generate_gml(G))
        assert_equal("""graph [
  name "&"䑄��&unknown;"
]""", gml)
示例#18
0
    def test_labels_are_strings(self):
        # GML requires labels to be strings (i.e., in quotes):
        # an int node id must come out as a quoted label.
        expected = "\n".join(
            [
                "graph [",
                "  node [",
                "    id 0",
                '    label "1203"',
                "  ]",
                "]",
            ]
        )
        graph = nx.Graph()
        graph.add_node(1203)
        generated = "\n".join(nx.generate_gml(graph, stringizer=literal_stringizer))
        assert generated == expected
示例#19
0
    def test_escape_unescape(self):
        # Entities in GML input must be unescaped on parse and re-escaped on
        # output so the generated document matches the original (Python 2).
        gml = """graph [
  name "&"䑄��&unknown;"
]"""
        G = nx.parse_gml(gml)
        assert_equal(
            '&"\x0f' + unichr(0x4444) + '��&unknown;',
            G.name)
        gml = '\n'.join(nx.generate_gml(G))
        assert_equal("""graph [
  name "&"䑄��&unknown;"
]""", gml)
示例#20
0
    def test_escape_unescape(self):
        # Parsing must unescape entity references; generating must re-escape
        # them (including numeric references preserved verbatim via `alnu`).
        gml = """graph [
  name "&"䑄��&unknown;"
]"""
        G = nx.parse_gml(gml)
        assert ('&"\x0f' + chr(0x4444) +
                "��&unknown;" == G.name)
        gml = "\n".join(nx.generate_gml(G))
        alnu = "#1234567890;&#x1234567890abcdef"
        answer = ("""graph [
  name "&"䑄&""" + alnu + """;&unknown;"
]""")
        assert answer == gml
def exportToGML(n, e) :
    """Build a graph from node list *n* and edge list *e*, print some
    diagnostics, and write it to a GML file (Python 2 script code)."""
    G=nx.Graph()
    
    #for node in n :
        #G.add_node(node[0], frequency=node[1])
        
    G.add_nodes_from(n)
    G.add_edges_from(e)
    print len(G.edges())
    
    graph = nx.generate_gml(G)
    # NOTE(review): generate_gml returns a generator, so this prints its
    # repr rather than the GML text — confirm whether that was intended.
    print graph
    
    # NOTE(review): 'dir' and 'output' are module globals here; 'dir'
    # shadows the builtin — confirm they are defined at call time.
    nx.write_gml(G, dir + output)
示例#22
0
    def set_graph_digest(self):
        """
        Hashes graph structure with ornamentation.

        Stores the serialized graph string, the hash object, and its hex
        digest on the instance, then returns the hex digest.
        :return: hex digest of the hashed graph string
        """
        # Dead-code removal: the original also built `nx.generate_gml(self.graph)`
        # into an unused local; the lazy generator was never consumed, so
        # dropping it does not change behavior.
        #g_string = self.get_gml_str()
        g_string = self.get_graph_str()
        #g_string = 'twas brillig'
        self.g_string = g_string

        g_hash = self.hash_method(g_string)
        self.g_hash = g_hash                # Move these onto nodes attributes?
        self.g_digest = g_hash.hexdigest()     # Move these node attributes?
        return g_hash.hexdigest()
示例#23
0
文件: views.py 项目: rkdarst/jako
def download_cmtys(request, ds, did, cd, cdname, layer, format):
    """Django view (Python 2): serve the communities of one detection layer
    in the requested format ('txt', 'nc', 'gexf' or 'gml')."""
    fname_requested = format
    format = format.rsplit('.')[-1]

    # Canonical file name; redirect if the URL used a different one.
    fname = '%s-%s%s.%s'%(os.path.basename(ds.netfile.name), cdname, layer, format)
    if fname_requested != fname:
        return redirect(download_cmtys, did=did, cdname=cdname, layer=layer,
                        format=fname)

    cmtys = cd.get_results()[layer]

    data = [ ]
    content_type = 'text/plain'
    force_download = False
    if format == 'txt':
        # One line per community: whitespace-separated node ids.
        for cname, cnodes in cmtys.iteritems():
            data.append(' '.join(str(x) for x in cnodes))
        data = '\n'.join(data)
    elif format == 'nc':
        # "node community" pairs, one per line.
        for cname, cnodes in cmtys.iteritems():
            for node in cnodes:
                data.append('%s %s'%(node, cname))
        data = '\n'.join(data)
    elif format == 'gexf':
        # Annotate each node with its communities, then serialize.
        g = ds.get_networkx()
        for node, cs in cmtys.nodecmtys().iteritems():
            g.node[node]['cmty'] = ' '.join(str(x) for x in cs)
        data = nx.generate_gexf(g)
        data = '\n'.join(data)
    elif format == 'gml':
        g = ds.get_networkx()
        for node, cs in cmtys.nodecmtys().iteritems():
            g.node[node]['cmty'] = ','.join(str(x) for x in cs)
        data = nx.generate_gml(g)
        data = '\n'.join(data)

    response = HttpResponse(content=data, content_type=content_type, )
    # If the data size is too big, force a download instead of viewing as text.
    if force_download or len(data) > 50 * 2**10:
        response['Content-Disposition'] = 'attachment; filename=%s'%fname
    return response
示例#24
0
    def test_tuplelabels(self):
        # https://github.com/networkx/networkx/pull/1048
        # Writing tuple labels to GML failed.
        graph = nx.OrderedGraph()
        graph.add_edge((0, 1), (1, 0))
        produced = "\n".join(nx.generate_gml(graph, stringizer=literal_stringizer))
        expected = "\n".join(
            [
                "graph [",
                "  node [",
                "    id 0",
                '    label "(0,1)"',
                "  ]",
                "  node [",
                "    id 1",
                '    label "(1,0)"',
                "  ]",
                "  edge [",
                "    source 0",
                "    target 1",
                "  ]",
                "]",
            ]
        )
        assert produced == expected
示例#25
0
    def test_tuplelabels(self):
        # https://github.com/networkx/networkx/pull/1048
        # Writing tuple labels to GML failed.
        # literal_stringizer renders tuples compactly, e.g. "(0,1)".
        G = nx.OrderedGraph()
        G.add_edge((0, 1), (1, 0))
        data = '\n'.join(nx.generate_gml(G, stringizer=literal_stringizer))
        answer = """graph [
  node [
    id 0
    label "(0,1)"
  ]
  node [
    id 1
    label "(1,0)"
  ]
  edge [
    source 0
    target 1
  ]
]"""
        assert_equal(data, answer)
示例#26
0
    def test_tuplelabels(self):
        # https://github.com/networkx/networkx/pull/1048
        # Writing tuple labels to GML failed.
        # Without a stringizer the default str() form "(0, 1)" is expected.
        G = networkx.Graph()
        G.add_edge((0, 1), (1, 0))
        data = '\n'.join(list(networkx.generate_gml(G)))
        answer = """graph [
  node [
    id 0
    label "(0, 1)"
  ]
  node [
    id 1
    label "(1, 0)"
  ]
  edge [
    source 0
    target 1
  ]
]"""
        assert_equal(data, answer)
示例#27
0
    def test_bool(self):
        # Boolean attributes must be serialized as the integers 1/0,
        # with node labels quoted.
        G=networkx.Graph()
        G.add_node(1,on=True)
        G.add_edge(1,2,on=False)
        data = '\n'.join(list(networkx.generate_gml(G)))
        answer ="""graph [
  node [
    id 0
    label "1"
    on 1
  ]
  node [
    id 1
    label "2"
  ]
  edge [
    source 0
    target 1
    on 0
  ]
]"""
        assert_equal(data,answer)
示例#28
0
    def test_bool(self):
        # Boolean attributes must be serialized as the integers 1/0.
        # NOTE(review): this variant expects unquoted labels — presumably an
        # older networkx output format; confirm against the pinned version.
        G = networkx.Graph()
        G.add_node(1, on=True)
        G.add_edge(1, 2, on=False)
        data = '\n'.join(list(networkx.generate_gml(G)))
        answer = """graph [
  node [
    id 0
    label 1
    on 1
  ]
  node [
    id 1
    label 2
  ]
  edge [
    source 0
    target 1
    on 0
  ]
]"""
        assert_equal(data, answer)
示例#29
0
def ato_write_gml(graph, fileName, labelGraphics):
    """Write *graph* to *fileName* as GML (Python 2: StringIO module).

    Node blocks are emitted by hand so extra LabelGraphics sections can be
    injected; the edge section is copied from networkx's own GML rendering.
    The file is re-read at the end as a validity check.
    """
    def writeDict(gml, key, label, contents, space, labelGraphics=None):
        # Emit one "key [ ... ]" GML block; recurses into nested dicts.
        gml.write("{1}{0} [\n".format(key, space))
        for subKey in contents:
            if type(contents[subKey]) in [str]:
                gml.write('{2}\t{0} "{1}"\n'.format(subKey, contents[subKey], space))
            elif type(contents[subKey]) in [int]:
                gml.write("{2}\t{0} {1}\n".format(subKey, contents[subKey], space))
            elif type(contents[subKey]) in [dict]:
                writeDict(gml, subKey, subKey, contents[subKey], space + "\t")
            # NOTE(review): inside the per-subKey loop, so LabelGraphics
            # blocks are written once per key of `contents`; looks like it
            # was meant to run once per node — confirm.
            if labelGraphics and label in labelGraphics:
                for labelInstance in labelGraphics[label]:
                    writeDict(
                        gml,
                        "LabelGraphics",
                        "LabelGraphics",
                        labelInstance,
                        space + "\t",
                    )
        gml.write("{0}]\n".format(space))

    gml = StringIO.StringIO()
    gml.write("graph [\n")
    gml.write("\tdirected 1\n")
    for node in graph.node:

        writeDict(gml, "node", node, graph.node[node], "\t", labelGraphics)

    # Copy everything from the first "edge" line onward (including the
    # trailing "]") out of networkx's GML output.
    flag = False
    for x in nx.generate_gml(graph):
        if "edge" in x and not flag:
            flag = True
        if flag:
            gml.write(x + "\n")

    # gml.write(']\n')
    with open(fileName, "w") as f:
        f.write(gml.getvalue())
    nx.read_gml(fileName)
示例#30
0
 def save(self, graph):
     """Write *graph* as GML (HTML entities unescaped) to ``self.fname`` and return that path."""
     lines = (html.unescape(raw) for raw in nx.generate_gml(graph))
     with self.fname.open('w') as handle:
         handle.write('\n'.join(lines))
     return self.fname
    from sys import argv, exit
    data = load_data('data/data.tsv')

    if not [
            x for x in argv if x in [
                'sagart', 'graph', 'haudricourt', 'starostin', 'gabelentz',
                'pulleyblank', 'pan', 'wang'
            ]
    ]:
        exit()

    if 'graph' in argv:

        G = make_graph(data)
        with open('output/graph.gml', 'w') as f:
            for x in nx.generate_gml(G):
                f.write(html.unescape(x) + '\n')
        exit()

    condition3 = initial

    if 'sagart' in argv:
        condition1 = sandeng
        condition2 = lambda x: False if sandeng(x) else True
        cname = 'sagart'
        header = ['GROUP', 'MCH', 'B', 'A', 'none', 'PURITY']
        condition = lambda x: '1' in x and '2' in x

    if 'haudricourt' in argv:
        condition1 = final_p
        condition2 = final_t
示例#32
0
 def assert_generate_error(*args, **kwargs):
     """Assert that fully consuming generate_gml(*args, **kwargs) raises NetworkXError."""
     def _consume():
         return list(nx.generate_gml(*args, **kwargs))
     pytest.raises(nx.NetworkXError, _consume)
示例#33
0
def _graph_or_file(graph, filename):
    """Write *graph* to ``<filename>.gml`` when a filename is given (returning
    None); otherwise hand the graph back to the caller."""
    if not filename:
        return graph
    util.write_text_file(filename + '.gml', nx.generate_gml(graph))
    return None
示例#34
0
文件: graph.py 项目: kadster/lingpy
def _graph_or_file(graph, filename):
    """Either serialize *graph* to ``<filename>.gml`` (returning None) or,
    with no filename, return the graph unchanged."""
    if filename:
        target = filename + '.gml'
        util.write_text_file(target, nx.generate_gml(graph))
        return
    return graph
示例#35
0
def getWholeGraph():
    """Return the module-level graph ``G`` serialized as one GML string."""
    return ''.join(nx.generate_gml(G))
示例#36
0
文件: netx.py 项目: epurdy/elegans
def to_gml(net):
    """Serialize *net* to a newline-joined GML string."""
    lines = nx.generate_gml(net)
    return '\n'.join(lines)
示例#37
0
        f.write('{0:10} '.format(pA[:9].replace(' ', '_')))
        for languageB in header:
            pB = pins[languageB]
            if pA == pB:
                f.write(' 0.00')
            else:

                weight = G[pA][pB]['nweight']
                f.write(' {0:.2f}'.format(weight))
        f.write('\n')


mst = nx.minimum_spanning_tree(G, weight='weight')

with open('graph.gml', 'w') as f:
    f.write('\n'.join(html.unescape(line) for line in nx.generate_gml(mst)))

plt.clf()
#pos = nx.circular_layout(mst)
nx.draw_networkx(mst, node_size=6, font_size=6)

plt.savefig('network.pdf')


# import data
new_locs = csv2list('mst-locations.tsv', strip_lines=False)
nG = nx.Graph()
for s, t in new_locs[1:]:
    if pins[s[1:-1]] not in nG:
        nG.add_node(pins[s[1:-1]], chinese=s[1:-1])
    if pins[t[1:-1]] not in nG:
 def export_gephi(self):
     """Return a generator of GML lines for ``self.graph`` (e.g. for Gephi import)."""
     gml_lines = nx.generate_gml(self.graph)
     return gml_lines
示例#39
0
        pass
    else:
        g.add_node(target, **gml.vs[edge.target].attributes())
    g.add_edge(source,target,**edge.attributes())


def normalize(freqA, freqB, weight):
    """
    Normalize edge weights following Dellert's proposal.
    """
    denominator = freqA + freqB - weight
    return (weight * weight) / denominator

# Rescale every edge weight by the frequencies of its endpoints; edges whose
# normalized weight vanishes are removed (iterate over a copy because the
# graph is mutated).  Uses old networkx node-attribute access (g.node).
for nA, nB, data in list(g.edges(data=True)):

    w = data['families']
    fA = occs[g.node[nA]['key']]
    fB = occs[g.node[nB]['key']]

    nw = normalize(fA, fB, w)
    if nw > 0:
        data['normalized_weight'] = nw * 100
    else:
        g.remove_edge(nA, nB)

# Write the pruned, annotated graph out as GML, one line at a time.
with open('output/clics_b.gml', 'w') as f:
    for line in nx.generate_gml(g):
        f.write(line+'\n')
#nx.write_gml(g, 'output/clics_b.gml')

示例#40
0
 def save(self, graph):
     """Dump *graph* to ``self.fname`` as GML with HTML entities unescaped; return the path."""
     unescaped = [html.unescape(ln) for ln in nx.generate_gml(graph)]
     with self.fname.open('w') as out:
         out.write('\n'.join(unescaped))
     return self.fname
示例#41
0
def restore(dataset, timestamp=None, fmt='dot'):
    """Restore reconstructs the network topology at a specific time in the past.

    Restore replays gossip messages from a dataset and reconstructs
    the network as it would have looked like at the specified
    timestamp in the past. The network is then printed to stdout using
    the format specified with `--fmt`.

    :param dataset: iterable of gossip messages (ChannelAnnouncement,
        ChannelUpdate, NodeAnnouncement)
    :param timestamp: point in time to reconstruct; defaults to now
    :param fmt: output format: 'dot', 'gml', 'graphml' or 'json'
    """
    if timestamp is None:
        timestamp = time.time()

    # Channels without an update in the last two weeks are considered dead.
    cutoff = timestamp - 2 * 7 * 24 * 3600
    channels = {}
    nodes = {}

    # Some target formats do not support UTF-8 aliases.
    codec = 'UTF-8' if fmt in ['dot'] else 'ASCII'

    for m in tqdm(dataset, desc="Replaying gossip messages"):
        if isinstance(m, ChannelAnnouncement):
            # An announcement creates both directed halves of the channel.
            channels[f"{m.short_channel_id}/0"] = {
                "source": m.node_ids[0].hex(),
                "destination": m.node_ids[1].hex(),
                "timestamp": 0,
                "features": m.features.hex(),
            }

            channels[f"{m.short_channel_id}/1"] = {
                "source": m.node_ids[1].hex(),
                "destination": m.node_ids[0].hex(),
                "timestamp": 0,
                "features": m.features.hex(),
            }

        elif isinstance(m, ChannelUpdate):
            scid = f"{m.short_channel_id}/{m.direction}"
            chan = channels.get(scid, None)
            ts = m.timestamp

            if ts > timestamp:
                # Skip this update, it's in the future.
                continue

            if ts < cutoff:
                # Skip updates that cannot possibly keep this channel alive
                continue

            if chan is None:
                raise ValueError(
                    f"Could not find channel with short_channel_id {scid}")

            if chan["timestamp"] > ts:
                # Skip this update, it's outdated.
                continue

            chan["timestamp"] = ts
            chan["fee_base_msat"] = m.fee_base_msat
            chan["fee_proportional_millionths"] = m.fee_proportional_millionths
            # NOTE(review): 'htlc_minimim_msat' looks like a typo of
            # 'htlc_minimum_msat'; left unchanged since downstream consumers
            # may depend on the misspelled key — confirm before renaming.
            chan["htlc_minimim_msat"] = m.htlc_minimum_msat
            if m.htlc_maximum_msat:
                chan["htlc_maximum_msat"] = m.htlc_maximum_msat
            chan["cltv_expiry_delta"] = m.cltv_expiry_delta
        elif isinstance(m, NodeAnnouncement):
            node_id = m.node_id.hex()

            # Keep only the newest announcement per node.
            old = nodes.get(node_id, None)
            if old is not None and old["timestamp"] > m.timestamp:
                continue

            alias = m.alias.replace(b'\x00', b'').decode(codec, 'ignore')
            nodes[node_id] = {
                "id": node_id,
                "timestamp": m.timestamp,
                "features": m.features.hex(),
                "rgb_color": m.rgb_color.hex(),
                "alias": alias,
                "addresses": ",".join([str(a) for a in m.addresses]),
                "out_degree": 0,
                "in_degree": 0,
            }

    # Cleanup pass: drop channels that haven't seen an update in 2 weeks
    todelete = []
    for scid, chan in tqdm(channels.items(), desc="Pruning outdated channels"):
        if chan["timestamp"] < cutoff:
            todelete.append(scid)
        else:
            node = nodes.get(chan["source"], None)
            if node is None:
                continue
            else:
                node["out_degree"] += 1
            node = nodes.get(chan["destination"], None)
            if node is None:
                continue
            else:
                node["in_degree"] += 1

    for scid in todelete:
        del channels[scid]

    # Keep only nodes that still participate in at least one live channel.
    nodes = [
        n for n in nodes.values() if n["in_degree"] > 0 or n['out_degree'] > 0
    ]

    if len(channels) == 0:
        # NOTE(review): message is missing a space between "a" and
        # "timestamp" (adjacent string literals) — fix would change output.
        print(
            "ERROR: no channels are left after pruning, make sure to select a"
            "timestamp that is covered by the dataset.")
        sys.exit(1)

    g = nx.DiGraph()
    for n in nodes:
        g.add_node(n["id"], **n)

    for scid, c in channels.items():
        g.add_edge(c["source"], c["destination"], scid=scid, **c)

    if fmt == 'dot':
        print(nx.nx_pydot.to_pydot(g))

    elif fmt == 'gml':
        for line in nx.generate_gml(g):
            print(line)

    elif fmt == 'graphml':
        for line in nx.generate_graphml(g,
                                        named_key_ids=True,
                                        edge_id_from_attribute='scid'):
            print(line)

    elif fmt == 'json':
        print(json.dumps(json_graph.adjacency_data(g)))
示例#42
0
import matplotlib.pyplot as plt
import seaborn as sns

# Build a weighted directed-flow graph from the links dataframe, draw it,
# then dump it as GML.
plt.figure(figsize=(10, 5))
# NOTE(review): nx.from_pandas_dataframe and this set_node_attributes
# argument order belong to old networkx (<2.0) — confirm the pinned version.
G = nx.from_pandas_dataframe(
    links_df, 'gate-name', 'next-gate',
    ['count'])  # set number of flows as weight, and id of first gate
nx.set_node_attributes(G, 'group', gates)  # set a node attribute for each node
nx.set_node_attributes(G, 'fill', fill)  # set a fill color
edges, weights = zip(*nx.get_edge_attributes(G, 'count').items())
carac = pd.DataFrame(dat['nodes'])  # set group for each node

# Color edges by flow count and nodes by group.
nx.draw_networkx(G,
                 edge_color=weights,
                 edge_cmap=plt.cm.hot,
                 node_color=carac['group'],
                 cmap=plt.cm.tab10,
                 font_size=8,
                 node_size=100)
sns.set_style('white')
sns.despine()
plt.axis('off')
plt.legend()
fig = plt.gcf()
plt.savefig('car3-network.png')
plt.show()

# plan 4 now to work on the arc diagram
print("\n".join(nx.generate_gml(G)))
# save to gml
nx.write_gml(G, 'car3.txt')
示例#43
0
def test_3(net):
    """The file produced by _3(net) must equal the GML serialization of net."""
    produced = _3(net)
    expected = '\n'.join(nx.generate_gml(net)) + '\n'
    assert produced.open().read() == expected
示例#44
0
def debug_print_gml(g):
    """Print the GML serialization of graph *g*, one line at a time (debug aid).

    BUGFIX: the original used the Python 2 `print t` statement, which is a
    SyntaxError under Python 3; replaced with the print() function.
    """
    for line in nx.generate_gml(g):
        print(line)
示例#45
0
def dataset_from_random_graph(num_vars,
                              num_samples=1000,
                              prob_edge=0.3,
                              random_seed=100,
                              prob_type_of_data=(0.333, 0.333, 0.334)):
    """
    Generate a dataset mixing discrete, binary and continuous variables.

    A random DAG is created and each variable is modeled linearly from its
    parents in the graph (Gaussian noise plus a weighted sum of parents).

    :param num_vars: Number of variables in the dataset
    :param num_samples: Number of samples in the dataset
    :param prob_edge: Probability of an edge between two random nodes in a graph
    :param random_seed: Seed for the NumPy RNG used for graph and data generation
    :param prob_type_of_data: 3-element tuple with the probability of a column
        being discrete, continuous and binary, respectively (this matches the
        order the code actually draws in; the old docstring had it wrong).
    :returns ret_dict: dictionary with the dataframe, outcome, treatment,
        graph GML string and the continuous, discrete and binary column lists
    :raises ValueError: if prob_type_of_data does not sum to 1
    """
    # Validate input with an explicit error instead of a bare assert (asserts
    # are stripped under -O) and tolerate float rounding via np.isclose.
    if not np.isclose(sum(prob_type_of_data), 1.0):
        raise ValueError("prob_type_of_data must sum to 1.0")
    # BUGFIX: the seed was hard-coded to 100, silently ignoring the
    # random_seed parameter; the default (100) preserves old behavior.
    np.random.seed(random_seed)
    DAG = generate_random_graph(n=num_vars)
    # Relabel numeric nodes to letters so columns get readable names.
    mapping = dict(zip(DAG, string.ascii_lowercase))
    DAG = nx.relabel_nodes(DAG, mapping)
    all_nodes = sorted(DAG.nodes)
    num_nodes = len(all_nodes)
    changed = {node: False for node in all_nodes}  # node's data generated yet?
    discrete_cols = []
    continuous_cols = []
    binary_cols = []
    # One uniform draw per node decides whether that node's column will be
    # discrete, continuous or binary.
    random_numbers_array = np.random.rand(num_nodes)

    df = pd.DataFrame()
    currset = list()
    counter = 0

    # Root nodes (no incoming edges) are sampled independently.
    for node in all_nodes:
        if DAG.in_degree(node) == 0:
            x = random_numbers_array[counter]
            counter += 1
            if x <= prob_type_of_data[0]:
                df[node] = create_discrete_column(
                    num_samples)  # Generating discrete data
                discrete_cols.append(node)
            elif x <= prob_type_of_data[0] + prob_type_of_data[1]:
                df[node] = np.random.normal(
                    0, 1, num_samples)  # Generating continuous data
                continuous_cols.append(node)
            else:
                nums = np.random.normal(0, 1, num_samples)
                df[node] = np.vectorize(convert_to_binary)(
                    nums)  # Generating binary data
                discrete_cols.append(node)
                binary_cols.append(node)
            # Queue immediate successors for the next level of generation.
            currset.extend(sorted(DAG.successors(node)))
            changed[node] = True

    # Process nodes level by level: a node is generated only once all of its
    # parents have data. "currset" holds the candidates for this level.
    while len(currset) > 0:
        cs = list()  # children of nodes generated at this level
        for node in currset:
            predecessors = list(DAG.predecessors(node))
            if not changed[node] and all(changed[p] for p in predecessors):
                # Store immediate children for the next level.
                cs.extend(sorted(DAG.successors(node)))
                X = df[predecessors].to_numpy()  # parent data matrix
                c = np.random.uniform(0, 1, len(predecessors))
                # Linear model: Gaussian noise + weighted sum of parents.
                t = np.random.normal(0, 1, num_samples) + X @ c
                changed[node] = True
                x = random_numbers_array[counter]
                counter += 1
                if x <= prob_type_of_data[0]:
                    df[node] = convert_continuous_to_discrete(t)
                    discrete_cols.append(node)
                elif x <= prob_type_of_data[0] + prob_type_of_data[1]:
                    df[node] = t
                    continuous_cols.append(node)
                else:
                    # NOTE(review): binary columns are drawn fresh and ignore
                    # the linear model `t` — preserved from the original.
                    nums = np.random.normal(0, 1, num_samples)
                    df[node] = np.vectorize(convert_to_binary)(nums)
                    discrete_cols.append(node)
                    binary_cols.append(node)
        currset = cs

    # The first sink node (no successors) is the outcome.
    outcome = None
    for node in all_nodes:
        if DAG.out_degree(node) == 0:
            outcome = node
            break

    # Every non-root direct parent of the outcome is a treatment.
    treatments = list()
    for node in all_nodes:
        if DAG.in_degree(node) > 0:
            if outcome in list(DAG.successors(node)):
                treatments.append(node)

    gml_str = "\n".join(nx.generate_gml(DAG))
    ret_dict = {
        "df": df,
        "outcome_name": outcome,
        "treatment_name": treatments,
        "gml_graph": gml_str,
        "discrete_columns": discrete_cols,
        "continuous_columns": continuous_cols,
        "binary_columns": binary_cols
    }
    return ret_dict
示例#46
0
 def _serialize(self, population):
     """Return the population's graph serialized as a single GML string."""
     gml_lines = nx.generate_gml(population.graph)
     return ''.join(gml_lines)
示例#47
0
def write_gml(graph, path):
    """Serialize *graph* as GML to *path*, unescaping HTML entities first."""
    unescaped = (html.unescape(line) for line in nx.generate_gml(graph))
    with open(path, 'w') as f:
        f.write('\n'.join(unescaped))
    print("wrote data to file")
示例#48
0
 def assert_generate_error(*args, **kwargs):
     """Assert that generating GML for these arguments raises NetworkXError."""
     def generate():
         return list(nx.generate_gml(*args, **kwargs))
     assert_raises(nx.NetworkXError, generate)
示例#49
0

# Communities with at least 5 concepts.
glarge = [size for size in gcoms if size >= 5]
print(sum(glarge), len(glarge))

# Histogram of community sizes.
plt.hist(gcoms, bins=40)
plt.savefig('test.svg')
plt.clf()

covered = sum(gcoms)
a = """
Communities:    {0}
Coms > 5   :    {1}
Coverage   :    {2}, {3:.2f}
Concepts   :    {4}
Conc/Com   :    {5:.2f}
""".format(
        (glen - covered) + len(gcoms),
        len(glarge),
        sum(glarge),
        sum(glarge) / len(newg),
        glen,
        covered / len(gcoms)
        )
print(a)
with open('statsoncdec.stats', 'w') as f:
    f.write(a)

os.system('git add website/clics.de/data/communities/*.json')
with open('output/clics_c.gml', 'w') as f:
    f.writelines(line + '\n' for line in nx.generate_gml(newg))