def test_generate_graphml(self):
    """Exercise GraphML export of a triple store built from acme.nt.

    Verifies the generated XML contains GraphML <key> and <graph> elements
    and, when networkx is installed, that the output round-trips through
    networkx.read_graphml.
    """
    self.ts = pyTripleSimple.SimpleTripleStore()
    # Load the fixture triples; the handle is closed as soon as parsing ends.
    with open("acme.nt") as f:
        self.ts.load_ntriples(f)
    egfrsts_obj = pyTripleSimple.ExtractGraphFromSimpleTripleStore(self.ts)
    egfrsts_obj.register_label()
    egfrsts_obj.register_class()
    egfrsts_obj.add_pattern_for_links([['a', 'b', 'c']],
                                      [('b', 'in', ['<http://acme.com/rdf#isLabeller>'])],
                                      ("a", "c"), "labeller")
    egfrsts_obj.register_node_predicate("<http://acme.com/rdf#ndc/date_issued>",
                                        "date", lambda x: x.upper())
    result_xml = egfrsts_obj.translate_into_graphml_file()

    from xml.etree.ElementTree import XML
    elements = XML(result_xml)
    xml_tags = [element.tag for element in elements]
    self.assertTrue("{http://graphml.graphdrawing.org/xmlns}key" in xml_tags)
    self.assertTrue("{http://graphml.graphdrawing.org/xmlns}graph" in xml_tags)

    # Optional round-trip check: only runs when networkx is available.
    # (The original code re-closed the already-closed input handle here.)
    try:
        import networkx
        with open("acme.graphml", "w") as fo:
            fo.write(result_xml)
        networkx.read_graphml("acme.graphml")
    except ImportError:
        pass
Beispiel #2
0
def load_graphml(input_data):
    """Load a graph from *input_data*, a filename or an inline GraphML string.

    First treats input_data as a path; on errno values indicating it is not a
    usable filename, retries it as GraphML data. Otherwise falls back to the
    file named on the command line.

    Raises autonetkit.exception.AnkIncorrectFileFormat when the data cannot
    be parsed, or re-raises the original IOError.
    """
    # TODO: allow default properties to be passed in as dicts
    try:
        graph = nx.read_graphml(input_data)
    except IOError as e:  # py2.6+/py3-compatible form (was py2-only "except IOError, e")
        acceptable_errors = set([
            2,   # ENOENT: no such file or directory
            36,  # ENAMETOOLONG (Linux): string too long to be a filename
            63,  # ENAMETOOLONG (BSD/macOS): same condition, different errno
            ])
        if e.errno in acceptable_errors:
            # try as data string rather than filename string
            try:
                input_pseudo_fh = StringIO(input_data)  # file-like handle for networkx
                graph = nx.read_graphml(input_pseudo_fh)
            except IOError:
                raise autonetkit.exception.AnkIncorrectFileFormat
            except IndexError:
                raise autonetkit.exception.AnkIncorrectFileFormat
        else:
            try:
                import autonetkit.console_script as cs
                input_data = cs.parse_options().file
                graph = nx.read_graphml(input_data)
            except IOError:
                raise e
    # The original snippet assigned `graph` but never returned it.
    return graph
Beispiel #3
0
def create_joined_multigraph():
    """Join the hashtag, friendship, interlevel and retweet graphs into one digraph.

    Hashtag and interlevel edges are inserted in both directions; friendship
    edges keep their direction; retweet nodes are renamed and bridged to their
    friendship counterparts.  The result is written to
    'joined_3layerdigraph.graphm'.

    Returns the joined graph plus the node lists of each layer.
    """
    joined = nx.DiGraph()

    upper = nx.read_graphml('upperlevel_hashtags.graphml')
    for src, dst, attrs in upper.edges(data=True):
        joined.add_edge(src, dst, attr_dict=attrs)
        joined.add_edge(dst, src, attr_dict=attrs)

    middle = nx.read_graphml('friendship_graph.graphml')
    for src, dst, attrs in middle.edges(data=True):
        joined.add_edge(src, dst, attr_dict=attrs)

    inter = nx.read_graphml('interlevel_hashtags.graphml')
    for src, dst, attrs in inter.edges(data=True):
        joined.add_edge(src, dst, attr_dict=attrs)
        joined.add_edge(dst, src, attr_dict=attrs)

    lower = nx.read_graphml('retweet.graphml')
    renamed = {}
    for idx, node in enumerate(lower.nodes()):
        renamed[node] = '%iretweet_net' % idx
    for src, dst, attrs in lower.edges(data=True):
        joined.add_edge(renamed[src], renamed[dst], attr_dict=attrs)

    # Bridge each friendship node to its retweet-layer counterpart, both ways.
    for node in middle.nodes():
        if node in renamed:
            joined.add_edge(node, renamed[node])
            joined.add_edge(renamed[node], node)

    nx.write_graphml(joined, 'joined_3layerdigraph.graphm')
    return joined, upper.nodes(), middle.nodes(), renamed.values()
def load_graphml(input_data):
    """Load a graph from *input_data*, a filename or an inline GraphML string.

    Raises autonetkit.exception.AnkIncorrectFileFormat when the data cannot
    be parsed as GraphML; re-raises the original IOError otherwise.
    """
    # TODO: allow default properties to be passed in as dicts
    try:
        graph = nx.read_graphml(input_data)
    except IOError as e:  # py2.6+/py3-compatible form (was py2-only "except IOError, e")
        # ENOENT (2), plus ENAMETOOLONG on Linux (36) and BSD/macOS (63):
        # the string is probably GraphML data rather than a filename.
        acceptable_errors = set([2, 36, 63])
        if e.errno in acceptable_errors:
            from xml.etree.cElementTree import ParseError

            # try as data string rather than filename string
            try:
                input_pseudo_fh = StringIO(input_data)  # file-like handle for networkx
                graph = nx.read_graphml(input_pseudo_fh)
            except IOError:
                raise autonetkit.exception.AnkIncorrectFileFormat
            except IndexError:
                raise autonetkit.exception.AnkIncorrectFileFormat
            except ParseError:
                # The original listed this handler twice; a second except
                # clause of the same type is unreachable, so one suffices.
                raise autonetkit.exception.AnkIncorrectFileFormat
        else:
            raise e
    # The original snippet assigned `graph` but never returned it.
    return graph
Beispiel #5
0
def test_real_graph(nparts):
    """Load the real author/product graphs, run HardEM with *nparts*
    partitions, and write the community-labelled author graph to disk."""
    logging.info('Reading author collab graph')
    # NOTE(review): this file has a .gexf extension but is read as GraphML —
    # confirm the on-disk format.
    author_graph = nx.read_graphml('/home/amir/az/io/spam/mgraph2.gexf')
    author_graph.name = 'author graph'
    logging.info('Reading the full author product graph')
    full_graph = nx.read_graphml('/home/amir/az/io/spam/spam_graph.graphml')
    full_graph.name = 'full graph'

    # Keep only authors that carry all three features HardEM needs.
    proper_author_graph = author_graph.subgraph(
        [a for a in author_graph if 'revLen' in author_graph.node[a]
         and 'hlpful_fav_unfav' in author_graph.node[a]
         and 'vrf_prchs_fav_unfav' in author_graph.node[a]])

    # Map each retained author to the products he reviewed with >= 4 stars.
    logging.debug('forming the product mapping')
    author_product_mapping = {}
    for a in proper_author_graph:
        author_product_mapping[a] = [p for p in full_graph[a]
                                     if 'starRating' in full_graph[a][p] and
                                     full_graph[a][p]['starRating'] >= 4]
    logging.debug('Running EM')
    ll, partition = HardEM.run_EM(proper_author_graph, author_product_mapping,
                                  nparts=nparts, parallel=True)
    # Parenthesised so the statement is valid in both py2 and py3
    # (was a py2-only print statement).
    print('best loglikelihood: %s' % ll)
    for n in partition:
        author_graph.node[n]['cLabel'] = int(partition[n])
    nx.write_gexf(author_graph, '/home/amir/az/io/spam/spam_graph_mgraph_sage_labeled.gexf')
Beispiel #6
0
 def graph_from_file(cls, path, file_format=GraphFileFormat.GraphMl):
     """Construct an instance by loading the graph at *path* in *file_format*.

     Unrecognised formats fall back to the GraphML reader.
     """
     readers = {
         GraphFileFormat.GraphMl: net.read_graphml,
         GraphFileFormat.AdjList: net.read_adjlist,
         GraphFileFormat.Gml: net.read_gml,
         GraphFileFormat.Yaml: net.read_yaml,
     }
     reader = readers.get(file_format, net.read_graphml)
     return cls(graph=reader(path))
 def __init__(self, configurationFilePath, paretoFilePath):
     """Set up a probability-failure problem from experiment files.

         @param configurationFilePath: path to the XML file describing how the
         experiment was run: 1) demand center, 2) distribution center,
         3) objective functions

         @param paretoFilePath: path to the file containing the Pareto set.
         This file also has information about the objective function types.
         The Pareto set was created by running the facility-location program
         with configurationFilePath.
     """
     self.configurationFilePath = configurationFilePath
     self.paretoFilePath = paretoFilePath
     # NOTE(review): these handles are opened but never read or closed here;
     # ElementTree re-opens the configuration file below.
     configurationFile = open(self.configurationFilePath, 'r')
     paretoFile = open(self.paretoFilePath, 'r')
     
     print 'configuration file', self.configurationFilePath
     print 'pareto file', self.paretoFilePath
     tree = ET.parse(self.configurationFilePath)
     # obtain the file path of the distribution centers
     for elem in tree.iter(tag='distributionCenters'):
         self.distributionCentersFilePath = elem.text
     # obtain the file path of the demand centers
     for elem in tree.iter(tag='demandCenters'):
         self.demandCentersFilePath = elem.text
     
     # NOTE(review): os.path.normpath returns the normalised path; the return
     # value is discarded here, so these calls have no effect as written.
     os.path.normpath(self.distributionCentersFilePath)
     os.path.normpath(self.demandCentersFilePath)
     
     # load the demand and distribution centers as graphs (int node ids)
     distributionCenters = nx.read_graphml(self.distributionCentersFilePath, node_type=int)
     demandCenters = nx.read_graphml(self.demandCentersFilePath, node_type=int)
     
     # load the ids of the distribution centers of the pareto set
     os.path.normpath(self.paretoFilePath)
     pareto = myutils.load_pareto(self.paretoFilePath)
     """
     probabilityFailureProblem = probabilityfailureproblem.ProbabilityFailureProblem(demandCenters=demandCenters,
                                                                                     distributionCenters=distributionCenters,
                                                                                     pareto=pareto
                                                                                     )
     """
     probabilityFailureProblem = probabilityfailureproblem.ProbabilityFailureProblem(demandCentersFilePath=self.demandCentersFilePath,
                                                                                     distributionCentersFilePath=self.distributionCentersFilePath,
                                                                                     pareto=pareto,
                                                                                     configurationFilePath=configurationFilePath
                                                                                     )
Beispiel #8
0
def graph_product(G_file):
    """Read graph G from *G_file* and expand it by substituting each node with
    the H graph named by that node's "H" attribute (H files are looked up in
    the same directory), then wire intra- and inter-pop links via the module
    helpers.

    Returns the expanded graph, or None when a referenced file cannot be read.
    """
    #TODO: take in a graph (eg when called from graphml) rather than re-reading the graph again
    LOG.info("Applying graph product to %s" % G_file)
    H_graphs = {}
    try:
        G = nx.read_graphml(G_file).to_undirected()
    except IOError:
        G = nx.read_gml(G_file).to_undirected()
        # NOTE(review): this returns None immediately after a successful GML
        # read, discarding G — confirm whether GML inputs are intentionally
        # unsupported or this `return` is a leftover bug.
        return
    G = remove_yed_edge_id(G)
    G = remove_gml_node_id(G)
#Note: copy=True causes problems if relabelling with same node name -> loses node data
    G = nx.relabel_nodes(G, dict((n, data.get('label', n)) for n, data in G.nodes(data=True)))
    G_path = os.path.split(G_file)[0]
    # Group G's nodes by the H graph they reference.
    H_labels  = defaultdict(list)
    for n, data in G.nodes(data=True):
        H_labels[data.get("H")].append(n)

    # Load each referenced H graph, trying .graphml first and .gml second.
    for label in H_labels.keys():
        try:
            H_file = os.path.join(G_path, "%s.graphml" % label)
            H = nx.read_graphml(H_file).to_undirected()
        except IOError:
            try:
                H_file = os.path.join(G_path, "%s.gml" % label)
                H = nx.read_gml(H_file).to_undirected()
            except IOError:
                LOG.warn("Unable to read H_graph %s, used on nodes %s" % (H_file, ", ".join(H_labels[label])))
                return
        root_nodes = [n for n in H if H.node[n].get("root")]
        if len(root_nodes):
# some nodes have root set: explicitly mark the rest as non-root
            non_root_nodes = set(H.nodes()) - set(root_nodes)
            H.add_nodes_from( (n, dict(root=False)) for n in non_root_nodes)
        H = remove_yed_edge_id(H)
        H = remove_gml_node_id(H)
        nx.relabel_nodes(H, dict((n, data.get('label', n)) for n, data in H.nodes(data=True)), copy=False)
        H_graphs[label] = H

    # Assemble the product graph from the helper-generated node/edge lists.
    G_out = nx.Graph()
    G_out.add_nodes_from(node_list(G, H_graphs))
    G_out.add_nodes_from(propagate_node_attributes(G, H_graphs, G_out.nodes()))
    G_out.add_edges_from(intra_pop_links(G, H_graphs))
    G_out.add_edges_from(inter_pop_links(G, H_graphs))
    G_out.add_edges_from(propagate_edge_attributes(G, H_graphs, G_out.edges()))
#TODO: need to set default ASN, etc?
    return G_out
Beispiel #9
0
def showXY(fnm, x="kx", y="ky"):
    """Draw the graph stored in *fnm*, positioning each node by its
    *x* and *y* node attributes."""
    graph = nx.read_graphml(fnm)
    xs = nx.get_node_attributes(graph, x)
    ys = nx.get_node_attributes(graph, y)
    positions = dict(zip(graph.nodes(), zip(xs.values(), ys.values())))
    nx.draw(graph, positions)
Beispiel #10
0
def showX(fnm):
    """Draw the graph stored in *fnm* along a horizontal line, using the
    'soma_pos' node attribute as the x coordinate (y is fixed at 0)."""
    graph = nx.read_graphml(fnm)
    soma = nx.get_node_attributes(graph, 'soma_pos')
    zeros = [0] * len(soma)
    positions = dict(zip(graph.nodes(), zip(soma.values(), zeros)))
    nx.draw(graph, positions)
Beispiel #11
0
    def test_write_read_attribute_numeric_type_graphml(self):
        """Round-trip self.attribute_numeric_type_graph through GraphML with
        infer_numeric_types=True; check nodes/edges survive and that both
        attribute keys are declared with attr.type 'double' in the XML."""
        from xml.etree.ElementTree import parse

        G = self.attribute_numeric_type_graph
        fh = io.BytesIO()
        nx.write_graphml(G, fh, infer_numeric_types=True)
        fh.seek(0)
        H = nx.read_graphml(fh)
        fh.seek(0)

        assert_equal(sorted(G.nodes()), sorted(H.nodes()))
        assert_equal(sorted(G.edges()), sorted(H.edges()))
        assert_equal(sorted(G.edges(data=True)),
                     sorted(H.edges(data=True)))
        self.attribute_numeric_type_fh.seek(0)

        # Children are the key elements, and the graph element.
        # NOTE(review): getchildren() is deprecated in modern ElementTree;
        # list(xml.getroot()) is the replacement.
        xml = parse(fh)
        children = xml.getroot().getchildren()
        assert_equal(len(children), 3)

        # The first two children are the <key> declarations; both attributes
        # should have been inferred as 'double'.
        keys = [child.items() for child in children[:2]]

        assert_equal(len(keys), 2)
        assert_in(('attr.type', 'double'), keys[0])
        assert_in(('attr.type', 'double'), keys[1])
Beispiel #12
0
 def test_preserve_multi_edge_data(self):
     """Round-trip a MultiGraph through GraphML and verify that edge data
     and edge keys survive the write/read cycle unchanged."""
     original = nx.MultiGraph()
     original.add_node(1)
     original.add_node(2)
     original.add_edges_from([
         # no data, no key:
         (1, 2),
         # data only:
         (1, 2, dict(key='data_key1')),
         (1, 2, dict(id='data_id2')),
         (1, 2, dict(key='data_key3', id='data_id3')),
         # both data and explicit keys:
         (1, 2, 103, dict(key='data_key4')),
         (1, 2, 104, dict(id='data_id5')),
         (1, 2, 105, dict(key='data_key6', id='data_id7')),
     ])
     buffer = io.BytesIO()
     nx.write_graphml(original, buffer)
     buffer.seek(0)
     restored = nx.read_graphml(buffer, node_type=int)
     assert_edges_equal(
         original.edges(data=True, keys=True),
         restored.edges(data=True, keys=True),
     )
     assert_equal(original._adj, restored._adj)
Beispiel #13
0
def ws_calc(path):
    """Given a path to a graph file generated by the GMM, calculate C(p)
    and L(p).

    The p value is recovered from the 5th '_'-separated token of the path.
    """
    graph = nx.read_graphml(path)
    tokens = path.split('_')
    return {
        'p': float(tokens[4]),
        'cc': nx.average_clustering(graph),
        'avg.pl': nx.average_shortest_path_length(graph),
    }
def main():
    """Read a GraphML tree, add random edge weights, and write it back out
    as GraphML (default) or as an edge list (--edge-list).

    Returns 0 on success.
    """
    arg_parser = ArgumentParser(description='add edge weights to tree')
    arg_parser.add_argument('--input', required=True,
                            help='input file')   # was misspelled 'inpput'
    arg_parser.add_argument('--output', required=True,
                            help='output file')  # was misspelled 'outpput'
    arg_parser.add_argument('--seed', type=int, default=None,
                            help='seed for random number generator')
    arg_parser.add_argument('--delim', dest='delimiter', default=' ',
                            help='delimiter for edge list')
    # Help text was inverted ('show edge data'); the flag suppresses data.
    arg_parser.add_argument('--no-data', action='store_true',
                            dest='no_data', help='omit edge data')
    arg_parser.add_argument('--edge-list', action='store_true',
                            help='generate edge list output')
    options = arg_parser.parse_args()
    random.seed(options.seed)
    tree = nx.read_graphml(options.input)
    add_edge_weights(tree)
    if options.edge_list:
        nx.write_edgelist(tree, options.output,
                          delimiter=options.delimiter,
                          data=not options.no_data)
    else:
        nx.write_graphml(tree, options.output)
    return 0
Beispiel #15
0
def graphmltojson(graphfile, outfile):
    """Convert a GraphML file to JSON while adding communities/modularity
    groups using python-louvain.  The JSON output is usable with the D3
    force layout.

    Usage:
    >>> python convert.py -i mygraph.graphml -o outfile.json
    """
    G = nx.read_graphml(graphfile)
    G = nx.Graph(G)  # collapse to an undirected simple graph

    # Find the best community partition using Louvain.
    partition = community.best_partition(G)

    # Attach each node's community number as the 'group' attribute.
    # nodes(data=True) works on networkx 1.x and 2.x (nodes_iter is 1.x-only).
    for n, d in G.nodes(data=True):
        d['group'] = partition[n]

    node_link = json_graph.node_link_data(G)
    # Renamed from `json` to avoid shadowing the stdlib json module name.
    json_text = json_graph.dumps(node_link)

    # Write to file, closing the handle even on error.
    with open(outfile, "w") as fo:
        fo.write(json_text)
    def test_bool(self):
        """GraphML boolean parsing: 'true'/'True' map to Python True,
        'false'/'False' to False; n1 carries no data element so it should
        pick up the key's declared default."""
        s="""<?xml version="1.0" encoding="UTF-8"?>
<graphml xmlns="http://graphml.graphdrawing.org/xmlns"
      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
      xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns
        http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd">
  <key id="d0" for="node" attr.name="test" attr.type="boolean">
    <default>false</default>
  </key>
  <graph id="G" edgedefault="directed">
    <node id="n0">
      <data key="d0">True</data>
    </node>
    <node id="n1"/>
    <node id="n2">
      <data key="d0">False</data>
    </node>
    <node id="n3">
      <data key="d0">true</data>
    </node>
    <node id="n4">
      <data key="d0">false</data>
    </node>


  </graph>
</graphml>
"""
        fh = io.BytesIO(s.encode('UTF-8'))
        G=nx.read_graphml(fh)
        # Mixed-case spellings must still parse as booleans.
        assert_equal(G.node['n0']['test'],True)
        assert_equal(G.node['n2']['test'],False)
Beispiel #17
0
def igraph_draw_traj(filname, pold, polar=True, layout=None):
    """Build an igraph plot specification for the trajectory graph in *filname*.

    pold   -- mapping from node id to polarity value, used for vertex colours.
    polar  -- if True, map polarity from [-1, 1] onto a red/green gradient;
              otherwise treat values as already in [0, 1].
    layout -- optional precomputed igraph layout; Kamada-Kawai by default.

    Returns (graph, visual_style, layout).
    """
    import igraph as ig
    g = ig.read(filname, format="graphml")
    pols = [pold[v['id']] for v in g.vs]
    if polar:
        rgbs = [(1 - (p + 1.) / 2, (p + 1.) / 2, 0) for p in pols]
    else:
        rgbs = [(1 - p, p, 0) for p in pols]
    # Vertex labels come from the networkx view of the same file.
    GGG = nx.read_graphml(filname)
    g.vs["label"] = GGG.nodes()
    visual_style = {}
    visual_style["vertex_size"] = 15
    visual_style['vertex_color'] = rgbs
    visual_style['vertex_label_size'] = '10'
    visual_style["vertex_label"] = g.vs["label"]
    if layout is None:  # identity check (was the non-idiomatic `layout==None`)
        layout = g.layout("kk")
    visual_style["layout"] = layout
    visual_style["bbox"] = (700, 700)
    visual_style["margin"] = 100
    return g, visual_style, layout
Beispiel #18
0
def create_joint_hasht_friends():
    """Join the hashtag, retweet and interlevel graphs into one digraph.

    Hashtag and interlevel edges are inserted in both directions; retweet
    edges keep their direction.  The joined graph is written to
    'joined_hasht_retweet.graphm'.

    Returns the joined graph and the node lists of the two main layers.
    """
    joined = nx.DiGraph()

    upper = nx.read_graphml('upperlevel_hashtags.graphml')
    for src, dst, attrs in upper.edges(data=True):
        joined.add_edge(src, dst, attr_dict=attrs)
        joined.add_edge(dst, src, attr_dict=attrs)

    lower = nx.read_graphml('retweet.graphml')
    for src, dst, attrs in lower.edges(data=True):
        joined.add_edge(src, dst, attr_dict=attrs)

    inter = nx.read_graphml('interlevel_hashtags.graphml')
    for src, dst, attrs in inter.edges(data=True):
        joined.add_edge(src, dst, attr_dict=attrs)
        joined.add_edge(dst, src, attr_dict=attrs)

    nx.write_graphml(joined, 'joined_hasht_retweet.graphm')

    return joined, upper.nodes(), lower.nodes()
def main():
    """For each 'Being' node in the universe graph whose neighbours are
    clients, collect the neighbours' names and print the groupings."""
    universe = nx.read_graphml(sys.argv[1])
    beings = [node for node in universe.nodes(data=True)
              if node[1]["type"] == "Being"]
    groups = {}
    unclaimed = 0
    for being_id, attrs in beings:
        neighbours = nx.neighbors(universe, being_id)
        if universe.node[neighbours[0]]["type"] == "client":
            names = set(map(lambda x: universe.node[x]["name"], neighbours))
            if "names" in attrs:
                groups[attrs["names"]] = names
            else:
                groups["UNCLAIMED-{}".format(unclaimed)] = names
                unclaimed = unclaimed + 1

    for key in sorted(groups.keys()):
        members = groups[key]
        if len(members) == 1 and list(members)[0] == key:
            print(list(members)[0])
        elif len(members) == 1 and list(members)[0] != key:
            print(list(members)[0] + " ==> " + key)
        else:
            print(key)
            print("--------------------")
            for name in members:
                print(name)
        print("\n")
def main():
    """Print node, edge and path counts for every usegraph of a project.

    The project directory is sys.argv[1]; a single usegraph can be selected
    with --usegraph <name>, otherwise every file whose name contains
    'usegraph' is processed.
    """
    project = sys.argv[1]
    if project[-1] != "/":
        project += "/"

    # Collect the useful graphs.
    usegraphs = []
    if "--usegraph" in sys.argv:
        chosen = sys.argv[sys.argv.index("--usegraph") + 1]
        usegraphs.append("{0}{1}".format(project, chosen))
    else:
        for entry in os.listdir(project):
            if "usegraph" in entry:
                usegraphs.append("{0}{1}".format(project, entry))

    for ug in usegraphs:
        print("READ {0}".format(ug))

        graph = nx.read_graphml(ug)

        node_count = graph.number_of_nodes()
        edge_count = graph.number_of_edges()
        path_count = compute_paths(project, graph)

        print("Stats for {0}:".format(ug))
        print("\tnumber of nodes : {0}".format(node_count))
        print("\tnumber of edges : {0}".format(edge_count))
        print("\tnumber of paths : {0}".format(path_count))

    print("Done!")
Beispiel #21
0
def main(args):
    """
    Entry point: run SIR disease trials on the network described by a JSON
    parameter file (args[0]) and print the mean S, I, R values and the
    standard deviation of R.
    """
    if len(args) == 0:
        # Parenthesised so the statement is valid in both py2 and py3.
        print("Usage: python disease.py <params file>")
        sys.exit(1)

    # Load the simulation parameters; the with-block closes the handle
    # (the original json.load(open(...)) leaked it).
    with open(args[0], "r") as params_fh:
        params = json.load(params_fh)
    network_params = params["network_params"]

    # Setup the network.
    if network_params["name"] == "read_graphml":
        G = networkx.read_graphml(network_params["args"]["path"])
        G = networkx.convert_node_labels_to_integers(G)
    else:
        G = getattr(networkx, network_params["name"])(**network_params["args"])

    # Carry out the requested number of trials of the disease dynamics and
    # average the results with streaming mean/variance updates.
    Sm, Im, Rm, Rv = 0.0, 0.0, 0.0, 0.0
    for t in range(1, params["trials"] + 1):
        S, I, R = single_trial(G, params)
        Rm_prev = Rm
        Sm += (S - Sm) / t
        Im += (I - Im) / t
        Rm += (R - Rm) / t
        Rv += (R - Rm) * (R - Rm_prev)  # running variance accumulator for R

    # Print the averages; the last column is the standard deviation of R.
    print("%.3f\t%.3f\t%.3f\t%.3f"
          % (Sm, Im, Rm, (Rv / params["trials"]) ** 0.5))
Beispiel #22
0
def draw(args):
    """
    Draw the GraphML file named by args.graphml[0] with the tribe draw
    method, optionally writing the figure to args.write.
    """
    social_graph = nx.read_graphml(args.graphml[0])
    draw_social_network(social_graph, args.write)
    return ""
Beispiel #23
0
def _def_node_info():
    """Return node info as networkx object with data for each node.

    The graph is bundled with this module under data/.
    """
    import networkx
    import os.path
    module_dir = os.path.split(__file__)[0]
    info_path = os.path.join(module_dir, 'data/freesurfer_node_info.graphml')
    return networkx.read_graphml(info_path)
def walktrapFile(fName):
    """Convert each raw graph under CSV/Graphs/<fName> into the weighted
    edge-list format expected by WalkTrap, writing *.w files under
    CSV/WalkTrap/<fName>.

    Returns -1 on error (output directory already exists, or input
    directory missing); otherwise returns None.
    """
    # Writing the graph edge as needed by walktrap
    grDir = os.getcwd() + "/CSV/Graphs/" + fName
    wDir = os.getcwd() + "/CSV/WalkTrap/" + fName
    if os.path.exists(wDir):
        print "Directory: " + wDir + " Already exists"
        print "please delete " + fName + " before rerun"
        return -1
    else:
        os.mkdir(wDir)
    if os.path.exists(grDir):
        # os.walk yields (dirpath, dirnames, filenames); only the top-level
        # filenames are kept (break after the first tuple).
        for un1, un2, u3 in os.walk(grDir):
            graphs = u3
            break
    else:
        print "Error in walktrapFile function:"
        print grDir + " Do not exists"
        return -1
    print "Preparing files for Walktrap from raw graphs..."
    # NOTE(review): the loop variable `w` is clobbered by the weight lookup
    # below; it still works because `w` is re-bound from u3 at the top of
    # each iteration, but a distinct name would be clearer.
    for w in u3:
        print "---------------------------------------------"
        print "Prefix: " + w.replace(".G", "").replace("s", "/")
        G = nx.read_graphml(grDir + "/" + w)
        # Node ids are mapped to their index in the sorted node list.
        a = sorted(G.nodes())
        f = open(wDir + "/" + w.replace(".G", ".w"), "w")
        maxx = 0
        for edge in G.edges():
            w = G[edge[0]][edge[1]]["weight"]  # shadows the outer loop variable
            ind1 = a.index(edge[0])
            ind2 = a.index(edge[1])
            maxx = max(max(ind1, ind2), maxx)
            s = str(ind1) + " " + str(ind2) + " " + str(w) + "\n"
            f.write(s)
        f.close()
Beispiel #25
0
def create_graph_df(vtask_paths, graphs_dir_out):
    """
    Creates a frame that maps sourcefiles to networkx digraphs in terms of DOT files
    :param vtask_paths: iterable of verification-task source paths
    :param graphs_dir_out: existing directory the graph pickles are written under
    :return: (DataFrame indexed by vtask path with a 'graph_representation'
              column of pickle paths, list of per-graph generation times in seconds)
    """
    if not isdir(graphs_dir_out):
        raise ValueError('Invalid destination directory.')
    data = []
    graphgen_times = []

    print('Writing graph representations of verification tasks to {}'.format(graphs_dir_out), flush=True)

    common_prefix = commonprefix(vtask_paths)
    for vtask in tqdm(vtask_paths):
        # Mirror the task's path (relative to the common prefix) under graphs_dir_out.
        short_prefix = dirname(common_prefix)
        path = join(graphs_dir_out, vtask[len(short_prefix):][1:])

        if not os.path.exists(dirname(path)):
            os.makedirs(dirname(path))

        ret_path = path + '.pickle'

        # DEBUG: skip tasks whose pickle already exists (cheap resume support).
        if isfile(ret_path):
            data.append(ret_path)
            continue

        start_time = time.time()

        # Run CPAchecker to produce the graph plus its label/edge side files.
        graph_path, node_labels_path, edge_types_path, edge_truth_path, node_depths_path \
            = _run_cpachecker(abspath(vtask))
        nx_digraph = nx.read_graphml(graph_path)

        node_labels = _read_node_labeling(node_labels_path)
        nx.set_node_attributes(nx_digraph, 'label', node_labels)

        edge_types = _read_edge_labeling(edge_types_path)
        parsed_edge_types = _parse_edge(edge_types)
        nx.set_edge_attributes(nx_digraph, 'type', parsed_edge_types)

        edge_truth = _read_edge_labeling(edge_truth_path)
        parsed_edge_truth = _parse_edge(edge_truth)
        nx.set_edge_attributes(nx_digraph, 'truth', parsed_edge_truth)

        node_depths = _read_node_labeling(node_depths_path)
        parsed_node_depths = _parse_node_depth(node_depths)
        nx.set_node_attributes(nx_digraph, 'depth', parsed_node_depths)

        # Sanity checks: the pickle must not already exist and every
        # labelling must be non-empty before persisting.
        assert not isfile(ret_path)
        assert node_labels and parsed_edge_types and parsed_edge_truth and parsed_node_depths
        nx.write_gpickle(nx_digraph, ret_path)
        data.append(ret_path)

        gg_time = time.time() - start_time
        graphgen_times.append(gg_time)

    return pd.DataFrame({'graph_representation': data}, index=vtask_paths), graphgen_times
Beispiel #26
0
    def _read_cell_graph(self, filename, format):
        """Load the cell-to-cell connectivity graph from a
        file.

        filename -- path to the graph file; if falsy, nothing is loaded.
        format -- one of "gml", "pickle", "edgelist", "yaml", "graphml".

        Returns None if any error happens.

        NOTE(review): the loaded graph is only bound to the local variable
        and never returned or stored — the snippet may be truncated; confirm
        against the full source.
        """
        cell_graph = None
        if filename:
            try:
                # Timed so the benchmark logger can record the load duration.
                start = datetime.now()
                if format == "gml":
                    cell_graph = nx.read_gml(filename)
                elif format == "pickle":
                    cell_graph = nx.read_gpickle(filename)
                elif format == "edgelist":
                    cell_graph = nx.read_edgelist(filename)
                elif format == "yaml":
                    cell_graph = nx.read_yaml(filename)
                elif format == "graphml":
                    # NOTE(review): duplicated assignment is harmless but redundant.
                    cell_graph = cell_graph = nx.read_graphml(filename)
                else:
                    print "Unrecognized format:", format
                end = datetime.now()
                delta = end - start
                config.BENCHMARK_LOGGER.info(
                    "Read cell_graph from file %s of format %s in %g s"
                    % (filename, format, delta.seconds + 1e-6 * delta.microseconds)
                )
            except Exception, e:
                print e
def main( ):
	"""Load the graph dataset, build and save a block model as JSON, then
	solve the MIP influence models over it."""
	# graph = cp.readGML(savedGraphs["scalefree"])
	d = dictionnaryFunctions()
	wf = weightFunctions()
	met = methodes_complementaires()

	# graph = cp.graphGenerators (500, "scalefree")
	# graph = cp.generateRandomWeights(graph)
	# graph = nx.DiGraph(graph)

	graph_json_pause = met.read_json_file(json_graphe)

	graph = nx.read_graphml(graphdataset)

	# Build the block model and persist it for the solver runs below.
	graph = gt.createAndSaveBlockModel(graph)
	cp.writeJsonFile(graph, json_graph_filename)

	print("\n\n")
	print(nx.info(graph))
	print("\n\n")

	# Node-number/id dictionaries and arc weights must exist before solving.
	d.createDictNodeNumberToId(graph)
	d.checkDictionnary()
	w = wf.graphWeightsOnArcs(graph)

	#miProgram(graph, w, "influencersAdjacencyMatrixWithBlocksAndColouringFunction", json_graph_filename)

	for model in ["neighbouringInfluencersWithoutBinaryVariables", "influencersAdjacencyMatrix"]:
		miProgram(graph, w, model, json_graph_filename)
Beispiel #28
0
    def _read_celltype_graph(self, celltypes_file, format="gml"):
        """
        Read celltype-celltype connectivity graph from file.

        celltypes_file -- the path of the file containing
        the graph.

        format -- format of the file. allowed values: gml, graphml, edgelist, pickle, yaml.

        NOTE(review): the loaded graph is only bound to a local variable and
        never returned or stored — the snippet may be truncated; confirm
        against the full source.
        """
        start = datetime.now()
        celltype_graph = None
        try:
            if format == "gml":
                celltype_graph = nx.read_gml(celltypes_file)
            elif format == "edgelist":
                celltype_graph = nx.read_edgelist(celltypes_file)
            elif format == "graphml":
                celltype_graph = nx.read_graphml(celltypes_file)
            elif format == "pickle":
                celltype_graph = nx.read_gpickle(celltypes_file)
            elif format == "yaml":
                celltype_graph = nx.read_yaml(celltypes_file)
            else:
                print "Unrecognized format %s" % (format)
        except Exception, e:
            print e
def UoSM_input(fName, w):
    """Read graph <w>.G and the WalkTrap community file <w>.C for dataset
    *fName*, and translate community member indices back to node ids.

    Returns a list of communities (each a list of node ids), or None when
    either file is missing or unreadable.
    """
    # for the name of the graph add .G
    # for the name of communities add .C
    gFile = os.getcwd() + "/CSV/Graphs/" + fName + "/" + w + ".G"
    wFile = os.getcwd() + "/CSV/WalkTrap/" + fName + "/" + w + ".C"
    if (not os.path.exists(gFile)) or (not os.path.exists(wFile)):
        print "Error: " + gFile + " or " + wFile + " not found"
        return
    G = nx.read_graphml(gFile)
    try:
        f = open(wFile, "r")
    except IOError:
        return
    # Community files refer to nodes by their index in the sorted node list.
    a = sorted(G.nodes())
    # ~ b=[str(xx) for xx in range(len(a))]
    # ~ myDic=list2dic(b,a)
    C = []
    # NOTE(review): the outer enumerate consumes the first line and the inner
    # loop reads the rest — line 1 of the file is skipped.  Confirm whether
    # the .C file carries a header line; otherwise this drops a community.
    for k, line in enumerate(f):
        for line in f:
            t1 = line.strip(" {}\t\n")
            t2 = t1.split(",")
            t = [xx.strip() for xx in t2]
            # ~ ll=[myDic[xx][0] for xx in t]
            ll = [a[int(xx)] for xx in t]
            C.append(ll)
    return C
Beispiel #30
0
def main(args):
    """
    Entry point: run disease-dynamics trials with precomputed attack
    sequences and print the mean S, I, R values and the standard deviation
    of R.
    """
    if len(args) != 2:
        sys.exit(__doc__ % {"script_name": args[0].split("/")[-1]})

    # Load the simulation parameters; the with-block closes the handle
    # (the original json.load(open(...)) leaked it).
    with open(args[1], "r") as params_fh:
        params = json.load(params_fh)
    network_params = params["network_params"]

    # Setup the network.
    G = networkx.read_graphml(network_params["args"]["path"])
    G = networkx.convert_node_labels_to_integers(G)

    # Load the attack sequences, pickled next to the graphml file.
    fname = network_params["args"]["path"].replace(".graphml", ".pkl")
    with open(fname, "rb") as pkl_fh:
        attack_sequences = pickle.load(pkl_fh)

    # Carry out the requested number of trials of the disease dynamics and
    # average the results with streaming mean/variance updates.
    Sm, Im, Rm, Rv = 0.0, 0.0, 0.0, 0.0
    for t in range(1, params["trials"] + 1):
        S, I, R = single_trial(G, params, attack_sequences)
        Rm_prev = Rm
        Sm += (S - Sm) / t
        Im += (I - Im) / t
        Rm += (R - Rm) / t
        Rv += (R - Rm) * (R - Rm_prev)  # running variance accumulator for R

    # Print the averages; the last column is the standard deviation of R.
    print("%.3f\t%.3f\t%.3f\t%.3f"
          % (Sm, Im, Rm, (Rv / params["trials"]) ** 0.5))
Beispiel #31
0
# Reading graph_ml, accessing nodes, edges and functions+n

import itertools, copy
import networkx as nx
import pandas as pd
import matplotlib.pyplot as plt

# Load a 5x5 grid graph previously saved as GraphML.
G5 = nx.read_graphml('./graph_ml/grid_5_5.graphml')

# Dump the basic structure: node/edge views and their counts.
print('\nnodes: ', G5.nodes, '\nedges: ', G5.edges)
print('\nnodes.n: ', G5.number_of_nodes(), '\nedges.n: ', G5.number_of_edges())
#print(list(G5.adj['m']), G5.degree[1])
print('\nnodes.adj: ', G5.adj.items(), '\nnodes.data: ', G5.nodes.data())
#print('\nadj_matrix: ', nx.adjacency_matrix(G5))
# Draw the graph in the left half of the figure window and block on show().
plt.subplot(121)
nx.draw(G5, with_labels=True, font_weight='bold')
plt.show()

#end of code
Beispiel #32
0
    def get(self):
        """Handle GET: load newGraph.xml, prune degree<=1 nodes, compute
        groups and four central nodes, then respond with an HTML page that
        renders the graph as a d3.js force-directed layout.

        NOTE(review): written against the networkx 1.x API (``G.node[...]``
        attribute access, ``json_graph.dumps``); both are gone in 2.x.
        """
        G = nx.read_graphml('newGraph.xml')
        G = G.to_undirected()

        # Keep only nodes with more than one neighbour; label the survivors.
        # NOTE(review): mutating G while iterating G.nodes() is only safe on
        # networkx 1.x, where nodes() returns a list snapshot.
        for n in G.nodes():
            if G.degree(n) > 1:
                G.node[n]['name'] = n
                G.node[n]['central'] = 0
            else:
                G.remove_node(n)
        #jsonGraph = json.dumps(dict(nodes=[G.node[n] for n in G.nodes()],links=[{'source':u,'target':v, 'value':1} for u,v in G.edges()]))
        #G = MainHandler.findCommunity(self, G, 'Sin')

        # Partition the graph and pick four "winner" nodes (bm/em/cm/dm) from
        # ga.Centrality; each gets a distinct 'central' marker which drives
        # the stroke colour in the d3 template below.
        G, numOfGroups = ga.groupGraph(G, 'Sin')
        bm, em, cm, dm = ga.Centrality(G, 'Sin')

        G.node[bm]['central'] = 1
        G.node[em]['central'] = 2
        G.node[cm]['central'] = 3
        G.node[dm]['central'] = 4

        # Serialise the graph and splice it into the HTML/JS template (the
        # single %s placeholder near the bottom of the template).
        jsonGraph = json_graph.dumps(G)
        self.response.out.write("""<!DOCTYPE html>
<meta charset="utf-8">
<style>

.node {
  stroke: #fff;
  stroke-width: 1.5px;
}

.link {
  stroke: #999;
  stroke-opacity: .6;
}

</style>
<body>
<script src="http://d3js.org/d3.v3.min.js"></script>
<script>

var width = 1000,
    height = 1000;

var color = d3.scale.category20();

var force = d3.layout.force()
    .charge(-500)
    .linkDistance(1)
    .linkStrength(0.6)
    .size([width, height]);

var svg = d3.select("body").append("svg")
    .attr("width", width)
    .attr("height", height);
    
var jsonGraph = %s

function drawG(graph) {
  force
      .nodes(graph.nodes)
      .links(graph.links)
      .start();

  var link = svg.selectAll("line.link")
      .data(graph.links)
      .enter().append("line")
      .attr("class", "link")
      .style("stroke-width", function(d) { return Math.sqrt(d.value); });

  var node = svg.selectAll("circle.node")
      .data(graph.nodes)
      .enter().append("circle")
      .attr("class", "node")
      .attr("r", 5)
      .style("fill", function(d) { return color(d.group); })
      .style("stroke", function(d) { return color(d.group + d.central); })
      .style("stroke-width", "4")
      .call(force.drag);

  node.append("title")
      .text(function(d) { return d.name; });

  force.on("tick", function() {
  
    for (var i=0;i<graph.nodes.length;i++)
    {
        if(graph.nodes[i].group == 0){
            graph.nodes[i].x = width / 2;
            graph.nodes[i].y = height / 2;
        }
    }
  
    link.attr("x1", function(d) { return d.source.x; })
        .attr("y1", function(d) { return d.source.y; })
        .attr("x2", function(d) { return d.target.x; })
        .attr("y2", function(d) { return d.target.y; });

    node.attr("cx", function(d) {return d.x; })
        .attr("cy", function(d) {return d.y; });
  });
}

drawG(jsonGraph);

</script>""" % jsonGraph)
Beispiel #33
0
def create_wm_mask(subject_id, subjects_dir, fs_dir, parcellation_name):
    """Build a binary white-matter mask and save it as
    ``<fs_dir>/mri/fsmask_1mm.nii.gz``.

    Starting from the FreeSurfer ribbon, the mask is cleaned up by removing
    (eroded) ventricles/CSF, grey nuclei and the brainstem, re-adding the
    corpus callosum / unknown voxels, and finally subtracting the cortical
    ROIs of the requested parcellation.

    Parameters
    ----------
    subject_id : FreeSurfer subject name (subdirectory of ``subjects_dir``).
    subjects_dir : FreeSurfer SUBJECTS_DIR.
    fs_dir : ignored -- recomputed from subjects_dir/subject_id below
        (kept in the signature for call-site compatibility).
    parcellation_name : key into the Lausanne2008 parcellation dict;
        selects the node-information graphml and the ROI volume to subtract.
    """
    iflogger.info("Create white matter mask")
    fs_dir = op.join(subjects_dir, subject_id)
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    pgpath = cmp_config._get_lausanne_parcellation(
        'Lausanne2008')[parcellation_name]['node_information_graphml']
    # load ribbon as basis for white matter mask
    fsmask = nb.load(op.join(fs_dir, 'mri', 'ribbon.nii.gz'))
    fsmaskd = fsmask.get_data()

    wmmask = np.zeros(fsmaskd.shape)
    # extract right and left white matter (ribbon labels 120 / 20)
    idx_lh = np.where(fsmaskd == 120)
    idx_rh = np.where(fsmaskd == 20)

    wmmask[idx_lh] = 1
    wmmask[idx_rh] = 1

    # remove subcortical nuclei from white matter mask
    aseg = nb.load(op.join(fs_dir, 'mri', 'aseg.nii.gz'))
    asegd = aseg.get_data()

    try:
        import scipy.ndimage.morphology as nd
    except ImportError:
        raise Exception('Need scipy for binary erosion of white matter mask')

    # need binary erosion function
    imerode = nd.binary_erosion

    # ventricle erosion
    csfA = np.zeros(asegd.shape)
    csfB = np.zeros(asegd.shape)

    # structuring elements for erosion
    se1 = np.zeros((3, 3, 5))
    se1[1, :, 2] = 1
    se1[:, 1, 2] = 1
    se1[1, 1, :] = 1
    se = np.zeros((3, 3, 3))
    se[1, :, 1] = 1
    se[:, 1, 1] = 1
    se[1, 1, :] = 1

    # lateral ventricles, thalamus proper and caudate
    # the latter two removed for better erosion, but put back afterwards
    idx = np.where((asegd == 4) | (asegd == 43) | (asegd == 11) | (asegd == 50)
                   | (asegd == 31) | (asegd == 63) | (asegd == 10)
                   | (asegd == 49))
    csfA[idx] = 1
    csfA = imerode(imerode(csfA, se1), se)

    # thalmus proper and cuadate are put back because they are not lateral ventricles
    idx = np.where((asegd == 11) | (asegd == 50) | (asegd == 10)
                   | (asegd == 49))
    csfA[idx] = 0

    # REST CSF, IE 3RD AND 4TH VENTRICULE AND EXTRACEREBRAL CSF
    # 43 ??, 4??  213?, 221?
    # more to discuss.
    # (A combined np.where over all these labels used to precede this loop;
    # its result was immediately overwritten here, so the dead statement was
    # removed.)
    for i in [5, 14, 15, 24, 44, 72, 75, 76, 213, 221]:
        idx = np.where(asegd == i)
        csfB[idx] = 1

    # do not remove the subthalamic nucleus for now from the wm mask
    # 23, 60
    # would stop the fiber going to the segmented "brainstem"

    # grey nuclei, either with or without erosion
    gr_ncl = np.zeros(asegd.shape)

    # with erosion
    for i in [10, 11, 12, 49, 50, 51]:
        idx = np.where(asegd == i)
        # temporary volume
        tmp = np.zeros(asegd.shape)
        tmp[idx] = 1
        tmp = imerode(tmp, se)
        idx = np.where(tmp == 1)
        gr_ncl[idx] = 1

    # without erosion
    for i in [13, 17, 18, 26, 52, 53, 54, 58]:
        idx = np.where(asegd == i)
        gr_ncl[idx] = 1

    # remove remaining structure, e.g. brainstem
    remaining = np.zeros(asegd.shape)
    idx = np.where(asegd == 16)
    remaining[idx] = 1

    # now remove all the structures from the white matter
    idx = np.where((csfA != 0) | (csfB != 0) | (gr_ncl != 0)
                   | (remaining != 0))
    wmmask[idx] = 0
    iflogger.info(
        "Removing lateral ventricles and eroded grey nuclei and brainstem from white matter mask"
    )

    # ADD voxels from 'cc_unknown.nii.gz' dataset
    ccun = nb.load(op.join(fs_dir, 'label', 'cc_unknown.nii.gz'))
    ccund = ccun.get_data()
    idx = np.where(ccund != 0)
    iflogger.info("Add corpus callosum and unknown to wm mask")
    wmmask[idx] = 1

    # check if we should subtract the cortical rois from this parcellation
    iflogger.info(
        "Loading %s to subtract cortical ROIs from white matter mask" %
        ('ROI_%s.nii.gz' % parcellation_name))
    roi = nb.load(op.join(op.curdir, 'ROI_%s.nii.gz' % parcellation_name))
    roid = roi.get_data()
    assert roid.shape[0] == wmmask.shape[0]
    pg = nx.read_graphml(pgpath)
    for brk, brv in pg.nodes(data=True):
        if brv['dn_region'] == 'cortical':
            iflogger.info("Subtracting region %s with intensity value %s" %
                          (brv['dn_region'], brv['dn_correspondence_id']))
            idx = np.where(roid == int(brv['dn_correspondence_id']))
            wmmask[idx] = 0

    # output white matter mask. crop and move it afterwards
    wm_out = op.join(fs_dir, 'mri', 'fsmask_1mm.nii.gz')
    img = nb.Nifti1Image(wmmask, fsmask.affine, fsmask.header)
    iflogger.info("Save white matter mask: %s" % wm_out)
    nb.save(img, wm_out)
Beispiel #34
0
 def _init_networkx_graph(self):
     """Initialise ``self.DG``: read the persisted GraphML graph if one
     exists at GRAPH_PATH, otherwise start with an empty directed graph.

     node_type=int makes read_graphml parse node ids back into ints.
     """
     if not os.path.exists(self.GRAPH_PATH):
         self.DG = nx.DiGraph()
     else:
         self.DG = nx.read_graphml(self.GRAPH_PATH, node_type=int)
Beispiel #35
0
def main(simulated_time):
    """Run one YAFS-style fog simulation over the Euclidean.graphml topology.

    Deploys a single application whose sinks sit on the 20 nodes with the
    highest betweenness centrality, runs a failure-aware population control
    for `simulated_time` units, and writes the surviving network to
    final_network.graphml.

    NOTE(review): Python 2 only (print statements, itertools.izip).
    """
    random.seed(RANDOM_SEED)
    np.random.seed(RANDOM_SEED)
    """
    TOPOLOGY from a json
    """

    t = Topology()
    t.G = nx.read_graphml("Euclidean.graphml")
    t.G = nx.convert_node_labels_to_integers(t.G,
                                             first_label=0,
                                             ordering='default',
                                             label_attribute=None)

    print "Nodes: %i" % len(t.G.nodes())
    print "Edges: %i" % len(t.G.edges())
    #MANDATORY fields of a link
    # Default values =  {"BW": 1, "PR": 1}
    # Assign bandwidth/propagation value 1.0 to every edge.
    valuesOne = dict(itertools.izip(t.G.edges(), np.ones(len(t.G.edges()))))

    nx.set_edge_attributes(t.G, name='BW', values=valuesOne)
    nx.set_edge_attributes(t.G, name='PR', values=valuesOne)

    # Rank devices by betweenness centrality; the top 20 will host sinks.
    centrality = nx.betweenness_centrality(t.G)
    nx.set_node_attributes(t.G, name="centrality", values=centrality)

    sorted_clustMeasure = sorted(centrality.items(),
                                 key=operator.itemgetter(1),
                                 reverse=True)

    top20_devices = sorted_clustMeasure[:20]
    main_fog_device = copy.copy(top20_devices[0][0])

    print "-" * 20
    print "Top 20 centralised nodes:"
    for item in top20_devices:
        print item
    print "-" * 20
    """
    APPLICATION
    """
    app1 = create_application("app1")
    """
    PLACEMENT algorithm
    """
    #There are not modules to place.
    placement = NoPlacementOfModules("NoPlacement")
    """
    POPULATION algorithm
    """
    # 10% of the nodes act as traffic sources.
    number_generators = int(len(t.G) * 0.1)
    print number_generators

    #you can use whatever funciton to change the topology
    dStart = deterministicDistributionStartPoint(0, 100, name="Deterministic")
    dStart2 = exponentialDistributionStartPoint(500,
                                                100.0,
                                                name="Deterministic")
    pop = Pop_and_Failures(name="mttf-nodes",
                           srcs=number_generators,
                           activation_dist=dStart2)
    pop.set_sink_control({
        "ids": top20_devices,
        "number": 1,
        "module": app1.get_sink_modules()
    })

    # Sources emit the M.Action message every 10 time units.
    dDistribution = deterministicDistribution(name="Deterministic", time=10)
    pop.set_src_control({
        "number": 1,
        "message": app1.get_message("M.Action"),
        "distribution": dDistribution
    })

    #In addition, a source includes a distribution function:
    """--
    SELECTOR algorithm
    """
    selectorPath = BroadPath()
    """
    SIMULATION ENGINE
    """
    s = Sim(t, default_results_path="Results_%s_exp" % (simulated_time))
    s.deploy_app(app1, placement, pop, selectorPath)

    s.run(simulated_time,
          test_initial_deploy=False,
          show_progress_monitor=False)
    # s.draw_allocated_topology() # for debugging
    print "Total nodes available in the  toopology %i" % len(
        s.topology.G.nodes())
    print "Total edges available in the  toopology %i" % len(
        s.topology.G.edges())

    print pop.nodes_removed
    nx.write_graphml_lxml(s.topology.G, "final_network.graphml")
Beispiel #36
0
 def from_string(cls, string):
     """Build a model instance from GraphML markup given as a string.

     Bug fix: the previous implementation wrote into a fresh StringIO and
     handed it to read_graphml without rewinding, so the parser started at
     end-of-stream and saw no data. Constructing ``StringIO(string)``
     directly leaves the cursor at position 0.
     """
     buf = StringIO(string)
     graph = read_graphml(buf)
     model_instance = cls(graph)
     return model_instance
Beispiel #37
0
 def from_file(cls, filename):
     """Read a GraphML file and wrap the resulting graph in this class."""
     return cls(read_graphml(filename))
Beispiel #38
0
#!/usr/bin/env python
import sys

import networkx as nx


def section():
    """Emit a blank line, used as a visual separator between report sections."""
    print('')

# Load the bibliographic graph passed as the first CLI argument.
G = nx.read_graphml(sys.argv[1])

# Node ids encode their kind: 'p' = paper, 'a' = author, 'i' = institution.
# NOTE(review): substring test -- an id containing more than one of these
# letters is counted in several categories; confirm the id scheme.
paper_ids = [p for p in G.nodes() if 'p' in p]
author_ids = [p for p in G.nodes() if 'a' in p]
institution_ids = [p for p in G.nodes() if 'i' in p]

print('{} papers. {} authors. {} institutions.'.format(len(paper_ids),
                                                       len(author_ids),
                                                       len(institution_ids)))

# Removing all institutions. (Plain loop instead of the original
# side-effect-only list comprehension, which allocated a throwaway list.)
for p in institution_ids:
    G.remove_node(p)

section()
# Component sizes in decreasing order (reverse=True instead of key=-x).
connected_components_sizes = sorted(
    (len(a) for a in nx.connected_components(G)), reverse=True)
print('The author-publication graph has {} nodes, {} connected components.'.
      format(len(G.nodes()), len(connected_components_sizes)))
print('Their sizes: {}, etc.'.format(connected_components_sizes[0:10]))

from collections import Counter
Beispiel #39
0
# Load the first Excel workbook matching 2015-1*.xlsx into a DataFrame.
# NOTE(review): Python 2 script; bipartite_graph/stock_network/remove_edges/
# weight are defined elsewhere in this file. The name `file` shadows the
# Python 2 builtin of the same name.
file = glob.glob('2015-1*.xlsx')  #for workingfile in filelist
print file, 'is working now'
df = pd.ExcelFile(file[0])
df = pd.read_excel(df, 'Sheet1')
df = pd.DataFrame(df)

# Build the bipartite graph, project it to a stock network, drop 0-weight edges.
G, top_nodes = bipartite_graph(df)
V = stock_network(G, top_nodes)
V = remove_edges(V, 0)
print '----'

os.chdir(
    '/Users/shine/work_hard/financial_network/data/threshold/stock_alpha/0.95_2015-1'
)
#t_list = ['601318.SH', '601166.SH', '600036.SH', '600016.SH', '600030.SH','600000.SH','300059.SZ','002183.SZ','000002.SZ']

# Threshold the network at the 95th percentile of edge weights.
w = weight(V)
w = pd.DataFrame(w)
beta = w.quantile(q=0.95)
print beta

V = remove_edges(V, beta)

print 'nodes_num', nx.number_of_nodes(V)
print 'edges_num', nx.number_of_edges(V)

# Reload a previously saved thresholded graph and report its size.
os.chdir('/Users/shine/work_hard/financial_network/data/threshold/stock_alpha')
V = nx.read_graphml('2015-1_95.graphml')
print 'nodes_num', nx.number_of_nodes(V)
print 'edges_num', nx.number_of_edges(V)
    t_15m = make_g_path('15m', threshold)

    fsa_v_60m = make_g_path('60m', fsa_v)
    knn_60m = make_g_path('60m', knn)
    t_60m = make_g_path('60m', threshold)

    fsa_v_360m = make_g_path('360m', fsa_v)
    knn_360m = make_g_path('360m', knn)
    t_360m = make_g_path('360m', threshold)

    fsa_v_1440m = make_g_path('1440m', fsa_v)
    knn_1440m = make_g_path('1440m', knn)
    t_1440m = make_g_path('1440m', threshold)

    log('loading the 15m graphs')
    fsa_v_15m_g = nx.read_graphml(fsa_v_15m)
    knn_15m_g = nx.read_graphml(knn_15m)
    t_15m_g = nx.read_graphml(t_15m)

    log('loading the 60m graphs')
    fsa_v_60m_g = nx.read_graphml(fsa_v_60m)
    knn_60m_g = nx.read_graphml(knn_60m)
    t_60m_g = nx.read_graphml(t_60m)

    log('loading the 360m graphs')
    fsa_v_360m_g = nx.read_graphml(fsa_v_360m)
    knn_360m_g = nx.read_graphml(knn_360m)
    t_360m_g = nx.read_graphml(t_360m)

    log('loading the 1440m graphs')
    fsa_v_1440m_g = nx.read_graphml(fsa_v_1440m)
Beispiel #41
0
		plt.show()

	sur = 1
	sz=5
	if sur is not None:
		sz=40
	analysand = 'Root_ar'#'Lemma_ar'
	node_of_interest = u'كون'#u'حقق'#u'ذكر'#'*kr'#'kwn'#'qwl' #u'ذكر' u'ارض'. 'حرم' 'فعل' 'حرم' 'ﻏﻀﺐ'
	method = 'breadth'
	
	quran, qtoc = load_data_from_csv(path = path + 'data/')
	if sur is not None:
		quran = quran[quran.sura == sur].reset_index()

	# second = quran.groupby(['aya','word']).agg('FORM_ar').apply(''.join).reset_index()
	I = nx.read_graphml(path + 'graphml/' + arabic_to_buc(node_of_interest) + '.graphml')
	root_in_edges = pd.DataFrame.from_dict(nx.get_edge_attributes(I, 'pos'), orient='index', columns=['pos'])
	root_in_edges[['sura','aya']] = root_in_edges['pos'].str.split(':', expand=True).astype('int64')
	print(qtoc.info())
	print(root_in_edges.info())
	print(root_in_edges.head())
	d = root_in_edges.reset_index() \
		.drop('pos',1) \
		.merge(qtoc[['No.','Place','Chronology']], 
			left_on='sura',
			right_on='No.', 
			how='left') \
		.drop('No.',1)
	print(d.head())
	print(len(d[d.Place == 'Meccan']), np.sort(d[d.Place == 'Meccan'].sura.unique()))
	print(len(d[d.Place == 'Medinan']), np.sort(d[d.Place == 'Medinan'].sura.unique()))
Beispiel #42
0
def create_roi(subject_id, subjects_dir):
    """ Creates the ROI_%s.nii.gz files using the given parcellation information
    from networks. Iteratively create volume.

    For every Lausanne2008 scale (processed from the most to the least
    regions) subcortical regions are copied from aseg and cortical ones are
    rasterised with FreeSurfer's mri_label2vol; coarser scales are made
    consistent with the finest one, then cortical labels are dilated into
    the aseg grey-matter ribbon. Saves ROI_<scale>.nii.gz and
    ROIv_<scale>.nii.gz under <fs_dir>/label/.

    NOTE(review): legacy-stack code -- ``pg.nodes_iter`` needs networkx 1.x
    and ``temp.sort`` on the result of ``zip()`` needs Python 2.
    """

    print("Create the ROIs:")
    fs_dir = op.join(subjects_dir,subject_id)

    # load aseg volume
    aseg = ni.load(op.join(fs_dir, 'mri', 'aseg.nii.gz'))
    asegd = aseg.get_data()	# numpy.ndarray

    # identify cortical voxels, right (3) and left (42) hemispheres
    idxr = np.where(asegd == 3)
    idxl = np.where(asegd == 42)
    xx = np.concatenate((idxr[0],idxl[0]))
    yy = np.concatenate((idxr[1],idxl[1]))
    zz = np.concatenate((idxr[2],idxl[2]))

    # initialize variables necessary for cortical ROIs dilation
    # dimensions of the neighbourhood for rois labels assignment (choose odd dimensions!)
    shape = (25,25,25)
    center = np.array(shape) // 2
    # dist: distances from the center of the neighbourhood
    dist = np.zeros(shape, dtype='float32')
    for x in range(shape[0]):
        for y in range(shape[1]):
            for z in range(shape[2]):
                distxyz = center - [x,y,z]
                dist[x,y,z] = math.sqrt(np.sum(np.multiply(distxyz,distxyz)))

    # LOOP throughout all the SCALES
    # (from the one with the highest number of region to the one with the lowest number of regions)
    #parkeys = gconf.parcellation.keys()
    scales = get_parcellation('Lausanne2008').keys()
    values = list()
    for i in range(len(scales)):
        values.append(get_parcellation('Lausanne2008')[scales[i]]['number_of_regions'])
    # Sort scales by region count, descending (Python 2 list sort on zip).
    temp = zip(values, scales)
    temp.sort(reverse=True)
    values, scales = zip(*temp)
    roisMax = np.zeros( (256, 256, 256), dtype=np.int16 ) # numpy.ndarray
    for i,parkey in enumerate(get_parcellation('Lausanne2008').keys()):
        parval = get_parcellation('Lausanne2008')[parkey]

        print("Working on parcellation: " + parkey)
        print("========================")
        pg = nx.read_graphml(parval['node_information_graphml'])

        # each node represents a brain region
        # create a big 256^3 volume for storage of all ROIs
        rois = np.zeros( (256, 256, 256), dtype=np.int16 ) # numpy.ndarray

        for brk, brv in pg.nodes_iter(data=True):   # slow loop

            # NOTE(review): `hemi` stays unbound (NameError later) if
            # dn_hemisphere is neither 'left' nor 'right' -- confirm the
            # graphml guarantees one of the two.
            if brv['dn_hemisphere'] == 'left':
                hemi = 'lh'
            elif brv['dn_hemisphere'] == 'right':
                hemi = 'rh'

            if brv['dn_region'] == 'subcortical':

                print("---------------------")
                print("Work on brain region: %s" % (brv['dn_region']) )
                print("Freesurfer Name: %s" %  brv['dn_fsname'] )
                print("---------------------")

                # if it is subcortical, retrieve roi from aseg
                idx = np.where(asegd == int(brv['dn_fs_aseg_val']))
                rois[idx] = int(brv['dn_correspondence_id'])

            elif brv['dn_region'] == 'cortical':
                print("---------------------")
                print("Work on brain region: %s" % (brv['dn_region']) )
                print("Freesurfer Name: %s" %  brv['dn_fsname'] )
                print("---------------------")

                labelpath = op.join(fs_dir, 'label', parval['fs_label_subdir_name'] % hemi)

                # construct .label file name
                fname = '%s.%s.label' % (hemi, brv['dn_fsname'])

                # execute fs mri_label2vol to generate volume roi from the label file
                # store it in temporary file to be overwritten for each region (slow!)
                #mri_cmd = 'mri_label2vol --label "%s" --temp "%s" --o "%s" --identity' % (op.join(labelpath, fname),
                #        op.join(fs_dir, 'mri', 'orig.mgz'), op.join(labelpath, 'tmp.nii.gz'))
                #runCmd( mri_cmd, log )
                mri_cmd = ['mri_label2vol','--label',op.join(labelpath, fname),'--temp',op.join(fs_dir, 'mri', 'orig.mgz'),'--o',op.join(labelpath, 'tmp.nii.gz'),'--identity']
                subprocess.check_call(mri_cmd)

                tmp = ni.load(op.join(labelpath, 'tmp.nii.gz'))
                tmpd = tmp.get_data()

                # find voxel and set them to intensity value in rois
                idx = np.where(tmpd == 1)
                rois[idx] = int(brv['dn_correspondence_id'])

        newrois = rois.copy()
        # store scale500 volume for correction on multi-resolution consistency
        if i == 0:
            print("Storing ROIs volume maximal resolution...")
            roisMax = rois.copy()
            idxMax = np.where(roisMax > 0)
            xxMax = idxMax[0]
            yyMax = idxMax[1]
            zzMax = idxMax[2]
        # correct cortical surfaces using as reference the roisMax volume (for consistency between resolutions)
        else:
            print("Adapt cortical surfaces...")
            #adaptstart = time()
            idxRois = np.where(rois > 0)
            xxRois = idxRois[0]
            yyRois = idxRois[1]
            zzRois = idxRois[2]
            # correct voxels labeled in current resolution, but not labeled in highest resolution
            for j in range(xxRois.size):
                if ( roisMax[xxRois[j],yyRois[j],zzRois[j]]==0 ):
                    newrois[xxRois[j],yyRois[j],zzRois[j]] = 0;
            # correct voxels not labeled in current resolution, but labeled in highest resolution
            for j in range(xxMax.size):
                if ( newrois[xxMax[j],yyMax[j],zzMax[j]]==0 ):
                    # assign the label of the nearest labelled voxel within
                    # the 25^3 neighbourhood (majority vote on ties)
                    local = extract(rois, shape, position=(xxMax[j],yyMax[j],zzMax[j]), fill=0)
                    mask = local.copy()
                    mask[np.nonzero(local>0)] = 1
                    thisdist = np.multiply(dist,mask)
                    thisdist[np.nonzero(thisdist==0)] = np.amax(thisdist)
                    value = np.int_(local[np.nonzero(thisdist==np.amin(thisdist))])
                    if value.size > 1:
                        counts = np.bincount(value)
                        value = np.argmax(counts)
                    newrois[xxMax[j],yyMax[j],zzMax[j]] = value
            #print("Cortical ROIs adaptation took %s seconds to process." % (time()-adaptstart))

        # store volume eg in ROI_scale33.nii.gz
        out_roi = op.join(fs_dir, 'label', 'ROI_%s.nii.gz' % parkey)
        # update the header
        hdr = aseg.get_header()
        hdr2 = hdr.copy()
        hdr2.set_data_dtype(np.uint16)
        print("Save output image to %s" % out_roi)
        img = ni.Nifti1Image(newrois, aseg.get_affine(), hdr2)
        ni.save(img, out_roi)

        # dilate cortical regions
        print("Dilating cortical regions...")
        #dilatestart = time()
        # loop throughout all the voxels belonging to the aseg GM volume
        for j in range(xx.size):
            if newrois[xx[j],yy[j],zz[j]] == 0:
                # same nearest-labelled-voxel assignment as above
                local = extract(rois, shape, position=(xx[j],yy[j],zz[j]), fill=0)
                mask = local.copy()
                mask[np.nonzero(local>0)] = 1
                thisdist = np.multiply(dist,mask)
                thisdist[np.nonzero(thisdist==0)] = np.amax(thisdist)
                value = np.int_(local[np.nonzero(thisdist==np.amin(thisdist))])
                if value.size > 1:
                    counts = np.bincount(value)
                    value = np.argmax(counts)
                newrois[xx[j],yy[j],zz[j]] = value
        #print("Cortical ROIs dilation took %s seconds to process." % (time()-dilatestart))

        # store volume eg in ROIv_scale33.nii.gz
        out_roi = op.join(fs_dir, 'label', 'ROIv_%s.nii.gz' % parkey)
        print("Save output image to %s" % out_roi)
        img = ni.Nifti1Image(newrois, aseg.get_affine(), hdr2)
        ni.save(img, out_roi)

    print("[ DONE ]")
Beispiel #43
0
    mp.freeze_support()

    parser = argparse.ArgumentParser(description='Compute 2-clubs of a graph.')
    parser.add_argument('graph')
    parser.add_argument('hubs',
                        metavar='Hub',
                        type=int,
                        nargs='+',
                        help='number of workers for the hub')
    group = parser.add_argument_group()
    group.add_argument('-b',
                       '--borough',
                       help='The borough result file to use.')
    group.add_argument(
        '-bn',
        '--borough_number',
        help='The id number of the borough. Default 0 = largest.',
        default=0)

    args = parser.parse_args()

    G = nx.read_graphml(args.graph)

    if args.borough:
        boroughs = pickle.load(open(args.borough))
        B = nx.Graph()
        B.add_edges_from(boroughs[args.borough_number])
        find_clubs(B, args.hubs)
    else:
        find_clubs(G, args.hubs)
Beispiel #44
0
def main():
    """Run a GRASP-style parameter sweep for the locomotive manoeuvre
    scheduling problem.

    Clears the ./Saídas output directory, loads locomotives/manoeuvres and
    the yard GraphML layout from ./Entradas, then for every (max-iterations,
    alpha) pair runs 100 repetitions of constructive search, recording the
    first and best "largest completion time" of each repetition. Results are
    written to ./Saídas/results5.
    """
    time = localtime()
    print("Started at:",
          str(time[3]) + ":" + str(time[4]) + ":" + str(time[5]),
          str(time[1]) + "/" + str(time[2]) + "/" + str(time[0]))
    chdir(sys.path[0])
    if not path.exists("./Saídas"):
        mkdir("Saídas")
    chdir("./Saídas")
    # Wipe any previous outputs. (Loop variable renamed from `file`, which
    # shadowed the builtin.)
    fileList = listdir("./")
    for old_file in fileList:
        remove("./" + old_file)
    """ entrada de dados (abrindo arquivos) """
    OrigemLoc = abrir_arquivo("../Entradas/locomotivas")
    Manobras = abrir_arquivo("../Entradas/manobras")
    ''' opening graphml... only undirected graph '''
    layoutPatio = read_graphml("../Entradas/uvaranas.graphml")

    maxIterationParameter = [5, 10, 15, 20, 30, 40, 50, 100, 200, 500]
    alphaParameter = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1]
    horizonteTempo = 360
    menorTempoAresta = 1

    # Resolve the textual node pairs of each manoeuvre against the yard graph.
    for i in range(len(Manobras)):
        man = Manobras[i]
        man[1] = pega_par_nos(layoutPatio, man[1])
        man[2] = pega_par_nos(layoutPatio, man[2])
        Manobras[i] = man
    nomeLocomotivas = []
    for i in range(len(OrigemLoc)):
        ol = pega_par_nos(layoutPatio, OrigemLoc[i][1])
        OrigemLoc[i] = (OrigemLoc[i][0], ol)
        nomeLocomotivas.append(OrigemLoc[i][0])
    results = {}
    for maxI in maxIterationParameter:
        for alpha in alphaParameter:
            for j in range(100):
                Matriz = gerar_matriz_alocacao(list(layoutPatio.edges()),
                                               horizonteTempo,
                                               menorTempoAresta)
                construir_solucao(layoutPatio, Manobras, OrigemLoc, Matriz,
                                  horizonteTempo, menorTempoAresta, "1", alpha)
                melhorSolucao = Matriz.copy()
                # Bug fix: the original unpacked into `melhorTempoMaior`
                # twice; the first slot is the mean, matching the
                # tempoMedio/tempoMenor/tempoMaior unpack below.
                melhorTempoMedio, melhorTempoMenor, melhorTempoMaior = avaliadorSolucao(
                    melhorSolucao, nomeLocomotivas)
                # `aux` keeps the first repetition's worst completion time.
                aux = melhorTempoMaior

                i = 1
                while i < maxI:
                    Matriz = gerar_matriz_alocacao(list(layoutPatio.edges()),
                                                   horizonteTempo,
                                                   menorTempoAresta)
                    construir_solucao(layoutPatio, Manobras, OrigemLoc,
                                      Matriz, horizonteTempo, menorTempoAresta,
                                      str(i), alpha)
                    tempoMedio, tempoMenor, tempoMaior = avaliadorSolucao(
                        Matriz, nomeLocomotivas)

                    if tempoMaior < melhorTempoMaior:
                        melhorSolucao = Matriz.copy()
                        melhorTempoMaior = tempoMaior
                    i += 1
                if (maxI, alpha) in results:
                    results[(maxI, alpha)].append((aux, melhorTempoMaior))
                else:
                    results[(maxI, alpha)] = [(aux, melhorTempoMaior)]
                print("m", maxI, "a", alpha, "nRes", j + 1)
    imprime_dic_arquivo(results, "./results5")

    time = localtime()
    print("Finished at:",
          str(time[3]) + ":" + str(time[4]) + ":" + str(time[5]),
          str(time[1]) + "/" + str(time[2]) + "/" + str(time[0]))
    return 0
Beispiel #45
0
# Pyomo model components for a budget-constrained road/route problem.
# NOTE(review): `model`, Set/Param/Var, the numeric domains and `nx` are
# defined/imported earlier in this file.
model.route_no = Set()
model.nRoutes = Param(within=NonNegativeIntegers)
model.nLayers = Param(within=NonNegativeIntegers)
model.largeNumber = Param(within=NonNegativeIntegers)
model.budget = Param(within=NonNegativeIntegers)
model.nRoadSegs = Param(within=NonNegativeIntegers)
model.arcs = Set(within=model.route_no * model.nodes * model.nodes)
model.weight = Param(model.arcs, within=NonNegativeReals)
model.roadSegs = Set()
model.roadInstall = Var(model.roadSegs, within=Boolean)
model.x = Var(model.arcs, within=Boolean)
model.costInstall = Param(model.roadSegs)
model.boundary_nodes = Set(within=model.route_no * model.nodes)
model.boundary_node_weights = Param(model.boundary_nodes)

# Derive a travel-time attribute for every edge of the road graph.
# NOTE(review): weight * 0.000621371 converts metres to miles; confirm
# the unit of 'speed_urban' and the trailing * 3.6 factor.
model.graph = nx.read_graphml("graph_data/greenville1_5.graphml")
for edge in model.graph.edges():
    u, v = edge
    model.graph[u][v]['time'] = model.graph[u][v][
        'weight'] * 0.000621371 / model.graph.node[u]['speed_urban'] * 3.6  #

model.Routes = {}
# Read routes from file: one whitespace-separated node list per line,
# keyed 1..N. The context manager closes the handle even if parsing fails
# (the original left the file open on error).
with open("routes.txt", "r") as routes_file:
    for count, line in enumerate(routes_file, start=1):
        model.Routes[count] = [int(r) for r in line.split()]
# Assemble the graphs to write out, paired with their file-name stems.
# NOTE(review): nx_graphs_raw / nx_graphs_norm / nx_all_raw / nx_all_norm,
# output_path, meta_data_df, load_networkx and data_date_graphs are defined
# earlier in this file. Plain loops/list() replace the original
# side-effect-only list comprehensions.
out_graphs = list(nx_graphs_raw.values())
for key in nx_graphs_raw:
    print(key)
save_names = ["Gaa", "Gad", "Gda", "Gdd"]
out_graphs.extend(nx_graphs_norm.values())
for key in nx_graphs_norm:
    print(key)
save_names += ["Gaan", "Gdan", "Gadn", "Gddn"]
out_graphs.append(nx_all_raw)
save_names.append("G")
out_graphs.append(nx_all_norm)
save_names.append("Gn")

# Persist every graph as GraphML next to the metadata CSV.
for name, graph in zip(save_names, out_graphs):
    nx.write_graphml(graph, output_path / (name + ".graphml"))

meta_data_df.to_csv(output_path / "meta_data.csv")

#%% verify things are right
# Round-trip check: the adjacency matrix read back from disk (and via the
# project loader) must equal the one we wrote.
print("\n\n\n\nChecking graphs are the same when saved")
for name, graph_wrote in zip(save_names, out_graphs):
    print(name)
    graph_read = nx.read_graphml(output_path / (name + ".graphml"))
    adj_read = nx.to_numpy_array(graph_read)
    adj_wrote = nx.to_numpy_array(graph_wrote)
    print(np.array_equal(adj_read, adj_wrote))
    graph_loader = load_networkx(name, version=data_date_graphs)
    adj_loader = nx.to_numpy_array(graph_loader)
    print(np.array_equal(adj_wrote, adj_loader))
    print()
def load_graphml(filename, folder=None):
    """
    Load a GraphML file from disk and convert the node/edge attributes to correct data types.

    Parameters
    ----------
    filename : string
        the name of the graphml file (including file extension)
    folder : string
        the folder containing the file, if None, use default data folder

    Returns
    -------
    networkx multidigraph
    """
    start_time = time.time()

    # read the graph from disk
    if folder is None:
        folder = globals.data_folder
    path = '{}/{}'.format(folder, filename)
    G = nx.MultiDiGraph(nx.read_graphml(path, node_type=int))

    # convert graph crs attribute from saved string to correct dict data type
    G.graph['crs'] = ast.literal_eval(G.graph['crs'])

    if 'streets_per_node' in G.graph:
        G.graph['streets_per_node'] = ast.literal_eval(G.graph['streets_per_node'])

    # convert numeric node tags from string to numeric data types
    log('Converting node and edge attribute data types')
    for node, data in G.nodes(data=True):
        data['osmid'] = int(data['osmid'])
        data['x'] = float(data['x'])
        data['y'] = float(data['y'])

    # convert numeric, bool, and list node tags from string to correct data types
    for u, v, key, data in G.edges(keys=True, data=True):

        # first parse oneway to bool and length to float - they should always have only 1 value each
        data['oneway'] = ast.literal_eval(data['oneway'])
        data['length'] = float(data['length'])

        # these attributes might have a single value, or a list if edge's topology was simplified
        for attr in ['highway', 'name', 'bridge', 'tunnel', 'lanes', 'ref', 'maxspeed', 'service', 'access', 'area', 'landuse', 'width', 'est_width']:
            # if the value looks like "[...]" it is a stringified list to be
            # parsed; startswith/endswith (instead of indexing [0]/[-1])
            # avoids an IndexError on empty-string attribute values
            if attr in data and data[attr].startswith('[') and data[attr].endswith(']'):
                # convert the string list to a list type, else leave as single-value string
                data[attr] = ast.literal_eval(data[attr])

        # osmid might have a single value or a list, but if single value, then parse int
        if 'osmid' in data:
            if data['osmid'].startswith('[') and data['osmid'].endswith(']'):
                data['osmid'] = ast.literal_eval(data['osmid'])
            else:
                data['osmid'] = int(data['osmid'])

        # if geometry attribute exists, load the string as well-known text to shapely LineString
        if 'geometry' in data:
            data['geometry'] = wkt.loads(data['geometry'])

    # remove node_default and edge_default metadata keys if they exist
    if 'node_default' in G.graph:
        del G.graph['node_default']
    if 'edge_default' in G.graph:
        del G.graph['edge_default']

    log('Loaded graph with {:,} nodes and {:,} edges in {:,.2f} seconds from "{}"'.format(len(list(G.nodes())),
                                                                                          len(list(G.edges())),
                                                                                          time.time()-start_time,
                                                                                          path))
    return G
Beispiel #48
0
def incweight_path(path):
    """Increment the weight counter of every node on *path*.

    Delegates the per-node update to ``incweight`` (defined elsewhere
    in this module).
    """
    for node in path:
        incweight(node)


if __name__ == "__main__":
    predef.nodes = {}
    predef.links = {}
    predef.ip2portTable = {}
    nodes = predef.nodes
    links = predef.links
    ip2portTable = predef.ip2portTable
    topology_name = "Noel"
    filename = "/home/yutao/Desktop/networkmap-evaluation/rsa-eval/dataset/topologyzoo/sources/" + topology_name + ".graphml"
    topo = nx.read_graphml(filename).to_undirected()

    # labels={}
    # for n in topo.adj:
    #     labels[n]=n
    # pos = nx.spring_layout(topo)
    # nx.draw(topo,pos=pos)
    # nx.draw_networkx_labels(topo,pos,labels=labels)
    # plt.show()

    statemachine_pos = {'n0'}

    for n in topo.adj:
        predef.nodes[n] = []

    for s in statemachine_pos:
    file_eco = open(output_dir + '/output_ecosystem.csv', 'w')
    out_eco = csv.DictWriter(file_eco, header_names)

    ###this is for python < 2.7
    #    headers_dict = dict()
    #    for n in header_names:
    #        headers_dict[n] = n
    #
    #    out_eco.writerow(headers_dict)

    ### for python >= 2.7 comment the above block and uncomment the following line
    out_eco.writeheader()

    network_file = output_dir + '/' + SRC_NET_FILE
    if READ_FILE_NETWORK:
        graph = nx.read_graphml(network_file)
        net = Network(graph)

        print('connectance = ', net.connectance())

        tls = net.get_trophic_levels()

        top, top_preds = net.top_predators()
        basal, basal_sps = net.basal()
        for u, v in net.edges():
            if u in basal_sps and v in top_preds and tls[v] == 3:
                net.remove_edge(u, v)

        print('new connectance = ', net.connectance())
    else:
        net = obtain_interactions_network()
Beispiel #50
0
import networkx as nx
import numpy as np
from numpy.linalg import norm
from sklearn.preprocessing import normalize
from sklearn.cluster import KMeans
from handler import Handler
import collections
import queue
import operator
import sys
import csv



# Road network previously exported by OSMnx; 'trips' is presumably the
# edge-attribute key used as the weight downstream — confirm against the
# partitioning code that consumes `data`.
G = nx.read_graphml("../Osmnx_large_trips_graph_75000.graphml")
data = 'trips'
# G = nx.read_edgelist('file/edgeTestPickme/edgeList.txt',nodetype=int, data=(('weight',float),))
# data = 'weight'

print(len(G.nodes()))


# Read the whole partition file at once; each line appears to hold one
# set-literal of node ids (see the '{'/'}' stripping in the parse loop below).
with open("alpha_2.txt") as f:
    datas = f.read()
# print(data.split(']')[0])
partitionArray = []
nodeCounter = 0
for k in range(0, 1626):
    partition_data = datas.split('\n')[k].replace('{','').replace('}', '').split(', ')
    tempartition = []
Beispiel #51
0
    def FGCSmodel3(self, populationSeed):
        """Build the FGCS experiment model.

        Loads (or synthesizes) the fog topology, assigns per-device
        resources and CPU speeds, computes the all-pairs device distance
        matrix, and loads the application and user/gateway placement data
        from the configured JSON files.  Populates many instance
        attributes (fogNumber, fogResources, fogSpeedCPU,
        devDistanceMatrix, cloudDeviceId, averagePathLength,
        totalResources, serviceResources, serviceNumber, serviceMatrix,
        mobilePlacementMatrix).

        populationSeed seeds a private RNG so that a given seed
        reproduces the same randomized attributes and IoT placements.

        NOTE(review): uses ``xrange`` and ``G.node[...]`` — Python 2 /
        networkx 1.x APIs; confirm the intended runtime before upgrading.
        """

        # Randomized attribute generators are kept as *strings* and
        # eval()'d at each use so every use draws a fresh random value
        # from self.rnd.
        fogResources = "self.rnd.randint(4,10)"
        servResources = "self.rnd.randint(1,4)"
        cloudResources = float('inf')
        netLatency = "self.rnd.randint(75,125)"
        cloudLatency = 100.0
        fogSpeed = 100
        cloudSpeed = 1000
        #        cnf = config.CONFIG()
        self.rnd = random.Random()
        self.rnd.seed(populationSeed)
        #        with open(cnf.topologyJson,'r') as json_data:
        #            myjson = json.load(json_data)
        #            json_data.close()
        #

        #******************************************************************************************
        #   Topology definition
        #******************************************************************************************

        # A config value like 'nxgenerated.<N>' requests a synthetic
        # Barabasi-Albert topology with N nodes (20% of them gateways);
        # otherwise the named GraphML file is loaded and its node labels
        # are normalized to the integers 0..n-1.
        if self.cnf.topologyGraphml.startswith('nxgenerated'):
            tmp, numNodes = self.cnf.topologyGraphml.split('.')
            self.cnf.numberOfIoTGateways = int(int(numNodes) * 0.2)
            G = nx.barabasi_albert_graph(n=int(numNodes), m=2)
        else:
            G = nx.read_graphml(self.cnf.topologyGraphml)
            G = nx.convert_node_labels_to_integers(G,
                                                   first_label=0,
                                                   ordering='default',
                                                   label_attribute=None)

        # Rank nodes by betweenness centrality, highest first.
        centralityMeasure = nx.betweenness_centrality(G)
        sorted_clustMeasure = sorted(centralityMeasure.items(),
                                     key=operator.itemgetter(1),
                                     reverse=True)

        self.fogNumber = len(G.nodes)
        self.devDistanceMatrix = [[0 for j in xrange(self.fogNumber)]
                                  for i in xrange(self.fogNumber)]
        self.fogResources = list()
        self.fogSpeedCPU = list()
        # Nodes whose centrality is at or below this boundary (the
        # numberOfIoTGateways-th smallest) are gateway candidates.
        centralityBoundary = sorted_clustMeasure[self.cnf.numberOfIoTGateways *
                                                 -1][1]
        gatewayCandidates = list()
        for i in list(G.nodes):
            if i == sorted_clustMeasure[0][0]:
                # The most central node plays the role of the cloud.
                self.cloudDeviceId = i
                self.fogResources.append(
                    cloudResources)  # resource capacity for the cloud
                self.fogSpeedCPU.append(
                    cloudSpeed)  # CPU speed for the cloud
            else:
                self.fogResources.append(eval(
                    fogResources))  # resource capacity for the fog devices
                self.fogSpeedCPU.append(
                    fogSpeed)  # CPU speed for the fog devices
            if centralityMeasure[i] <= centralityBoundary:
                gatewayCandidates.append(i)

        self.rnd.shuffle(gatewayCandidates)
        gatewayList = gatewayCandidates[:self.cnf.numberOfIoTGateways]

        #print gatewayList

        # NOTE(review): this argument order (values before name) labels
        # every node 'fog' only on networkx 1.x; nx 2.x changed
        # set_node_attributes to (G, values, name) semantics — verify.
        nx.set_node_attributes(G, 'fog', 'nodetype')
        for i in gatewayList:
            G.node[i]['nodetype'] = 'gateway'
        G.node[self.cloudDeviceId]['nodetype'] = 'cloud'
        nx.write_graphml(G, self.cnf.topologyGraphml + '.labelled')

        for s, t in list(G.edges):
            if s == self.cloudDeviceId or t == self.cloudDeviceId:
                G[s][t][
                    'weight'] = cloudLatency  #network latency for the connections with the cloud
            else:
                G[s][t]['weight'] = eval(
                    netLatency)  #network latency between fog devices

        # All-pairs weighted shortest paths, stored symmetrically.
        for i in range(0, len(G.nodes)):
            for j in range(i, len(G.nodes)):

                mylength = nx.shortest_path_length(G,
                                                   source=i,
                                                   target=j,
                                                   weight="weight")
                self.devDistanceMatrix[i][j] = mylength
                self.devDistanceMatrix[j][i] = mylength

        #TODO think about improving how the makespan-time normalization is computed;
        #it might be interesting to use the mean distance between the devices and the
        #cloud, which can be computed with the following instruction:
        #DISTs = nx.single_source_dijkstra_path_length(G,self.cloudDeviceId)
        #d = float(sum(DISTs.values())) / (len(DISTs)-1) #minus 1 because one entry is the cloud itself, whose distance is 0
        #or maybe use the maximum with max(DISTs.values())

        #and the following line if we want the minimum distance between all the nodes
        #self.averagePathLength = nx.average_shortest_path_length(G, weight="weight")-cloudLatency

        DISTs = nx.single_source_dijkstra_path_length(G, self.cloudDeviceId)
        d = max(DISTs.values())
        self.averagePathLength = d - cloudLatency
        # Total fog capacity: sum of all device resources excluding the
        # cloud's (infinite) slot.
        self.totalResources = np.sum(self.fogResources[0:self.cloudDeviceId] +
                                     self.fogResources[self.cloudDeviceId +
                                                       1:])

        #******************************************************************************************
        #   Applicaiton definition
        #******************************************************************************************

        f = open(self.cnf.applicationJson, 'r')
        txtjson = f.read()
        f.close()
        myjson = json.loads(txtjson)

        self.serviceResources = list()
        for appInst in myjson:
            for moduleInst in appInst["module"]:
                #TODO                self.serviceResources.append(moduleInst["RAM"])
                #If we want the values from the json, delete the next line and uncomment the previous one.
                self.serviceResources.append(eval(servResources))

        self.serviceNumber = len(self.serviceResources)
        self.serviceMatrix = [[
            0 for j in xrange(self.serviceNumber *
                              self.cnf.numberOfReplicatedApps)
        ] for i in xrange(self.serviceNumber * self.cnf.numberOfReplicatedApps)
                              ]

        # Mark service-to-service messages; each app replica repeats the
        # pattern shifted by serviceNumber positions.
        for appInst in myjson:
            for messageInst in appInst["message"]:
                for shift in range(0, self.cnf.numberOfReplicatedApps):
                    st = shift * self.serviceNumber
                    self.serviceMatrix[st +
                                       messageInst["s"]][st +
                                                         messageInst["d"]] = 1

#******************************************************************************************
#   User connection/gateways definition
#******************************************************************************************

        f = open(self.cnf.userJson, 'r')
        txtjson = f.read()
        f.close()
        myjson = json.loads(txtjson)

        requestedServicesSet = set()
        self.mobilePlacementMatrix = [
            list() for i in xrange(self.serviceNumber)
        ]
        for appInst in myjson:
            for mobileInst in appInst["mobile"]:
                self.mobilePlacementMatrix[mobileInst["serviceId"]].append(
                    mobileInst["gatewayId"])
                requestedServicesSet.add(mobileInst["serviceId"])

        userRequestedServicesTemp = list(requestedServicesSet)
        userRequestedServices = list()
        for shift in range(0, self.cnf.numberOfReplicatedApps):
            for i in iter(userRequestedServicesTemp):
                userRequestedServices.append(i + (shift * self.serviceNumber))

        #we randomly pick one of the gateway nodes and place the first IoT device
        #there; from that node we then look for as many nearest nodes as IoT
        #devices of that service we still have to deploy
        self.mobilePlacementMatrix = [
            list() for i in xrange(self.serviceNumber *
                                   self.cnf.numberOfReplicatedApps)
        ]
        numberOfRepeatedIoTDevices = self.cnf.numberOfIoTGateways * self.cnf.numberofIoTDevicesPerGw / self.cnf.numberOfReplicatedApps
        for idServ in iter(userRequestedServices):
            gwId = gatewayList[self.rnd.randint(0, len(gatewayList) - 1)]
            self.mobilePlacementMatrix[idServ].append(gwId)
            candidateNeighbords = copy.copy(gatewayList)
            candidateNeighbords.remove(gwId)
            # Greedy nearest-neighbour selection among the remaining gateways.
            for i in range(0, numberOfRepeatedIoTDevices - 1):
                minDist = float('inf')
                neighbordId = -1
                for jGw in iter(candidateNeighbords):
                    if self.devDistanceMatrix[jGw][gwId] < minDist:
                        minDist = self.devDistanceMatrix[jGw][gwId]
                        neighbordId = jGw
                self.mobilePlacementMatrix[idServ].append(neighbordId)
                candidateNeighbords.remove(neighbordId)

        # Replicate the per-service resource list once per extra replica
        # and scale serviceNumber accordingly.
        tmpServiceResources = copy.copy(self.serviceResources)
        self.serviceNumber = self.serviceNumber * self.cnf.numberOfReplicatedApps
        for i in range(0, self.cnf.numberOfReplicatedApps - 1):
            self.serviceResources = self.serviceResources + copy.copy(
                tmpServiceResources)
THRESHOLD = 0.1
def calc_weight(config1, config2):
    """Return the Euclidean distance between two configuration vectors.

    Both arguments are numpy arrays of the same shape; the result is a
    plain Python float.
    """
    # np.linalg.norm computes sqrt(sum((config2 - config1)**2)) in one
    # vectorized call; float() preserves the original return type.
    return float(np.linalg.norm(config2 - config1))

def state_to_numpy(state):
    """Parse a whitespace-separated string of numbers into a float array."""
    return np.array([float(token) for token in state.split()])

def save_modified_graph(G):
    """Reweight edges by Euclidean state distance, drop edges heavier
    than THRESHOLD, and write the pruned graph to
    graphs/shallow_graph.graphml."""
    file_addr = "graphs/shallow_graph.graphml"
    heavy_edges = []
    # First pass: assign every edge its state distance as 'weight' and
    # remember the too-heavy ones (can't remove while iterating edges).
    for u, v in G.edges():
        weight = calc_weight(state_to_numpy(G.node[u]['state']),
                             state_to_numpy(G.node[v]['state']))
        G[u][v]['weight'] = weight
        if weight > THRESHOLD:
            heavy_edges.append((u, v))
    # Second pass: prune the heavy edges, then persist the result.
    for u, v in heavy_edges:
        G.remove_edge(u, v)
    nx.write_graphml(G, file_addr)

if __name__ == '__main__':
    # Command-line entry point: --graphfile names the GraphML file whose
    # edges are reweighted and pruned.
    cli = argparse.ArgumentParser(description='Generate environments')
    cli.add_argument('--graphfile', type=str, required=True)
    cli_args = cli.parse_args()

    graph = nx.read_graphml(cli_args.graphfile)
    save_modified_graph(graph)
        else:
            total_table[tag] = 1
    return


def caculate_probability(remain_table, total_table, inital_probabolity):
    """Fill *inital_probabolity* with remain/total ratios.

    For every key in remain_table, stores
    remain_table[key] / total_table[key] as a float into
    inital_probabolity.  Mutates inital_probabolity in place and
    returns None.

    Raises KeyError if a key of remain_table is missing from
    total_table, and ZeroDivisionError if a total count is zero.
    """
    # Iterate the dict directly (no need for .keys()); float() keeps the
    # division real-valued even under Python 2 integer semantics.
    for key in remain_table:
        inital_probabolity[key] = float(remain_table[key]) / total_table[key]


# Count tag occurrences over the tweets stored on graph nodes, turn the
# counts into per-tag probabilities, and dump them as JSON.
origin_file_name = 'svm_classified.graphml'
result_file_name = 'inital_probabolity.json'
remain_table = {}
total_table = {}
inital_probabolity = {}

with open(origin_file_name, mode='r') as f:
    G = nx.read_graphml(f)

# NOTE(review): G.node[...] is the networkx 1.x node-attribute API.
for v in G.nodes():
    # Skip nodes that carry no tweet text.
    if 'tweet' not in G.node[v]:
        continue
    tag_list = get_tags_from_tweet(G.node[v]['tweet'])
    count_tweet(remain_table, total_table, tag_list, G.node[v]['brexit'])
caculate_probability(remain_table, total_table, inital_probabolity)

with open(result_file_name, mode='w') as f:
    json.dump(inital_probabolity, f, sort_keys=True)
import json
from pprint import pprint
#import pygraphviz
import sys
import argparse
import networkx as nx
import random
import numpy as np
import matplotlib.pyplot as plt
from scipy import sparse
import scipy
from networkx.drawing.nx_agraph import graphviz_layout


# Solver output (JSON) and the road-segment graph it refers to.
# NOTE(review): absolute home-directory paths — this script only runs on
# the original author's machine as written.
json_filename = '/home/hushiji/Research/smart_cities/smart_cities/general_soc/greenville_exp.json'
roadSegGraph = nx.read_graphml("/home/hushiji/Research/smart_cities/smart_cities/real_data_sol/graph_data/greenville1_5.graphml")


# Output file for the selected install segments; never closed in the
# visible code — presumably left to interpreter exit.
seg_installFile = open("/home/hushiji/Research/smart_cities/smart_cities/general_soc/analysis/exp_seg_install.txt", "w")



with open(json_filename) as data_file:
    data = json.load(data_file)

Install_seg = []
# Decision-variable values from the second entry of the 'Solution' list.
variables = data['Solution'][1]['Variable']
for key in variables:
  
  if key[0] == 'r':
Beispiel #55
0
    def test_yfiles_extension(self):
        """GraphML carrying yEd/yFiles extension data (y:ShapeNode node
        graphics, y:PolyLineEdge edge graphics) should still parse into a
        plain graph: node labels are recovered from the y:NodeLabel text
        and the edge 'id' attribute is preserved."""
        data = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<graphml xmlns="http://graphml.graphdrawing.org/xmlns"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xmlns:y="http://www.yworks.com/xml/graphml"
         xmlns:yed="http://www.yworks.com/xml/yed/3"
         xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns
         http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd">
  <!--Created by yFiles for Java 2.7-->
  <key for="graphml" id="d0" yfiles.type="resources"/>
  <key attr.name="url" attr.type="string" for="node" id="d1"/>
  <key attr.name="description" attr.type="string" for="node" id="d2"/>
  <key for="node" id="d3" yfiles.type="nodegraphics"/>
  <key attr.name="Description" attr.type="string" for="graph" id="d4">
    <default/>
  </key>
  <key attr.name="url" attr.type="string" for="edge" id="d5"/>
  <key attr.name="description" attr.type="string" for="edge" id="d6"/>
  <key for="edge" id="d7" yfiles.type="edgegraphics"/>
  <graph edgedefault="directed" id="G">
    <node id="n0">
      <data key="d3">
        <y:ShapeNode>
          <y:Geometry height="30.0" width="30.0" x="125.0" y="100.0"/>
          <y:Fill color="#FFCC00" transparent="false"/>
          <y:BorderStyle color="#000000" type="line" width="1.0"/>
          <y:NodeLabel alignment="center" autoSizePolicy="content"
           borderDistance="0.0" fontFamily="Dialog" fontSize="13"
           fontStyle="plain" hasBackgroundColor="false" hasLineColor="false"
           height="19.1328125" modelName="internal" modelPosition="c"
           textColor="#000000" visible="true" width="12.27099609375"
           x="8.864501953125" y="5.43359375">1</y:NodeLabel>
          <y:Shape type="rectangle"/>
        </y:ShapeNode>
      </data>
    </node>
    <node id="n1">
      <data key="d3">
        <y:ShapeNode>
          <y:Geometry height="30.0" width="30.0" x="183.0" y="205.0"/>
          <y:Fill color="#FFCC00" transparent="false"/>
          <y:BorderStyle color="#000000" type="line" width="1.0"/>
          <y:NodeLabel alignment="center" autoSizePolicy="content"
          borderDistance="0.0" fontFamily="Dialog" fontSize="13"
          fontStyle="plain" hasBackgroundColor="false" hasLineColor="false"
          height="19.1328125" modelName="internal" modelPosition="c"
          textColor="#000000" visible="true" width="12.27099609375"
          x="8.864501953125" y="5.43359375">2</y:NodeLabel>
          <y:Shape type="rectangle"/>
        </y:ShapeNode>
      </data>
    </node>
    <edge id="e0" source="n0" target="n1">
      <data key="d7">
        <y:PolyLineEdge>
          <y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
          <y:LineStyle color="#000000" type="line" width="1.0"/>
          <y:Arrows source="none" target="standard"/>
          <y:BendStyle smoothed="false"/>
        </y:PolyLineEdge>
      </data>
    </edge>
  </graph>
  <data key="d0">
    <y:Resources/>
  </data>
</graphml>
"""
        # Parse once from a binary file handle...
        fh = io.BytesIO(data.encode('UTF-8'))
        G = nx.read_graphml(fh)
        assert list(G.edges()) == [('n0', 'n1')]
        assert G['n0']['n1']['id'] == 'e0'
        assert G.nodes['n0']['label'] == '1'
        assert G.nodes['n1']['label'] == '2'

        # ...and once from the in-memory string; both paths must agree.
        H = nx.parse_graphml(data)
        assert list(H.edges()) == [('n0', 'n1')]
        assert H['n0']['n1']['id'] == 'e0'
        assert H.nodes['n0']['label'] == '1'
        assert H.nodes['n1']['label'] == '2'
Beispiel #56
0
    def test_read_attributes_with_groups(self):
        data = """\
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<graphml xmlns="http://graphml.graphdrawing.org/xmlns" xmlns:java="http://www.yworks.com/xml/yfiles-common/1.0/java" xmlns:sys="http://www.yworks.com/xml/yfiles-common/markup/primitives/2.0" xmlns:x="http://www.yworks.com/xml/yfiles-common/markup/2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:y="http://www.yworks.com/xml/graphml" xmlns:yed="http://www.yworks.com/xml/yed/3" xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns http://www.yworks.com/xml/schema/graphml/1.1/ygraphml.xsd">
  <!--Created by yEd 3.17-->
  <key attr.name="Description" attr.type="string" for="graph" id="d0"/>
  <key for="port" id="d1" yfiles.type="portgraphics"/>
  <key for="port" id="d2" yfiles.type="portgeometry"/>
  <key for="port" id="d3" yfiles.type="portuserdata"/>
  <key attr.name="CustomProperty" attr.type="string" for="node" id="d4">
    <default/>
  </key>
  <key attr.name="url" attr.type="string" for="node" id="d5"/>
  <key attr.name="description" attr.type="string" for="node" id="d6"/>
  <key for="node" id="d7" yfiles.type="nodegraphics"/>
  <key for="graphml" id="d8" yfiles.type="resources"/>
  <key attr.name="url" attr.type="string" for="edge" id="d9"/>
  <key attr.name="description" attr.type="string" for="edge" id="d10"/>
  <key for="edge" id="d11" yfiles.type="edgegraphics"/>
  <graph edgedefault="directed" id="G">
    <data key="d0"/>
    <node id="n0">
      <data key="d4"><![CDATA[CustomPropertyValue]]></data>
      <data key="d6"/>
      <data key="d7">
        <y:ShapeNode>
          <y:Geometry height="30.0" width="30.0" x="125.0" y="-255.4611111111111"/>
          <y:Fill color="#FFCC00" transparent="false"/>
          <y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
          <y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="11.634765625" x="9.1826171875" y="6.015625">2<y:LabelModel>
              <y:SmartNodeLabelModel distance="4.0"/>
            </y:LabelModel>
            <y:ModelParameter>
              <y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
            </y:ModelParameter>
          </y:NodeLabel>
          <y:Shape type="rectangle"/>
        </y:ShapeNode>
      </data>
    </node>
    <node id="n1" yfiles.foldertype="group">
      <data key="d4"><![CDATA[CustomPropertyValue]]></data>
      <data key="d5"/>
      <data key="d6"/>
      <data key="d7">
        <y:ProxyAutoBoundsNode>
          <y:Realizers active="0">
            <y:GroupNode>
              <y:Geometry height="250.38333333333333" width="140.0" x="-30.0" y="-330.3833333333333"/>
              <y:Fill color="#F5F5F5" transparent="false"/>
              <y:BorderStyle color="#000000" type="dashed" width="1.0"/>
              <y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="140.0" x="0.0" y="0.0">Group 3</y:NodeLabel>
              <y:Shape type="roundrectangle"/>
              <y:State closed="false" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
              <y:Insets bottom="15" bottomF="15.0" left="15" leftF="15.0" right="15" rightF="15.0" top="15" topF="15.0"/>
              <y:BorderInsets bottom="1" bottomF="1.0" left="0" leftF="0.0" right="0" rightF="0.0" top="1" topF="1.0001736111111086"/>
            </y:GroupNode>
            <y:GroupNode>
              <y:Geometry height="50.0" width="50.0" x="0.0" y="60.0"/>
              <y:Fill color="#F5F5F5" transparent="false"/>
              <y:BorderStyle color="#000000" type="dashed" width="1.0"/>
              <y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="65.201171875" x="-7.6005859375" y="0.0">Folder 3</y:NodeLabel>
              <y:Shape type="roundrectangle"/>
              <y:State closed="true" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
              <y:Insets bottom="5" bottomF="5.0" left="5" leftF="5.0" right="5" rightF="5.0" top="5" topF="5.0"/>
              <y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
            </y:GroupNode>
          </y:Realizers>
        </y:ProxyAutoBoundsNode>
      </data>
      <graph edgedefault="directed" id="n1:">
        <node id="n1::n0" yfiles.foldertype="group">
          <data key="d4"><![CDATA[CustomPropertyValue]]></data>
          <data key="d5"/>
          <data key="d6"/>
          <data key="d7">
            <y:ProxyAutoBoundsNode>
              <y:Realizers active="0">
                <y:GroupNode>
                  <y:Geometry height="83.46111111111111" width="110.0" x="-15.0" y="-292.9222222222222"/>
                  <y:Fill color="#F5F5F5" transparent="false"/>
                  <y:BorderStyle color="#000000" type="dashed" width="1.0"/>
                  <y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="110.0" x="0.0" y="0.0">Group 1</y:NodeLabel>
                  <y:Shape type="roundrectangle"/>
                  <y:State closed="false" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
                  <y:Insets bottom="15" bottomF="15.0" left="15" leftF="15.0" right="15" rightF="15.0" top="15" topF="15.0"/>
                  <y:BorderInsets bottom="1" bottomF="1.0" left="0" leftF="0.0" right="0" rightF="0.0" top="1" topF="1.0001736111111086"/>
                </y:GroupNode>
                <y:GroupNode>
                  <y:Geometry height="50.0" width="50.0" x="0.0" y="60.0"/>
                  <y:Fill color="#F5F5F5" transparent="false"/>
                  <y:BorderStyle color="#000000" type="dashed" width="1.0"/>
                  <y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="65.201171875" x="-7.6005859375" y="0.0">Folder 1</y:NodeLabel>
                  <y:Shape type="roundrectangle"/>
                  <y:State closed="true" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
                  <y:Insets bottom="5" bottomF="5.0" left="5" leftF="5.0" right="5" rightF="5.0" top="5" topF="5.0"/>
                  <y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
                </y:GroupNode>
              </y:Realizers>
            </y:ProxyAutoBoundsNode>
          </data>
          <graph edgedefault="directed" id="n1::n0:">
            <node id="n1::n0::n0">
              <data key="d4"><![CDATA[CustomPropertyValue]]></data>
              <data key="d6"/>
              <data key="d7">
                <y:ShapeNode>
                  <y:Geometry height="30.0" width="30.0" x="50.0" y="-255.4611111111111"/>
                  <y:Fill color="#FFCC00" transparent="false"/>
                  <y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
                  <y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="11.634765625" x="9.1826171875" y="6.015625">1<y:LabelModel>
                      <y:SmartNodeLabelModel distance="4.0"/>
                    </y:LabelModel>
                    <y:ModelParameter>
                      <y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
                    </y:ModelParameter>
                  </y:NodeLabel>
                  <y:Shape type="rectangle"/>
                </y:ShapeNode>
              </data>
            </node>
            <node id="n1::n0::n1">
              <data key="d4"><![CDATA[CustomPropertyValue]]></data>
              <data key="d6"/>
              <data key="d7">
                <y:ShapeNode>
                  <y:Geometry height="30.0" width="30.0" x="0.0" y="-255.4611111111111"/>
                  <y:Fill color="#FFCC00" transparent="false"/>
                  <y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
                  <y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="11.634765625" x="9.1826171875" y="6.015625">3<y:LabelModel>
                      <y:SmartNodeLabelModel distance="4.0"/>
                    </y:LabelModel>
                    <y:ModelParameter>
                      <y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
                    </y:ModelParameter>
                  </y:NodeLabel>
                  <y:Shape type="rectangle"/>
                </y:ShapeNode>
              </data>
            </node>
          </graph>
        </node>
        <node id="n1::n1" yfiles.foldertype="group">
          <data key="d4"><![CDATA[CustomPropertyValue]]></data>
          <data key="d5"/>
          <data key="d6"/>
          <data key="d7">
            <y:ProxyAutoBoundsNode>
              <y:Realizers active="0">
                <y:GroupNode>
                  <y:Geometry height="83.46111111111111" width="110.0" x="-15.0" y="-179.4611111111111"/>
                  <y:Fill color="#F5F5F5" transparent="false"/>
                  <y:BorderStyle color="#000000" type="dashed" width="1.0"/>
                  <y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="110.0" x="0.0" y="0.0">Group 2</y:NodeLabel>
                  <y:Shape type="roundrectangle"/>
                  <y:State closed="false" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
                  <y:Insets bottom="15" bottomF="15.0" left="15" leftF="15.0" right="15" rightF="15.0" top="15" topF="15.0"/>
                  <y:BorderInsets bottom="1" bottomF="1.0" left="0" leftF="0.0" right="0" rightF="0.0" top="1" topF="1.0001736111111086"/>
                </y:GroupNode>
                <y:GroupNode>
                  <y:Geometry height="50.0" width="50.0" x="0.0" y="60.0"/>
                  <y:Fill color="#F5F5F5" transparent="false"/>
                  <y:BorderStyle color="#000000" type="dashed" width="1.0"/>
                  <y:NodeLabel alignment="right" autoSizePolicy="node_width" backgroundColor="#EBEBEB" borderDistance="0.0" fontFamily="Dialog" fontSize="15" fontStyle="plain" hasLineColor="false" height="21.4609375" horizontalTextPosition="center" iconTextGap="4" modelName="internal" modelPosition="t" textColor="#000000" verticalTextPosition="bottom" visible="true" width="65.201171875" x="-7.6005859375" y="0.0">Folder 2</y:NodeLabel>
                  <y:Shape type="roundrectangle"/>
                  <y:State closed="true" closedHeight="50.0" closedWidth="50.0" innerGraphDisplayEnabled="false"/>
                  <y:Insets bottom="5" bottomF="5.0" left="5" leftF="5.0" right="5" rightF="5.0" top="5" topF="5.0"/>
                  <y:BorderInsets bottom="0" bottomF="0.0" left="0" leftF="0.0" right="0" rightF="0.0" top="0" topF="0.0"/>
                </y:GroupNode>
              </y:Realizers>
            </y:ProxyAutoBoundsNode>
          </data>
          <graph edgedefault="directed" id="n1::n1:">
            <node id="n1::n1::n0">
              <data key="d4"><![CDATA[CustomPropertyValue]]></data>
              <data key="d6"/>
              <data key="d7">
                <y:ShapeNode>
                  <y:Geometry height="30.0" width="30.0" x="0.0" y="-142.0"/>
                  <y:Fill color="#FFCC00" transparent="false"/>
                  <y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
                  <y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="11.634765625" x="9.1826171875" y="6.015625">5<y:LabelModel>
                      <y:SmartNodeLabelModel distance="4.0"/>
                    </y:LabelModel>
                    <y:ModelParameter>
                      <y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
                    </y:ModelParameter>
                  </y:NodeLabel>
                  <y:Shape type="rectangle"/>
                </y:ShapeNode>
              </data>
            </node>
            <node id="n1::n1::n1">
              <data key="d4"><![CDATA[CustomPropertyValue]]></data>
              <data key="d6"/>
              <data key="d7">
                <y:ShapeNode>
                  <y:Geometry height="30.0" width="30.0" x="50.0" y="-142.0"/>
                  <y:Fill color="#FFCC00" transparent="false"/>
                  <y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
                  <y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="11.634765625" x="9.1826171875" y="6.015625">6<y:LabelModel>
                      <y:SmartNodeLabelModel distance="4.0"/>
                    </y:LabelModel>
                    <y:ModelParameter>
                      <y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
                    </y:ModelParameter>
                  </y:NodeLabel>
                  <y:Shape type="rectangle"/>
                </y:ShapeNode>
              </data>
            </node>
          </graph>
        </node>
      </graph>
    </node>
    <node id="n2">
      <data key="d4"><![CDATA[CustomPropertyValue]]></data>
      <data key="d6"/>
      <data key="d7">
        <y:ShapeNode>
          <y:Geometry height="30.0" width="30.0" x="125.0" y="-142.0"/>
          <y:Fill color="#FFCC00" transparent="false"/>
          <y:BorderStyle color="#000000" raised="false" type="line" width="1.0"/>
          <y:NodeLabel alignment="center" autoSizePolicy="content" fontFamily="Dialog" fontSize="12" fontStyle="plain" hasBackgroundColor="false" hasLineColor="false" height="17.96875" horizontalTextPosition="center" iconTextGap="4" modelName="custom" textColor="#000000" verticalTextPosition="bottom" visible="true" width="11.634765625" x="9.1826171875" y="6.015625">9<y:LabelModel>
              <y:SmartNodeLabelModel distance="4.0"/>
            </y:LabelModel>
            <y:ModelParameter>
              <y:SmartNodeLabelModelParameter labelRatioX="0.0" labelRatioY="0.0" nodeRatioX="0.0" nodeRatioY="0.0" offsetX="0.0" offsetY="0.0" upX="0.0" upY="-1.0"/>
            </y:ModelParameter>
          </y:NodeLabel>
          <y:Shape type="rectangle"/>
        </y:ShapeNode>
      </data>
    </node>
    <edge id="n1::n1::e0" source="n1::n1::n0" target="n1::n1::n1">
      <data key="d10"/>
      <data key="d11">
        <y:PolyLineEdge>
          <y:Path sx="15.0" sy="-0.0" tx="-15.0" ty="-0.0"/>
          <y:LineStyle color="#000000" type="line" width="1.0"/>
          <y:Arrows source="none" target="standard"/>
          <y:BendStyle smoothed="false"/>
        </y:PolyLineEdge>
      </data>
    </edge>
    <edge id="n1::n0::e0" source="n1::n0::n1" target="n1::n0::n0">
      <data key="d10"/>
      <data key="d11">
        <y:PolyLineEdge>
          <y:Path sx="15.0" sy="-0.0" tx="-15.0" ty="-0.0"/>
          <y:LineStyle color="#000000" type="line" width="1.0"/>
          <y:Arrows source="none" target="standard"/>
          <y:BendStyle smoothed="false"/>
        </y:PolyLineEdge>
      </data>
    </edge>
    <edge id="e0" source="n1::n0::n0" target="n0">
      <data key="d10"/>
      <data key="d11">
        <y:PolyLineEdge>
          <y:Path sx="15.0" sy="-0.0" tx="-15.0" ty="-0.0"/>
          <y:LineStyle color="#000000" type="line" width="1.0"/>
          <y:Arrows source="none" target="standard"/>
          <y:BendStyle smoothed="false"/>
        </y:PolyLineEdge>
      </data>
    </edge>
    <edge id="e1" source="n1::n1::n1" target="n2">
      <data key="d10"/>
      <data key="d11">
        <y:PolyLineEdge>
          <y:Path sx="15.0" sy="-0.0" tx="-15.0" ty="-0.0"/>
          <y:LineStyle color="#000000" type="line" width="1.0"/>
          <y:Arrows source="none" target="standard"/>
          <y:BendStyle smoothed="false"/>
        </y:PolyLineEdge>
      </data>
    </edge>
  </graph>
  <data key="d8">
    <y:Resources/>
  </data>
</graphml>
"""
        # verify that nodes / attributes are correctly read when part of a group
        fh = io.BytesIO(data.encode('UTF-8'))
        G = nx.read_graphml(fh)
        data = [x for _, x in G.nodes(data=True)]
        assert len(data) == 9
        for node_data in data:
            assert node_data['CustomProperty'] != ''
    # when the package is not installed, import the local version instead.
    # the notebook must be placed in the original "notebooks/" folder
    sys.path.append("../")
    from localgraphclustering import *

import time

import networkx as nx

import random

import statistics as stat_

# Load the Johns Hopkins Facebook network twice: as a GraphLocal object for
# the local-clustering routines, and as a networkx graph so the node
# attributes (ground-truth labels) can be read below.
g = GraphLocal('../datasets/JohnsHopkins.graphml', 'graphml')

G = nx.read_graphml('../datasets/JohnsHopkins.graphml')

# Maps "<attribute name><attribute value>" -> list of member nodes.
# This pass only registers every label that occurs in the data; the buckets
# are presumably filled by later code (not visible in this chunk).
ground_truth_clusters_by_number = dict()

# Node attributes that define the candidate ground-truth partitions.
cluster_names = [
    'secondMajor', 'highSchool', 'gender', 'dorm', 'majorIndex', 'year'
]

# node is a (node_id, attribute_dict) pair
for node in G.nodes(data=True):

    for cluster_name in cluster_names:

        # create an (empty) bucket for every label observed on any node
        ground_truth_clusters_by_number[cluster_name +
                                        str(node[1][cluster_name])] = []

counter = 0
Beispiel #58
0
def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation):
    """ Creates the ROI_%s.nii.gz files using the given parcellation information
    from networks. Iteratively create volume.

    Parameters
    ----------
    subject_id : str
        FreeSurfer subject identifier; joined with *subjects_dir* to locate
        the subject's ``mri/`` folder.
    subjects_dir : str
        FreeSurfer SUBJECTS_DIR that contains *subject_id*.
    fs_dir : str
        NOTE(review): immediately overwritten below, so the value passed in
        is never used.
    parcellation_name : str
        Key into the Lausanne2008 parcellation dictionary; selects the
        GraphML node-description file and names the output volumes.
    dilation : bool
        When True, unlabelled grey-matter voxels are assigned the label of
        the nearest ROI within a 25^3 neighbourhood and the result is also
        written to ``ROIv_%s.nii.gz``.
    """
    iflogger.info("Create the ROIs:")
    output_dir = op.abspath(op.curdir)
    # The fs_dir argument is discarded and rebuilt from subjects_dir/subject_id.
    fs_dir = op.join(subjects_dir, subject_id)
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    log = cmp_config.get_logger()
    parval = cmp_config._get_lausanne_parcellation(
        'Lausanne2008')[parcellation_name]
    # GraphML file in which each node describes one brain region of the
    # chosen parcellation (hemisphere, FreeSurfer name, aseg value, label id).
    pgpath = parval['node_information_graphml']
    aseg = nb.load(op.join(fs_dir, 'mri', 'aseg.nii.gz'))
    asegd = aseg.get_data()

    # identify cortical voxels; in the FreeSurfer colour LUT label 3 is
    # Left-Cerebral-Cortex and 42 is Right-Cerebral-Cortex (the idxr/idxl
    # variable names follow the original comment, which had them swapped)
    idxr = np.where(asegd == 3)
    idxl = np.where(asegd == 42)
    xx = np.concatenate((idxr[0], idxl[0]))
    yy = np.concatenate((idxr[1], idxl[1]))
    zz = np.concatenate((idxr[2], idxl[2]))

    # initialize variables necessary for cortical ROIs dilation
    # dimensions of the neighbourhood for rois labels assignment (choose odd dimensions!)
    shape = (25, 25, 25)
    center = np.array(shape) // 2
    # dist: distances from the center of the neighbourhood
    dist = np.zeros(shape, dtype='float32')
    for x in range(shape[0]):
        for y in range(shape[1]):
            for z in range(shape[2]):
                distxyz = center - [x, y, z]
                # Euclidean distance of each voxel from the window centre
                dist[x, y, z] = np.sqrt(np.sum(np.multiply(distxyz, distxyz)))

    iflogger.info("Working on parcellation: ")
    iflogger.info(
        cmp_config._get_lausanne_parcellation('Lausanne2008')
        [parcellation_name])
    iflogger.info("========================")
    pg = nx.read_graphml(pgpath)
    # each node represents a brain region
    # create a big 256^3 volume for storage of all ROIs
    rois = np.zeros((256, 256, 256), dtype=np.int16)

    count = 0
    for brk, brv in pg.nodes(data=True):
        count = count + 1
        iflogger.info(brv)
        iflogger.info(brk)
        # NOTE(review): if dn_hemisphere is neither 'left' nor 'right', hemi
        # keeps its value from the previous iteration (or is unbound on the
        # first one) -- assumed never to happen for Lausanne2008 graphs.
        if brv['dn_hemisphere'] == 'left':
            hemi = 'lh'
        elif brv['dn_hemisphere'] == 'right':
            hemi = 'rh'
        if brv['dn_region'] == 'subcortical':
            iflogger.info(brv)
            iflogger.info("---------------------")
            iflogger.info("Work on brain region: %s" % (brv['dn_region']))
            iflogger.info("Freesurfer Name: %s" % brv['dn_fsname'])
            iflogger.info("Region %s of %s " % (count, pg.number_of_nodes()))
            iflogger.info("---------------------")
            # if it is subcortical, retrieve roi from aseg
            idx = np.where(asegd == int(brv['dn_fs_aseg_val']))
            rois[idx] = int(brv['dn_correspondence_id'])

        elif brv['dn_region'] == 'cortical':
            iflogger.info(brv)
            iflogger.info("---------------------")
            iflogger.info("Work on brain region: %s" % (brv['dn_region']))
            iflogger.info("Freesurfer Name: %s" % brv['dn_fsname'])
            iflogger.info("Region %s of %s " % (count, pg.number_of_nodes()))
            iflogger.info("---------------------")

            labelpath = op.join(output_dir,
                                parval['fs_label_subdir_name'] % hemi)
            # construct .label file name

            fname = '%s.%s.label' % (hemi, brv['dn_fsname'])

            # execute fs mri_label2vol to generate volume roi from the label file
            # store it in temporary file to be overwritten for each region

            mri_cmd = 'mri_label2vol --label "%s" --temp "%s" --o "%s" --identity' % (
                op.join(labelpath, fname), op.join(fs_dir, 'mri', 'orig.mgz'),
                op.join(output_dir, 'tmp.nii.gz'))
            runCmd(mri_cmd, log)

            tmp = nb.load(op.join(output_dir, 'tmp.nii.gz'))
            tmpd = tmp.get_data()

            # find voxel and set them to intensityvalue in rois
            idx = np.where(tmpd == 1)
            rois[idx] = int(brv['dn_correspondence_id'])

        # store volume eg in ROI_scale33.nii.gz
        out_roi = op.abspath('ROI_%s.nii.gz' % parcellation_name)

        # update the header
        hdr = aseg.header
        hdr2 = hdr.copy()
        hdr2.set_data_dtype(np.uint16)

        # NOTE(review): this save runs once per region (it sits inside the
        # node loop), rewriting the same file each iteration; only the final
        # write matters, but hdr2 defined here is reused after the loop, so
        # moving it would change scoping -- left as-is.
        log.info("Save output image to %s" % out_roi)
        img = nb.Nifti1Image(rois, aseg.affine, hdr2)
        nb.save(img, out_roi)

    iflogger.info("[ DONE ]")
    # dilate cortical regions
    if dilation is True:
        iflogger.info("Dilating cortical regions...")
        # loop throughout all the voxels belonging to the aseg GM volume
        for j in range(xx.size):
            # only voxels not yet assigned to any ROI are filled in
            if rois[xx[j], yy[j], zz[j]] == 0:
                local = extract(rois,
                                shape,
                                position=(xx[j], yy[j], zz[j]),
                                fill=0)
                mask = local.copy()
                mask[np.nonzero(local > 0)] = 1
                # distance of every labelled voxel in the window from centre;
                # zeros (unlabelled) are pushed to the max so they never win
                thisdist = np.multiply(dist, mask)
                thisdist[np.nonzero(thisdist == 0)] = np.amax(thisdist)
                value = np.int_(
                    local[np.nonzero(thisdist == np.amin(thisdist))])
                # ties: take the most frequent label among nearest voxels
                if value.size > 1:
                    counts = np.bincount(value)
                    value = np.argmax(counts)
                rois[xx[j], yy[j], zz[j]] = value

        # store volume eg in ROIv_scale33.nii.gz
        out_roi = op.abspath('ROIv_%s.nii.gz' % parcellation_name)
        iflogger.info("Save output image to %s" % out_roi)
        img = nb.Nifti1Image(rois, aseg.affine, hdr2)
        nb.save(img, out_roi)

        iflogger.info("[ DONE ]")
Beispiel #59
0
    async def load_graph(self, graph_data, onto_uri, graph_file_type):
        """Load a network, spawn one NodeAgent per graph node, deploy them
        and wire each agent to its graph neighbourhood.

        Parameters
        ----------
        graph_data : object
            Upload-like object whose ``.file`` attribute is a file handle
            holding the serialized network.
        onto_uri : str
            URI of the OWL ontology parsed into ``self.bel_graph``.
        graph_file_type : str
            Either ``'pajek'`` or ``'graphml'``.

        Raises
        ------
        ValueError
            If *graph_file_type* is not one of the supported formats.
        """
        # Load the graph file
        if graph_file_type == 'pajek':
            nxGraph = nx.read_pajek(graph_data.file)
        elif graph_file_type == 'graphml':
            nxGraph = nx.read_graphml(graph_data.file)
        else:
            # Previously an unknown type fell through silently and the first
            # use of nxGraph below raised a confusing NameError.
            raise ValueError(
                "Unsupported graph_file_type: %r (expected 'pajek' or "
                "'graphml')" % graph_file_type)

        self.subgraph = nxGraph
        self.bel_graph = onto2nx.parse_owl_rdf(onto_uri)

        # Calculate the number of nodes in the graph
        nodes_list = list(nxGraph.nodes(data=True))

        # Calculate epsilon
        # degree_sequence = [d for n, d in nxGraph.degree()]

        # For each agent in the graph
        i = 0

        template = Template()
        template.set_metadata('conversation', 'consensus')
        self.add_behaviour(RecvBehav(), template)
        self.add_behaviour(self.ConsensusObserver())

        for key, node in nodes_list:
            # Create an agent
            if 'id' not in node:
                node['id'] = node['name']
            jid1 = "".join(node['id'].split()).lower() + self.jid_domain

            # print("Name: ", jid1)

            passwd1 = "test"

            # Give the agent a name, that has the same label as in the graph
            node_agent = NodeAgent(jid1,
                                   password=passwd1,
                                   onto_uri=onto_uri,
                                   use_container=True,
                                   loop=self.loop)

            self.mas_dict[jid1] = node_agent
            base_port = 30000
            url = node_agent.url + str(base_port + i) + "/agent"
            # Graph.node was removed in networkx 2.4; .nodes is the 2.x name
            # for the same node-attribute mapping.
            nxGraph.nodes[key]['url'] = url

            neighList = list(nxGraph.neighbors(key))
            neighList.append(key)
            # print("NeighList: ", neighList)
            # closed neighbourhood (node + its neighbours) for this agent
            subgraph = nxGraph.subgraph(neighList)

            config = {
                'base_port': base_port,
                'value': i,
                'subgraph': subgraph,
                'observers': [self.jid]
            }
            config.update(node)
            node_agent.configure(config)

            i = i + 1

        # Deploy all agents concurrently, then subscribe and refresh contacts.
        coros = [agent.deploy() for agent in self.mas_dict.values()]
        await asyncio.gather(*coros)

        await self.subscribe_to_neighbours()

        # calculate avail_contacts
        await self.update_available_contacts()
# [1] Preprocessing ===========================================================

# 1 Load necessary libraries
import random
import networkx as nx
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import roc_auc_score
from sklearn.utils import shuffle
from sklearn import metrics
from datetime import datetime

# 2 Load the directed Twitter follower network
twt = nx.read_graphml('twitter-745-directed-project(numeric).graphml')

# 3 Display number of edges and nodes
a = twt.number_of_nodes()
t = twt.number_of_edges()
print("Number of nodes : %d" % a)
print("Number of edges : %d" % t)

# 4 Seed the RNG for reproducibility and start a wall-clock timer
random.seed(0)
t1 = datetime.now()

# 5 Build a test graph containing 95 percent of twt's edges by deleting a
# random 5 percent. random.sample() requires a sequence (Python 3.11+
# raises TypeError for set-like populations such as networkx's EdgeView),
# so materialize the edge list first.
edges_to_remove_from_twt = random.sample(list(twt.edges()),
                                         int(0.05 * twt.number_of_edges()))
twt_test = twt.copy()
twt_test.remove_edges_from(edges_to_remove_from_twt)
print("Number of edges deleted : %d" % len(edges_to_remove_from_twt))
print("Number of edges remaining : %d" % (twt_test.number_of_edges()))