Example #1
    def save_celltype_graph(self, filename='celltype_conn.gml', format='gml'):
        """
        Save the celltype-to-celltype connectivity information in a file.
        
        filename -- path of the file to be saved.

        format -- format to save in. GML is the default because
        GraphML support in NetworkX is incomplete.

        """
        start = datetime.now()
        if format == 'gml':
            nx.write_gml(self.__celltype_graph, filename)
        elif format == 'yaml':
            nx.write_yaml(self.__celltype_graph, filename)
        elif format == 'graphml':
            nx.write_graphml(self.__celltype_graph, filename)
        elif format == 'edgelist':
            nx.write_edgelist(self.__celltype_graph, filename)
        elif format == 'pickle':
            nx.write_gpickle(self.__celltype_graph, filename)
        else:
            raise Exception(
                'Supported formats: gml, graphml, yaml, edgelist, pickle. Received: %s' %
                (format))
        end = datetime.now()
        delta = end - start
        config.BENCHMARK_LOGGER.info(
            'Saved celltype_graph in file %s of format %s in %g s' %
            (filename, format, delta.seconds + delta.microseconds * 1e-6))
        print('Saved celltype connectivity graph in', filename)
Example #2
    def save_celltype_graph(self, filename="celltype_conn.gml", format="gml"):
        """
        Save the celltype-to-celltype connectivity information in a file.
        
        filename -- path of the file to be saved.

        format -- format to save in. GML is the default because
        GraphML support in NetworkX is incomplete.

        """
        start = datetime.now()
        if format == "gml":
            nx.write_gml(self.__celltype_graph, filename)
        elif format == "yaml":
            nx.write_yaml(self.__celltype_graph, filename)
        elif format == "graphml":
            nx.write_graphml(self.__celltype_graph, filename)
        elif format == "edgelist":
            nx.write_edgelist(self.__celltype_graph, filename)
        elif format == "pickle":
            nx.write_gpickle(self.__celltype_graph, filename)
        else:
            raise Exception("Supported formats: gml, graphml, yaml, edgelist, pickle. Received: %s" % (format))
        end = datetime.now()
        delta = end - start
        config.BENCHMARK_LOGGER.info(
            "Saved celltype_graph in file %s of format %s in %g s"
            % (filename, format, delta.seconds + delta.microseconds * 1e-6)
        )
        print("Saved celltype connectivity graph in", filename)
Example #3
 def __update_status(self):
     #getting the proportion of non empty feature sets evaluated
     self.status_=np.mean([y is not None for x,y in nx.get_node_attributes(self.G,'score').items() if x!=0])
     self.leaves_={n:self.G.node[n]['score'] for n,deg in self.G.out_degree if deg==0 and self.G.node[n]['score'] is not None}
     nx.write_yaml(self.G,self.file)
     #self.verboseprint('Crawler status: progress: {0.status_}, leaves: {0.leaves_}.'.format(self))
     return self
Example #4
def write_graph(graph, filename, file_type=None):
    if not file_type:
        file_type = get_graph_type(filename)
        if not file_type:
            raise RuntimeError("Unable to determine graph file type.")

    if file_type == "adjlist":
        networkx.write_adjlist(graph, filename)
    elif file_type == "edgelist":
        networkx.write_edgelist(graph, filename)
    elif file_type == "gexf":
        networkx.write_gexf(graph, filename)
    elif file_type == "gml":
        networkx.write_gml(graph, filename)
    elif file_type == "gpickle":
        networkx.write_gpickle(graph, filename)
    elif file_type == "graphml":
        networkx.write_graphml(graph, filename)
    elif file_type == "yaml":
        networkx.write_yaml(graph, filename)
    elif file_type == "pajek" or file_type == "net":
        networkx.write_pajek(graph, filename)
    elif file_type == "adjmat":
        #sparse_matrix = networkx.adjacency_matrix(graph)
        #dense_matrix = sparse_matrix.todense()
        #dense_matrix.tofile(filename, sep=",", format="%g")

        matrix = networkx.to_numpy_matrix(graph)
        numpy.savetxt(filename, matrix, delimiter=",", newline="\n", fmt="%g")
    else:
        raise RuntimeError("Unrecognized output graph file type.")
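A minimal usage sketch for the dispatcher above, assuming get_graph_type() maps common extensions such as .gml to the format names used in the branches (the graph and filenames here are made up for illustration):

import networkx

g = networkx.Graph()
g.add_edge('a', 'b')
g.add_edge('b', 'c')

write_graph(g, 'tiny.gml')                      # format inferred via get_graph_type()
write_graph(g, 'tiny.csv', file_type='adjmat')  # format given explicitly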
Example #5
def main(inname='submission.yaml'):
    try:
        submission = nx.read_yaml(inname)
    except ScannerError as se:
        print(
            'Unable to read file: {}. Please ensure the file contains properly formatted YAML.'
            .format(inname))
        print(se)
        sys.exit(125)

    # No submission, case of an update to deprecated IDs
    if submission is None:
        indications = nx.read_yaml('indication_paths.yaml')
        indications = filter_deprecated_ids(indications)
        nx.write_yaml(indications, 'indication_paths.yaml', indent=4)
        print('Paths written successfully with deprecated IDs removed')
        sys.exit(0)

    # Prep the submission, fixing simple common errors and raising exceptions when a simple fix is not possible
    submission = test_and_fix(submission)

    # Determine if Update or new submission
    has_identifier = ['_id' in rec['graph'].keys() for rec in submission]
    if all(has_identifier):

        update_existing_records(submission)
    elif any(has_identifier):
        print('Mixed submissions and updates are not allowed. Please ensure that either all records '
              'contain an `_id` field in the `graph` field if updating, or none contain the `_id` '
              'field if submitting new records')
        sys.exit(400)

    else:
        add_new_submission(submission)
Example #6
def export_graph(graph, directory):
    # write to GraphML file
    nx.write_graphml(graph, directory + "/" + prefix + ".graphml")

    # write to Yaml file
    nx.write_yaml(graph, directory + "/" + prefix + ".yaml")

    # write to Gexf file
    nx.write_gexf(graph, directory + "/" + prefix + ".gexf")

    # write to JSON file
    json.dump(json_graph.node_link_data(graph),
              open(directory + "/" + prefix + ".json", 'w'),
              indent=2)

    # write to CSV files
    with open(directory + "/" + prefix + '_pages.csv', 'w') as csv_file:
        writer = csv.writer(csv_file)
        writer.writerow([
            'id', 'label', 'title', 'url', 'hub', 'authority', 'pagerank',
            'in_degree', 'out_degree', 'closeness', 'betweenness'
        ])
        for node, data in graph.nodes(data=True):
            writer.writerow([
                node, data['label'], data['title'], data['url'], data['hub'],
                data['authority'], data['pagerank'], data['in_degree'],
                data['out_degree'], data['closeness'], data['betweenness']
            ])

    with open(directory + "/" + prefix + '_pages_links.csv', 'w') as csv_file:
        writer = csv.writer(csv_file)
        writer.writerow(['from', 'to'])
        for f, t, a in graph.edges(data=True):
            writer.writerow([f, t])
Example #7
def save(G, file_name):
    """Save the graph to .yaml format

    .. note::
        Yaml was the only format that supported utf-8 attributes and dictionary attributes
    """
    import networkx as nx
    nx.write_yaml(G, file_name, encoding="utf-8")
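A round-trip sketch for the note above, assuming NetworkX 2.x (write_yaml and read_yaml were removed in 3.0), PyYAML installed, and made-up node names and attributes:

import networkx as nx

G = nx.Graph()
G.add_node("café", info={"lang": "fr", "count": 3})  # utf-8 label and a dict attribute
G.add_edge("café", "bar")

save(G, "demo.yaml")              # the helper defined above
H = nx.read_yaml("demo.yaml")
print(H.nodes["café"]["info"])    # the dict attribute survives the round trip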
Example #8
 def save(self, G, Epoch):
     result_dir = './result/'
     info = "{}_{}_{}_{}".format(self.network_type, self.player_type,
                                 self.update_rule, Epoch)
     Epoch_dir = os.path.join(result_dir, self.dir_str, info)
     if not os.path.exists(Epoch_dir):
         os.mkdir(Epoch_dir)
     graph_path = os.path.join(Epoch_dir, info + "_Graph.yaml")
     nx.write_yaml(G, graph_path)
Example #9
def convert_graph_to_yaml(graph, filename):
    """
    convert from a networkX graph object, to yaml format.
    :param graph: the networkX graph object.
    :param filename: the name of the file to write to.
    :return:
    """
    networkx.write_yaml(graph, filename)
    return
Example #10
def update_existing_records(submission, outname='indication_paths.yaml'):

    if submission is not None:
        print('Updating Indications...')

        indications = nx.read_yaml('indication_paths.yaml')

        current_ids = [ind['graph']['_id'] for ind in indications]
        submission_prepped = {rec['graph']['_id']: rec for rec in submission}

        # Make sure new records have unique IDs
        if len(submission_prepped) != len(submission):
            submitted_id_counts = Counter(
                [rec['graph']['_id'] for rec in submission])
            duplicated = [k for k, v in submitted_id_counts.items() if v > 1]
            print(
                'Error, the following identifiers were used for multiple paths:\n{}'
                .format(', '.join(duplicated)))
            sys.exit(400)

        # Make sure the Updated records exist
        diff = set(submission_prepped.keys()) - set(current_ids)
        if diff:
            print(
                "Error, the following path id's submitted for update do not exist:\n{}"
                .format(', '.join(diff)))
            sys.exit(400)

        out = []
        errors = []
        for ind in indications:
            _id = ind['graph']['_id']

            if _id in submission_prepped.keys():
                # Previous checks will have ensured that the `graph` data matches the `nodes` and `links` data
                # To ensure that we're updating the right path we can simply check that the identifier
                # is correct for the info found in `graph`
                try:
                    validate_path_id(_id, submission_prepped[_id])
                    out.append(submission_prepped[_id])
                except AssertionError as ae:
                    errors.append(ae)
            else:
                out.append(ind)

        if errors:
            print('Update Unsuccessful')
            print('There were {} paths that produced errors'.format(
                len(errors)))
            print('Please see error messages below\n')
            for error in errors:
                print(error, end='\n\n')
            sys.exit(400)
        else:
            out = filter_deprecated_ids(out)
            print('Update Successful')
            nx.write_yaml(out, outname, indent=4)
Example #11
def convert_graph_to_yaml(graph, filename):
    """
    convert from a networkX graph object, to yaml format.
    :param graph: the networkX graph object.
    :param filename: the name of the file to write to.
    :return:
    """
    networkx.write_yaml(graph, filename)
    return
Example #12
    def write_yaml(self, yaml_file_name):
        r"""Export to YAML format

        Parameters
        ----------
        yaml_file_name : str
            Path to the YAML file

        """
        nx.write_yaml(self, yaml_file_name)
Example #13
    def assert_equal(self, G, data=False):
        (fd, fname) = tempfile.mkstemp()
        nx.write_yaml(G, fname)
        Gin = nx.read_yaml(fname)

        assert_nodes_equal(list(G), list(Gin))
        assert_edges_equal(G.edges(data=data), Gin.edges(data=data))

        os.close(fd)
        os.unlink(fname)
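The same round-trip check can be written as a standalone helper. A sketch assuming NetworkX 2.x and PyYAML; roundtrip_equal is a name introduced here for illustration, not part of the original test suite:

import os
import tempfile

import networkx as nx

def roundtrip_equal(G, data=False):
    # write the graph to a temporary YAML file, read it back, and compare
    fd, fname = tempfile.mkstemp(suffix=".yaml")
    try:
        nx.write_yaml(G, fname)
        Gin = nx.read_yaml(fname)
    finally:
        os.close(fd)
        os.unlink(fname)
    nodes_match = sorted(G.nodes()) == sorted(Gin.nodes())
    edges_match = sorted(G.edges()) == sorted(Gin.edges())
    attrs_match = not data or all(
        G.edges[u, v] == Gin.edges[u, v] for u, v in G.edges())
    return nodes_match and edges_match and attrs_match

assert roundtrip_equal(nx.path_graph(4), data=True)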
Example #14
    def assert_equal(self, G, data=False):
        (fd, fname) = tempfile.mkstemp()
        nx.write_yaml(G, fname)
        Gin = nx.read_yaml(fname)

        assert_equal(sorted(G.nodes()), sorted(Gin.nodes()))
        assert_equal(G.edges(data=data), Gin.edges(data=data))

        os.close(fd)
        os.unlink(fname)
Example #15
def main():
    G = nx.Graph()
    G.add_node("조준영")
    G.add_node("김영진")
    G.add_edge("조준영","김영진")

    nx.write_yaml(G, "data.yml")

    nx.draw(G)
    plt.show()
Example #16
    def assert_equal(self, G, data=False):
        (fd, fname) = tempfile.mkstemp()
        nx.write_yaml(G, fname)
        Gin = nx.read_yaml(fname)

        assert_nodes_equal(list(G), list(Gin))
        assert_edges_equal(G.edges(data=data), Gin.edges(data=data))

        os.close(fd)
        os.unlink(fname)
Example #17
def combineAll(bucket):
	bucket=str(bucket)
	folder='/Users/Ish/Documents/OSM_Files/haiti_earthquake/networks/overlapping_changesets_by_'+bucket+'_hour/'
	B=nx.Graph()
	for file in os.listdir(folder):
		G=nx.read_gml(folder+file)
		G=nx.Graph(G)
		B=composeWeights(B,G)
	#return B
	nx.write_yaml(B,'../results/'+bucket+'hourBigNetwork.yaml')

combineAll(8)
Example #18
def prune_reconstructed_architectures(archs):
    tot_archs = []

    # remove the architecture doesn't make any sense
    for each_arch in archs:
        """
            1: Check if the narrow connection is sane
        """
        if not _sane_narrow_connection( \
            each_arch, '[0] Embedding', '[12] Sigmoid'): continue

        # add the survived ones
        tot_archs.append(each_arch)
    # end for each...

    # check if the store location exists
    store_loc = os.path.join(_save_dir, 'architectures')
    if not os.path.exists(store_loc): os.makedirs(store_loc)

    # store the architecture as a graph and data
    options = {
        'node_color': 'red',
        'node_size': 40,
        'width': 1,
        'alpha': 0.8,
        'arrowstyle': '-|>',
        'arrowsize': 8,
        'font_size': 10,
    }
    for aidx, each_arch in enumerate(tot_archs):
        # : write the edgelists to a YAML file
        nx.write_yaml( \
            each_arch, \
            os.path.join(store_loc, 'architecture_{}.yaml'.format(aidx)))

        # : relabel the node names to include the attribute parameters
        new_nodes = {}
        for each_node, each_data in each_arch.nodes(data=True):
            each_attr = '{}'.format(each_node)
            if ('attr_param' in each_data) \
                and each_data['attr_param']:
                each_attr += ' - {}'.format(each_data['attr_param'])
            new_nodes[each_node] = each_attr
        new_each_arch = nx.relabel_nodes(each_arch, new_nodes, copy=True)

        # : networkx - draw the graphs
        nx.draw_networkx(new_each_arch, arrows=True, **options)
        plt.savefig(os.path.join(store_loc, 'architecture_{}.pdf'.format(aidx)))
        plt.clf()
    # end for aidx...

    return tot_archs
Example #19
 def save_graph(self, filename, fileType):
     """Write graph to GML or Adjacency list or YML format
     
     Args:
         filename : File or filename to read
         fileType (str): Type of file to write, should be "GML Format" or "Adjacency list"
                         or "YAML"
     """
     if fileType == "GML Format":
         nx.write_gml(self.graph, filename + ".gml")
     if fileType == "Adjacency list":
         nx.write_adjlist(self.graph, filename + ".adjlist")
     if fileType == "YAML":
         nx.write_yaml(self.graph, filename + ".yaml")
Example #20
def combineAll(in_folder, out_folder):
    #expFolder='/Users/Ish/Dropbox/OSM/results/TwoWeeks/overlapping_changesets/ExpAnnotNets/\
    #overlapping_changesets_by_'+bucket+'_hour/'
    B = nx.DiGraph()
    for file in os.listdir(in_folder):
        if file != '.DS_Store':  #weird MAC thing
            path = in_folder + file

            G = getJsonNet(path)
            #G=nx.read_gml(path)
            G = nx.DiGraph(G)
            B = composeWeights(B, G)
    #return B
    nx.write_yaml(B, out_folder)
Example #21
 def save(self,G,Epoch):
     #Save Graph
     result_dir = './result/'
     info = "{}_{}_{}_{}".format(self.network_type,self.player_type,self.update_rule,Epoch)
     Epoch_dir = os.path.join(result_dir,self.dir_str,info)
     if not os.path.exists(Epoch_dir):
         os.mkdir(Epoch_dir)
     graph_path = os.path.join(Epoch_dir,info+"_Graph.yaml")
     nx.write_yaml(G,graph_path)
     #Save strategy
     p_vector = self.get_all_values(G,'p')
     q_vector = self.get_all_values(G,'q')
     pq_array = np.vstack((p_vector,q_vector))
     pq_path = os.path.join(Epoch_dir,info+"_strategy.csv")
     pq = pd.DataFrame(data = pq_array)
     pq.to_csv(pq_path)
Example #22
    def graph_generator(self, graph_model, graph_param, save_path, file_name):
        """
		Create random graph

		"""

        graph_param[0] = int(graph_param[0])
        if graph_model == 'ws':
            graph_param[1] = int(graph_param[1])
            graph = nx.random_graphs.connected_watts_strogatz_graph(
                *graph_param)
        elif graph_model == 'er':
            graph = nx.random_graphs.erdos_renyi_graph(*graph_param)
        elif graph_model == 'ba':
            graph_param[1] = int(graph_param[1])
            graph = nx.random_graphs.barabasi_albert_graph(*graph_param)

        # reuse a previously saved graph if one exists, otherwise build a new DiGraph
        if os.path.isfile(save_path + '/' + file_name + '.yaml'):
            print('graph loaded')
            dgraph = nx.read_yaml(save_path + '/' + file_name + '.yaml')
        else:
            dgraph = nx.DiGraph()
            dgraph.add_nodes_from(graph.nodes)
            dgraph.add_edges_from(graph.edges)

        in_node = []
        out_node = []
        for indeg, outdeg in zip(dgraph.in_degree, dgraph.out_degree):
            if indeg[1] == 0:
                in_node.append(indeg[0])
            elif outdeg[1] == 0:
                out_node.append(outdeg[0])
        sorted = list(nx.topological_sort(dgraph))

        if os.path.isdir(save_path) is False:
            os.makedirs(save_path)

        if os.path.isfile(save_path + '/' + file_name + '.yaml') is False:
            print('graph_saved')
            nx.write_yaml(dgraph, save_path + '/' + file_name + '.yaml')

        return dgraph, sorted, in_node, out_node
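A call sketch for the generator above, assuming obj is an instance of the enclosing class and using illustrative Watts-Strogatz parameters [n, k, p] (all names and values here are assumptions):

dgraph, topo_order, in_nodes, out_nodes = obj.graph_generator(
    graph_model='ws',
    graph_param=[16, 4, 0.5],
    save_path='./graphs',
    file_name='ws_16_4_05',
)
print(len(topo_order), in_nodes, out_nodes)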
Example #23
    def generate_cpt_graph(self, inputfile):
        output = self.config.OUTPUT_DIR
        concepts = self.parser.parse_cpt(inputfile)
        name = ntpath.basename(inputfile)
        graph = nx.MultiDiGraph()
        for concept in concepts:
            graph.add_node(f"Step:{concept.name}",
                           smell=False,
                           smell_names=list())
            for i, step in enumerate(concept.steps):
                graph.add_node(f"Step:{step}", smell=False, smell_names=list())
                graph.add_edge(f"Step:{concept.name}",
                               f"Step:{step}",
                               label=i + 1)
        nx.write_yaml(graph, f"{output}/{name}.yaml")

        return name
Example #24
def dump_digraph(filename, G):
	"""!
	Writes a networkx DiGraph as gpickle, adjacency list, YAML, GML and GEXF files
	@param [in] filename as string
	@param [in] G as networkX DiGraph
	"""
	print_info("Dumping digraph to " + filename)
	nx.write_gpickle(G, filename + design_name + ".gpickle")
	nx.write_adjlist(G, filename + design_name )
	nx.write_yaml(G, filename+ ".yaml")
	nx.write_gml(G, filename + design_name + ".gml")
	nx.write_gexf(G, filename + design_name +  ".gexf" , prettyprint=True)



	return
Example #25
def draw_graph(node_list, edge_list):

    G = nx.Graph()
    G.add_nodes_from(node_list)
    G.add_edges_from(edge_list)
    all_node_degrees = list(G.degree(node_list))
    sum_degree = 0
    for (u, v) in all_node_degrees:
        sum_degree += float(v)
    print("Average node degree: {}".format(sum_degree / len(node_list)))
    print(len(G))
    #nx.draw_shell(G, with_labels=True, node_size=200)
    #plt.savefig('apartment_high.png')
    #plt.show()
    if nx.is_connected(G):
        print("graph is being saved")
        print(len(G))
        nx.write_yaml(G, 'singlehouse_extreme.yaml')
Example #26
def build_cgraph_lib(ffn_netlist, dir_graph_out=None, ground=0, remove_dummy=False, dir_node_edge_list=None, dir_lib_out=None, 
                    guess_by_name=True, add_edge_annotation=False):
    """
        ffn_netlist: full file path of netlist to parse
        dir_graph_out: directory path to save the resulting circuit topology graph in yaml format
        dir_lib_out: directory path to save circuit topology graph lib in pickle format
        remove_dummy: when True, remove dummy mos
        guess_by_name: when True, some attributes such as signal type is inferred/guessed from net/instance name
        add_edge_annotation: when True, add matching/group edge constraints to the graph

        return dict (i.e. {circuit_name:networkx.Graph})
    """
    from PySpice.Spice.Parser import SpiceParser
    parser = SpiceParser(path=ffn_netlist)
    circuit = parser.build_circuit(ground=ground)
    
    cgraph_dict = {}
    top_name, ext = os.path.splitext(os.path.basename(ffn_netlist))
    G = build_cgraph(circuit, top_name, guess_by_name=guess_by_name, add_edge_annotation=add_edge_annotation)
    cgraph_dict[top_name] = G

    for c in (circuit.subcircuits):
        for e in (c.elements):
            G = build_cgraph(c, c.name, guess_by_name=guess_by_name, add_edge_annotation=add_edge_annotation)
        cgraph_dict[c.name] = G

    for name, G in cgraph_dict.items():
        if(dir_graph_out is not None):
            nx.write_yaml(G, os.path.join(dir_graph_out, "%s.yaml" % name))
        
        if(dir_node_edge_list is not None):
            node_number = {}
            with open(os.path.join(dir_node_edge_list, "%s.nodelist" % name), 'w') as f:
                for ii, n in enumerate(G.nodes()):
                    f.write("%s %d\n" % (n, ii))
                    node_number[n]=ii
            with open(os.path.join(dir_node_edge_list, "%s.edgelist" % name), 'w') as f:
                for e in G.edges():
                    f.write('%s %s\n' % (node_number[e[0]], node_number[e[1]]))

    if(dir_lib_out is not None):
        with open(os.path.join(dir_lib_out, "%s.cgraph.pkl" % top_name), 'wb') as f:
            pickle.dump(cgraph_dict, f, -1)
    return cgraph_dict
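A usage sketch for the helper above, assuming PySpice is installed, 'opamp.sp' is a SPICE netlist on disk, and the output directories already exist (all of these are assumptions, not part of the original project):

cgraphs = build_cgraph_lib('opamp.sp', dir_graph_out='graphs', dir_lib_out='lib')
for name, G in cgraphs.items():
    print(name, G.number_of_nodes(), G.number_of_edges())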
Example #27
    def generate_spec_graph(self, input_file):
        filename = ntpath.basename(input_file)
        output = self.config.OUTPUT_DIR
        scenarios = self.parser.parse_spec(input_file)
        graph = nx.MultiDiGraph()

        for scenario in scenarios:
            graph.add_node(f"Scenario:{scenario.name}",
                           source_file=scenario.source_file,
                           smell=False,
                           smell_names=list())
            for i, step in enumerate(scenario.steps):
                graph.add_node(f"Step:{step}", smell=False, smell_names=list())
                graph.add_edge(f"Scenario:{scenario.name}",
                               f"Step:{step}",
                               label=i + 1)

        nx.write_yaml(graph, f"{output}/{filename}.yaml")
        return filename
Example #28
def write_graph(graph, filename):
    """Write graph to file, raise IOError if cannot do it."""
    if filename.endswith('.yaml'):
        try:
            nx.write_yaml(graph, filename)
        except ImportError:
            print('E: cannot write graph to file in YAML format.')
            print('Please install PyYAML or other similar package '
                  'to use this functionality.')
            raise IOError
        else:
            print('Write constructed graph to: {0} '
                  'in YAML format.'.format(filename))
    elif filename.endswith('.gml'):
        try:
            nx.write_gml(graph, filename)
        except ImportError:
            print('E: cannot write graph to file in GML format.')
            print('Please install pyparsing package '
                  'to use this functionality.')
            raise IOError
        else:
            print('Write constructed graph to: {0} '
                  'in GML format.'.format(filename))
    elif filename.endswith('.net'):
        nx.write_pajek(graph, filename)
        print('Write constructed graph to: {0} '
              'in PAJEK format.'.format(filename))
    elif filename.endswith('.gexf'):
        graph = exclude_complex_attrs(graph)
        nx.write_gexf(graph, filename)
        print('Write constructed graph to: {0} '
              'in GEXF format.'.format(filename))
    elif filename.endswith('.graphml'):
        graph = exclude_complex_attrs(graph)
        nx.write_graphml(graph, filename)
        print('Write constructed graph to: {0} '
              'in GraphML format.'.format(filename))
    else:
        with open(filename, 'wb') as f:
            pickle.dump(graph, f, protocol=pickle.HIGHEST_PROTOCOL)
        print('Write constructed graph to: {0} '
              'in pickle format.'.format(filename))
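A short usage sketch for the extension-based writer above; the graph and filenames are illustrative, and the YAML and GML branches additionally need PyYAML and pyparsing, as the error handling suggests:

import networkx as nx

g = nx.path_graph(5)
write_graph(g, 'path.net')   # Pajek branch
write_graph(g, 'path.bin')   # unknown extension falls back to pickle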
Example #29
File: io.py Project: budnyjj/vkstat
def write_graph(graph, filename):
    """Write graph to file, raise IOError if cannot do it."""
    if filename.endswith('.yaml'):
        try:
            nx.write_yaml(graph, filename)
        except ImportError:
            print('E: cannot write graph to file in YAML format.')
            print('Please install PyYAML or other similar package '
                  'to use this functionality.')
            raise IOError
        else:
            print('Write constructed graph to: {0} '
                  'in YAML format.'.format(filename))
    elif filename.endswith('.gml'):
        try:
            nx.write_gml(graph, filename)
        except ImportError:
            print('E: cannot write graph to file in GML format.')
            print('Please install pyparsing package '
                  'to use this functionality.')
            raise IOError
        else:
            print('Write constructed graph to: {0} '
                  'in GML format.'.format(filename))
    elif filename.endswith('.net'):
        nx.write_pajek(graph, filename)
        print('Write constructed graph to: {0} '
              'in PAJEK format.'.format(filename))
    elif filename.endswith('.gexf'):
        graph = exclude_complex_attrs(graph)
        nx.write_gexf(graph, filename)
        print('Write constructed graph to: {0} '
              'in GEXF format.'.format(filename))
    elif filename.endswith('.graphml'):
        graph = exclude_complex_attrs(graph)
        nx.write_graphml(graph, filename)
        print('Write constructed graph to: {0} '
              'in GraphML format.'.format(filename))
    else:
        with open(filename, 'wb') as f:
            pickle.dump(graph, f, protocol=pickle.HIGHEST_PROTOCOL)
        print('Write constructed graph to: {0} '
              'in pickle format.'.format(filename))
Example #30
    def save_experiment(self, expr_dir='expr', include_module_classes=False):
        '''Save your experiment configurations to yaml files'''
        import yaml
        # Save locations 
        if not os.path.exists(expr_dir):
            os.mkdir(expr_dir)
        loc_file = os.path.join(expr_dir, "locations.yaml")
        top_file = os.path.join(expr_dir, 'topology.yaml')
        serv_file = os.path.join(expr_dir, 'services.yaml')

        self._locs.save_to_yaml(loc_file)
        if type(self._top) == dict:
            # Save dict
            with open(top_file, 'w') as s:
                yaml.dump(self._top, s)
        else:
            # This is networkx file
            nx.write_yaml(self._top, top_file)

        dump_serv = {}
        services = dict()
        for k, pt in self._servs.items():
            new_pt = pt.config.repr()
            if type(pt.service_map) == dict:
                serv = dict()
                for sk, sc in pt.service_map.items():
                    if not sc:
                        serv[sk.__name__] = sc
                    else:
                        serv[sk.__name__] = sc.repr()
                        
                    services[sk.__name__] = sk if include_module_classes else None
            else:
                serv = tuple(k.__name__ for k in pt.service_map)
                if include_module_classes:
                    services.update({k.__name__: k for k in pt.service_map})
                else:
                    services.update({k.__name__: None for k in pt.service_map})

            dump_serv[k] = PeerType(new_pt, serv)

        with open(serv_file, 'w') as s:
            yaml.dump([dump_serv, services], s)
Example #31
 def write_graph(self, G=None, subgraph_file=None):
     if G is None:
         G = self.context_graph
     if subgraph_file is None:
         subgraph_file = self.context_graph_file
     logging.info("Writing graph.")
     # write the graph out
     file_format = subgraph_file.split(".")[-1]
     if file_format == "graphml":
         nx.write_graphml(G, subgraph_file)
     elif file_format == "gml":
         nx.write_gml(G, subgraph_file)
     elif file_format == "gexf":
         nx.write_gexf(G, subgraph_file)
     elif file_format == "net":
         nx.write_pajek(G, subgraph_file)
     elif file_format == "yaml":
         nx.write_yaml(G, subgraph_file)
     elif file_format == "gpickle":
         nx.write_gpickle(G, subgraph_file)
     else:
          print("File format not found, writing graphml.")
         nx.write_graphml(G, subgraph_file)
Example #32
 def write_graph(self, G=None, subgraph_file=None):
     if G is None:
         G = self.context_graph
     if subgraph_file is None:
         subgraph_file = self.context_graph_file
     logging.info("Writing graph.")
     # write the graph out
     file_format = subgraph_file.split(".")[-1]
     if file_format == "graphml":
         nx.write_graphml(G, subgraph_file)
     elif file_format == "gml":
         nx.write_gml(G, subgraph_file)
     elif file_format == "gexf":
         nx.write_gexf(G, subgraph_file)
     elif file_format == "net":
         nx.write_pajek(G, subgraph_file)
     elif file_format == "yaml":
         nx.write_yaml(G, subgraph_file)
     elif file_format == "gpickle":
         nx.write_gpickle(G, subgraph_file)
     else:
          print("File format not found, writing graphml.")
         nx.write_graphml(G, subgraph_file)
Example #33
    def save_cell_graph(self, filename='cell_graph.gml', format='gml'):
        """Save the cell to cell connectivity graph in a file.

        """
        start = datetime.now()
        if format == 'gml':
            nx.write_gml(self.__cell_graph, filename)
        elif format == 'pickle':
            nx.write_gpickle(self.__cell_graph, filename)
        elif format == 'yaml':
            nx.write_yaml(self.__cell_graph, filename)
        elif format == 'graphml':
            nx.write_graphml(self.__cell_graph, filename)
        elif format == 'edgelist':
            nx.write_edgelist(self.__cell_graph, filename)
        else:
            raise Exception('Not supported: %s' % (format))
        end = datetime.now()
        delta = end - start
        config.BENCHMARK_LOGGER.info(
            'Saved cell graph in file %s of type %s in %g s' %
            (filename, format, delta.seconds + 1e-6 * delta.microseconds))
        print('Saved cell-to-cell connectivity data in', filename)
Example #34
    def save_cell_graph(self, filename="cell_graph.gml", format="gml"):
        """Save the cell to cell connectivity graph in a file.

        """
        start = datetime.now()
        if format == "gml":
            nx.write_gml(self.__cell_graph, filename)
        elif format == "pickle":
            nx.write_gpickle(self.__cell_graph, filename)
        elif format == "yaml":
            nx.write_yaml(self.__cell_graph, filename)
        elif format == "graphml":
            nx.write_graphml(self.__cell_graph, filename)
        elif format == "edgelist":
            nx.write_edgelist(self.__cell_graph, filename)
        else:
            raise Exception("Not supported: %s" % (format))
        end = datetime.now()
        delta = end - start
        config.BENCHMARK_LOGGER.info(
            "Saved cell graph in file %s of type %s in %g s"
            % (filename, format, delta.seconds + 1e-6 * delta.microseconds)
        )
        print("Saved cell-to-cell connectivity data in", filename)
Example #35
def main(out_dir, direction, num_graphs, num_nodes, graph_type, graph_params, eta,
         require_connected=False, require_invertible=False, require_stationary=False):


    graph_params = yaml.safe_load(graph_params)

    out_path = Path(out_dir) / graph_type
    out_path = out_path / '/'.join([f'{param}_{graph_params[param]}' for param in sorted(graph_params.keys())])
    out_path.mkdir(parents=True, exist_ok=True)
    graph_params_str = out_path / '/'.join([f'{param}_{graph_params[param]}' for param in sorted(graph_params.keys())])

    i = 0

    while i < num_graphs:
        i_d = str(uuid4())


        graph = make_graph(graph_type, graph_params, eta, require_connected=require_connected,
                                  require_invertible=require_invertible, require_stationary=require_stationary)


        out_loc = out_path / (graph.graph['id'] + '.yaml')
        nx.write_yaml(graph, out_loc)
        i+=1
Example #36
def add_new_submission(submission, outname='indication_paths.yaml'):

    if submission is not None:
        print('Building Indications...')

        indications = nx.read_yaml('indication_paths.yaml')
        out = []

        # Ensure nothing is duplicated
        for path in indications + submission:

            # Ensure that we have a list for all references
            if path.get('reference'):
                path['reference'] = references_to_list(path)

            if not is_path_in_paths(path, out):
                out.append(path)

        out = create_ids(out)
        out = filter_deprecated_ids(out)
        nx.write_yaml(out, outname, indent=4)
        print('Build Successful')
    else:
        sys.exit(125)
Example #37
 def on_actionYAML_activated(self):
     """
     Slot documentation goes here.
     """
     if self.network:
         nx.write_yaml(self.network.G, self.filename.split('.')[0]+'.yaml')
Example #38
 def to_yaml(self):
     "Exports the full graph to yaml"
     file_name = input("Save as: ")
     nx.write_yaml(self.graph,
                   file_name+".yaml",
                   encoding='utf-8')
Example #39
def save_graph(G, path):
    nx.write_yaml(G, os.path.join('./corpus', path))
Example #40
def genGraphRAMON(token_name, channel, graphType="graphml", xmin=0, xmax=0, ymin=0, ymax=0, zmin=0, zmax=0):
  """Generate the graph based on different inputs"""
  
  # converting all parameters to integers
  [xmin, xmax, ymin, ymax, zmin, zmax] = [int(i) for i in [xmin, xmax, ymin, ymax, zmin, zmax]]
  proj = NDProject.fromTokenName(token_name)

  with closing (ramondb.RamonDB(proj)) as db:
    ch = proj.getChannelObj(channel)
    resolution = ch.resolution

    cubeRestrictions = xmin + xmax + ymin + ymax + zmin + zmax
    matrix = []
    
    # assumption that the channel is a neuron channel
    if cubeRestrictions != 0:
      idslist = getAnnoIds(proj, ch, resolution, xmin, xmax, ymin, ymax, zmin, zmax)
    else:
      # entire cube
      [xmax, ymax, zmax] = proj.datasetcfg.get_imagesize(resolution)
      idslist = getAnnoIds(proj, ch, resolution, xmin, xmax, ymin, ymax, zmin, zmax)

    if idslist.size == 0:
      logger.error("Area specified x:{},{} y:{},{} z:{},{} is empty".format(xmin, xmax, ymin, ymax, zmin, zmax))
      raise NDWSError("Area specified x:{},{} y:{},{} z:{},{} is empty".format(xmin, xmax, ymin, ymax, zmin, zmax))

    annos = {}
    for i in idslist:
      tmp = db.getAnnotation(ch, i)
      if int(db.annodb.getAnnotationKV(ch, i)['ann_type']) == annotation.ANNO_SYNAPSE:
        annos[i]=[int(s) for s in tmp.getField('segments').split(',')]

    # create and export graph
    outputGraph = nx.Graph()
    for key in annos:
      outputGraph.add_edges_from([tuple(annos[key])])

  try:
    
    f = tempfile.NamedTemporaryFile()
    if graphType.upper() == "GRAPHML":
      nx.write_graphml(outputGraph, f)
    elif graphType.upper() == "ADJLIST":
      nx.write_adjlist(outputGraph, f)
    elif graphType.upper() == "EDGELIST":
      nx.write_edgelist(outputGraph, f)
    elif graphType.upper() == "GEXF":
      nx.write_gexf(outputGraph, f)
    elif graphType.upper() == "GML":
      nx.write_gml(outputGraph, f)
    elif graphType.upper() == "GPICKLE":
      nx.write_gpickle(outputGraph, f)
    elif graphType.upper() == "YAML":
      nx.write_yaml(outputGraph, f)
    elif graphType.upper() == "PAJEK":
      nx.write_pajek(outputGraph, f)
    else:
      nx.write_graphml(outputGraph, f)
    f.flush()
    f.seek(0)
  
  except:
    
    logger.error("Internal file error in creating/editing a NamedTemporaryFile")
    f.close()
    raise NDWSError("Internal file error in creating/editing a NamedTemporaryFile")

  return (f, graphType.lower())
Example #41
 def test_all(self):
     write_yaml(self.parser.merged_graph, 'target/gaf-graph.yaml')
Example #42
 def _dump_graph_yaml(self, path):
     net.write_yaml(self.graph, path)
Example #43
 def mouseReleaseEvent(self,event):
     QtGui.QGraphicsItemGroup.mouseReleaseEvent(self,event)
     self.node.pos = [self.pos().x(),self.pos().y()]
     networkx.write_yaml(self.graph,"/tmp/test.yaml")
Example #44
 def save(self):
   nx.write_yaml(self.graph,'graph.yaml')
Example #45
def create_graph():  
    g = nx.Graph()
    xml_docs = Collection()
    xml_docs_subset = xml_docs.get_docs(author="Wau Holland")
    docs_no = len(xml_docs_subset)
    id_dict = dict()
    stems_dict = dict()
    doc_id = 1
    
    print("Put stems into a dict for each document (with a unique id) ...")
    print("Create nodes with all the documents' relevant information ...")
    pb = ProgressBar(maxval=docs_no).start()
    
    for xml_doc in xml_docs_subset:
        pb.update(doc_id)
        id_dict[xml_doc.get_xml_filename()] = doc_id
        g.add_node(doc_id, 
                   id = xml_doc.get_id(),
                   rawlen = xml_doc.get_rawlen(),
                   subj = xml_doc.get_subj(),
                   author = xml_doc.get_author(),
                   date = xml_doc.get_date(),
                   words = xml_doc.get_words(),
                   uniq_stems = list(xml_doc.get_stems(uniq=True, 
                                                       relev=True)),
                   rawcontent = xml_doc.get_rawcontent()
                   )
        doc_id += 1
        # It seems sometimes a list (-> set conversion) gets returned 
        # ... ugly. XXX
        stems_dict[doc_id] = set(xml_doc.get_stems(uniq=True, relev=True))
        
    print("Create undirected, weighted graph based on Jaccard similarity ...")
    no_of_edges = docs_no * (docs_no - 1) / 2
    pb = ProgressBar(maxval=no_of_edges).start()
    count = 1
    for doc_idx1 in stems_dict.keys():
        doc_idx2 = doc_idx1 + 1
        # Nothing left to compare
        if (doc_idx1 == docs_no):
            break
    
        while True:
            # print "Comparing: ", doc_idx1, doc_idx2
            
            # Find longer doc
            doc1_len, doc2_len = len(stems_dict[doc_idx1]), \
                                    len(stems_dict[doc_idx2])
            long_doc_len = max((doc1_len, doc2_len))
            short_doc_len = min((doc1_len, doc2_len))
            
            # In case a document has no useful stems to classify
            edge_weight = 0
            alias_coeff = 0
            if long_doc_len == 0 or short_doc_len == 0:
                pass
            else:
                alias_coeff = float(long_doc_len) / short_doc_len
            
                edge_weight = (1 - jaccard_distance(stems_dict[doc_idx1],
                                           stems_dict[doc_idx2])) \
                           * alias_coeff
                           
            print(alias_coeff, edge_weight)
            
            # Still redundant, only for testing
            if (edge_weight > 0.3):
                cluster_stems = stems_dict[doc_idx1].intersection(
                               stems_dict[doc_idx2])
                try: 
                    g.node[doc_idx1]['cluster_stems']
                except KeyError:
                    g.node[doc_idx1]['cluster_stems'] = cluster_stems
                else:
                    for stem in cluster_stems:
                        g.node[doc_idx1]['cluster_stems'].add(stem)
                try: 
                    g.node[doc_idx2]['cluster_stems']
                except KeyError:
                    g.node[doc_idx2]['cluster_stems'] = cluster_stems
                else:
                    for stem in cluster_stems:
                        g.node[doc_idx2]['cluster_stems'].add(stem)
            
            # To be made more flexible
            if edge_weight > 0.3:
                g.add_edge(doc_idx1, doc_idx2, weight=edge_weight)
            doc_idx2 += 1
            pb.update(count)
            count += 1
            if doc_idx2 > docs_no:
                break
    
    print("Draw graph showing possible clusters  ...")
    
    elarge = [(u,v) for (u,v,d) in g.edges(data=True) if d['weight'] > 0.4]
    emedium = [(u,v) for (u,v,d) in g.edges(data=True) 
              if d['weight'] > 0.2 and d['weight'] < 0.4]
    esmall = [(u,v) for (u,v,d) in g.edges(data=True) if d['weight'] <= 0.2]
    print("elarge: ", len(elarge))
    print("emedium: ", len(emedium))
    print("esmall: ", len(esmall))
       
    pos = nx.spring_layout(g, scale=20)
    #pos = nx.random_layout(g)
    
    dlarge = [n for n,d in g.degree_iter() if d >= 20]
    dmedium = [n for n,d in g.degree_iter() if d > 1 and d < 20]
    dsmall = [n for n,d in g.degree_iter() if d == 1]
    dnone = [n for n,d in g.degree_iter() if d == 0]
    print("dlarge: ", len(dlarge))
    print("dmedium: ", len(dmedium))
    print("dsmall: ", len(dsmall))
    print("dnone: ", len(dnone))
    
    # Draw nodes
    # nx.draw_networkx_nodes(g, pos, node_size=5, linewidths=0)
    nx.draw_networkx_nodes(g, pos, nodelist=dlarge, node_size=20,
                           node_color='b',
                           linewidths=0)
    nx.draw_networkx_nodes(g, pos, nodelist=dmedium, node_size=10,
                           node_color='g',
                           alpha=0.8, 
                           linewidths=0)
    nx.draw_networkx_nodes(g, pos, nodelist=dsmall, node_size=5,
                           node_color='b',
                           alpha=0.2,
                           linewidths=0,
                           )
    nx.draw_networkx_nodes(g, pos, nodelist=dnone, node_size=5,
                           node_color='b',
                           alpha=0.2, 
                           linewidths=0)
    
    # Draw edges
    nx.draw_networkx_edges(g, pos, edgelist=elarge, width=0.4)
    nx.draw_networkx_edges(g, pos, edgelist=emedium, edge_color='g', 
                           alpha=0.8, width=0.2)
    nx.draw_networkx_edges(g, pos, edgelist=esmall, width=0.1,
                           alpha=0.1, edge_color='b')
    
    # Draw labels
    # nx.draw_networkx_labels(g, pos, font_size=1, font_family='sans-serif')
    
    plt.axis('off')
    plt.figure(1, figsize=(20,20))
    """
    print "Print PNG"
    plt.savefig("graph.png", dpi=600)
    """
    # plt.show()
    nx.write_yaml(g, get_graph_file())
    d3_js.export_d3_js(g)
Example #46
 def export_yaml(self,fn=None):
     if not self._graph:
         self.convert()
     write_yaml(self._graph, fn)
Example #47
 def write_graph(self, outfile):
     nx.write_yaml(self.graph, outfile)
Example #48
import json

if __name__ == '__main__':
    graph = nx.read_edgelist('input/example_graph.edgelist', nodetype=int, data=(('weight', float),))
    assert isinstance(graph, nx.Graph)
    print('edges:', graph.edges())

    # raw
    nx.write_adjlist(graph, 'output_raw/example_graph.adjlist')
    nx.write_multiline_adjlist(graph, 'output_raw/example_graph.multiline_adjlist')
    nx.write_edgelist(graph, 'output_raw/example_graph.edgelist')

    # better serialization
    nx.write_gpickle(graph, 'output_serialization/example_graph.pickle')
    nx.write_yaml(graph, 'output_serialization/example_graph.yaml')
    nx.write_graph6(graph, 'output_serialization/example_graph.graph6')

    # xml
    nx.write_gexf(graph, 'output_xml/example_graph.gexf')
    nx.write_graphml(graph, 'output_xml/example_graph.graphml')

    # json
    with open('output_json/node_link.json', 'w') as outfile:
        json.dump(json_graph.node_link_data(graph), outfile, indent=2)

    with open('output_json/adjacency.json', 'w') as outfile:
        json.dump(json_graph.adjacency_data(graph), outfile, indent=2)

    # other
    nx.write_gml(graph, 'output_other/example_graph.gml')
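For reference, a hedged sketch of reading a few of these outputs back, assuming the same output directories and a NetworkX release old enough to still provide read_gpickle (like write_yaml, the pickle helpers were removed in NetworkX 3.0):

import json

import networkx as nx
from networkx.readwrite import json_graph

g1 = nx.read_gpickle('output_serialization/example_graph.pickle')
g2 = nx.read_graphml('output_xml/example_graph.graphml')
with open('output_json/node_link.json') as infile:
    g3 = json_graph.node_link_graph(json.load(infile))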
Example #49
def gen_data(graph_path, lda_path, user_lim=200, user_wb_lim=200):

    import jieba
    import jieba.posseg as pseg
    jieba.load_userdict(u"/etc/jieba/jieba.dic")

    G = nx.Graph()
    ldaf = open(lda_path, 'w')
    ldaf.write("%d\n" % user_lim)
    ucnt = 0
    for item in Weibo.objects.values('owner').annotate(cnt=Count('owner')):
        if item['cnt']>450:
            user = Account.objects.get(id=item['owner'])
            logging.info(u'%5d Dealing with %s' % (ucnt, user))
            logging.info(u'Current graph:%d nodes and %d edges' % (G.number_of_nodes(), G.number_of_edges()))
            user_words = []
            for wb in user.ownweibo.order_by("-created_at").all()[:user_wb_lim]:
                #filter(retweeted_status__exact=None).all():
                text = wb.text.lower()
                #TODO
                #if wb.retweeted_status:
                #   text = wb.retweeted_status.text.lower() + text

                text = re.sub("@[^\s@:]+", "", text)
                text = re.sub(u"http://t.cn[^ ]*", u"", text)
                text = re.sub(u"\[[^ ]{1,3}\]", u"", text)
                for word in re.findall(u"【.+?】|#.+?#|《.+?》|“.+?”|\".+?\"", text):
                    for w in pseg.cut(word):
                        if len(w.word)<2 or w.word in Config.STOP_WORDS or 'n' not in w.flag:
                            continue
                        wd = w.word.encode('utf-8')
                        if G.has_node(wd) and 'weight' in G.node[wd]:
                            G.node[wd]['weight'] += 1.0
                        else:
                            G.add_node(wd, weight=1.0)

                wb_words = []
                for w in pseg.cut(text):
                    if len(w.word)>1 and 'n' in w.flag and w.word not in Config.STOP_WORDS:
                        wb_words.append(w.word.encode('utf-8'))
                if not wb_words:
                    continue
                for w1, w2, w3 in zip(wb_words[:-2], wb_words[1:-1], wb_words[2:]):
                    if not G.has_edge(w1, w2):
                        G.add_edge(w1, w2, weight=1.0)
                    else:
                        G[w1][w2]['weight'] += 1.0
                    if not G.has_edge(w1, w3):
                        G.add_edge(w1, w3, weight=1.0)
                    else:
                        G[w1][w3]['weight'] += 1.0
                user_words.extend(wb_words)

            if not user_words:
                continue
            ldaf.write(' '.join(user_words)+'\n')
            ucnt += 1
            if ucnt>=user_lim:
                break

    if ucnt<user_lim:
        logging.error("no enough docs, %d/%d" % (ucnt, user_lim))

    if graph_path:
        nx.write_yaml(G, graph_path, encoding='UTF-8')
    ldaf.close()

    return G
Example #50
 def write_graph_file(self):
     '''
     Write the graph file to disk.
     '''
     networkx.write_yaml(self.graph, GRAPH_FILE)
Example #51
	def saveUrlGraph(self):
		filefloder = r'../data/'
		filename = filefloder + 'whutgraph.yaml'
		nx.write_yaml(self.DG,filename)
		nx.draw(self.DG)
		plt.savefig("whutnetwork.png")
Example #52
 def saveGraph(self, filename):
     """Saves the graph as a YAML file
     """
     nx.write_yaml(self.G,filename)
Example #53
 def save(self):
     nx.write_yaml(self.graph, "graph.yaml")
Example #54
try:
    g.write_picklez("user user org.picz")
except:
    pass

g.write_adjacency("user user org.adj")

g.write_svg("user user org.svg")


"""


print('initialising module .. reading graph')
# g=Graph.Read_Pickle("user user org.pic")
# g2=nx.Graph()
# g3=nx.read_gpickle("user user org.pic")
g=nx.read_gpickle("user user org.pic")
# print g2
g=nx.convert_node_labels_to_integers(g)
print('writing pajek')
# nx.write_pajek(g,"org_mem.net")
nx.write_adjlist(g,"org_mem.adj")
nx.write_yaml(g,"org_mem.yaml")
# plt.show()
# print('reading graph done.. layout graph initilaizing')
# layout = g.layout("")
# print('done.. strting to plot graph')
# plot(g, layout = layout)
# print 'plot done :D'
Example #55
        else:
            wot_file = open(args.wot_file, "rb")
    else:
        wot_file = latest_wot()

    logging.info("Parsing files...")
    G = read_wot(wot_file)
    logging.info("Read {0} keys, {1} signatures".format(nx.number_of_nodes(G),
                                                        nx.number_of_edges(G)))

    logging.info("Filtering...")
    if args.mutual:
        G = G.to_undirected(reciprocal=True)
        G = nx.connected_component_subgraphs(G)[0]

    if args.key:
        G = nx.ego_graph(G, args.key, radius=args.radius, undirected=True)

    logging.info("Writing output file...")
    if args.output:
        outfile = open(args.output, "wb")
    else:
        outfile = sys.stdout.buffer

    if args.file_format == "gexf":
        nx.write_gexf(G, outfile)
    elif args.file_format == "graphml":
        nx.write_graphml(G, outfile)
    elif args.file_format == "yaml":
        nx.write_yaml(G, outfile)
Example #56
    
    #data['pwy'] = ','.join(sorted(set(data['pwy'])))

for nA,nB,data in G.edges(data=True):
    
    w = data['weight']
    occA = G.node[nA]['occ']
    occB = G.node[nB]['occ']

    print(nA, nB, occA, occB, w)
    data['nw'] = w ** 2 / (occA + occB - w)
    data['stanza'] = '/'.join(data['stanza'])


nx.write_gml(G, 'R_shijing.gml')
nx.write_yaml(G, 'R_shijing.yaml')
import html

with open('R_shijing.gml') as f:
    shijing = f.read()
with open('R_shijing.gml', 'w') as f:
    f.write(html.unescape(shijing))

# get chars which are unresolved
M = {}
for k in wl:
    

    ocbsyun = wl[k,'ocbsyun']
    rid = wl[k,'rhymeid']
    char = wl[k,'character']
Example #57
def genGraphRAMON(database, project, channel, graphType="graphml", Xmin=0, Xmax=0, Ymin=0, Ymax=0, Zmin=0, Zmax=0,):
  cubeRestrictions = int(Xmin) + int(Xmax) + int(Ymin) + int(Ymax) + int(Zmin) + int(Zmax)

  conn = MySQLdb.connect(host=settings.DATABASES['default']['HOST'], user=settings.DATABASES['default']['USER'], passwd=settings.DATABASES['default']['PASSWORD'], db=project.getProjectName())

  matrix = []

  if cubeRestrictions != 0:
    idslist = getAnnoIds(project, channel, Xmin, Xmax, Ymin, Ymax, Zmin, Zmax)
    if (idslist.size) == 0:
      logger.warning("Area specified is empty")
      raise OCPCAError("Area specified is empty")

    with closing(conn.cursor()) as cursor:
      for i in range(idslist.size):
        cursor.execute(("select kv_value from {} where kv_key = 'synapse_segments' and annoid = {};").format(
            channel.getKVTable(""), idslist[i]))
        matrix.append(cursor.fetchall()[0])
  else:
    with closing(conn.cursor()) as cursor:
      cursor.execute(("select kv_value from {} where kv_key = 'synapse_segments';").format(
          channel.getKVTable("")))
      matrix = cursor.fetchall()

  synapses = np.empty(shape=(len(matrix), 2))
  rawstring = (matrix[0])[0]
  splitString = rawstring.split(",")

  if len(splitString) == 2:
    # For kv pairs with 127:0, 13:0 (for example)
    for i in range(len(matrix)):
        # Get raw from matrix
      rawstring = (matrix[i])[0]
      splitString = rawstring.split(",")

      # Split and cast the raw string
      synapses[i] = [int((splitString[0].split(":"))[0]), int((splitString[1].split(":"))[0])]
  else:
    # for kv pairs with just 4:5
    for i in range(len(matrix)):
      # Get raw from matrix
      rawstring = (matrix[i])[0]
      # Split and cast the raw string
      synapses[i] = rawstring.split(":")

  # Create and export graph
  outputGraph = nx.Graph()
  outputGraph.add_edges_from(synapses)

  if graphType.upper() == "GRAPHML":
    nx.write_graphml(outputGraph, ("/tmp/{}_{}.graphml").format(
        project.getProjectName(), channel.getChannelName()))
    return ("/tmp/{}_{}.graphml").format(project.getProjectName(), channel.getChannelName())
  elif graphType.upper() == "ADJLIST":
    nx.write_adjlist(outputGraph, ("/tmp/{}_{}.adjlist").format(
        project.getProjectName(), channel.getChannelName()))
    return ("/tmp/{}_{}.adjlist").format(project.getProjectName(), channel.getChannelName())
  elif graphType.upper() == "EDGELIST":
    nx.write_edgelist(outputGraph, ("/tmp/{}_{}.edgelist").format(
        project.getProjectName(), channel.getChannelName()))
    return ("/tmp/{}_{}.edgelist").format(project.getProjectName(), channel.getChannelName())
  elif graphType.upper() == "GEXF":
    nx.write_gexf(outputGraph, ("/tmp/{}_{}.gexf").format(
        project.getProjectName(), channel.getChannelName()))
    return ("/tmp/{}_{}.gexf").format(project.getProjectName(), channel.getChannelName())
  elif graphType.upper() == "GML":
    nx.write_gml(outputGraph, ("/tmp/{}_{}.gml").format(
        project.getProjectName(), channel.getChannelName()))
    return ("/tmp/{}_{}.gml").format(project.getProjectName(), channel.getChannelName())
  elif graphType.upper() == "GPICKLE":
    nx.write_gpickle(outputGraph, ("/tmp/{}_{}.gpickle").format(
        project.getProjectName(), channel.getChannelName()))
    return ("/tmp/{}_{}.gpickle").format(project.getProjectName(), channel.getChannelName())
  elif graphType.upper() == "YAML":
    nx.write_yaml(outputGraph, ("/tmp/{}_{}.yaml").format(
        project.getProjectName(), channel.getChannelName()))
    return ("/tmp/{}_{}.yaml").format(project.getProjectName(), channel.getChannelName())
  elif graphType.upper() == "PAJEK":
    nx.write_pajek(outputGraph, ("/tmp/{}_{}.net").format(
        project.getProjectName(), channel.getChannelName()))
    return ("/tmp/{}_{}.net").format(project.getProjectName(), channel.getChannelName())
  else:
    nx.write_graphml(outputGraph, ("/tmp/{}_{}.graphml").format(
        project.getProjectName(), channel.getChannelName()))
    return ("/tmp/{}_{}.graphml").format(project.getProjectName(), channel.getChannelName())
Example #58
for node,data in _G.nodes(data=True):
    G.add_vertex(name=node, **data)
for nodeA, nodeB, data in _G.edges(data=True):
    G.add_edge(nodeA, nodeB, **data)



# do the same for the N graph
_N = igraph.Graph()
for node,data in N.nodes(data=True):
    _N.add_vertex(name=node, **data)
for nodeA, nodeB, data in N.edges(data=True):
    if data['occurrence'] >= 10:
        _N.add_edge(nodeA, nodeB, **data)

nx.write_yaml(N, 'R_rime_graph.yaml') 


if 'communities' in argv:
    C = _N.community_infomap(
            edge_weights = 'occurrence',
            vertex_weights = 'occurrence'
            )
    for community,name in zip(C.membership, _N.vs['name']):
        N.node[name]['infomap'] = community

    print('[i] Calculated communities for rhyme groups')
    
    # check statistics, for example, get
    
    C = G.community_infomap(