def graph():
    """Build a GEXF graph from data/jamia/data_out2.json and render graph.html.

    Reads a list of {source, target, weight} edge records, creates one node
    per distinct endpoint and one edge per record, writes the result to
    app/static/g.gexf, and returns the rendered template.
    """
    # `with` closes the handles deterministically (the original leaked both).
    with open('data/jamia/data_out2.json', 'r') as fh:
        data = json.load(fh)
    # Collect every distinct endpoint so each appears exactly once as a node.
    nodelist = set()
    for d in data:
        nodelist.add(d['source'])
        nodelist.add(d['target'])
    gexf = Gexf("Yiye Zhang", "Test graph")
    graph = gexf.addGraph("directed", "static", "testing graph")
    for i in nodelist:
        graph.addNode(i, i)
    # Edge ids are a running counter; source/target reference node ids.
    for k, d in enumerate(data):
        graph.addEdge(k, d['source'], d['target'], weight=d['weight'])
    with open("app/static/g.gexf", "w") as output_file:
        gexf.write(output_file)
    return render_template('graph.html')
def main():
    """Build the Politikon autoreference graph from a WordPress export XML.

    Each post becomes a node (colored by author); each in-post link to
    another politikon.es post becomes a directed edge.  Posts whose links
    point only at unknown targets are collected in `orphan` and removed
    before writing GEXF_OUTPUT_FILE.
    """
    gexf = Gexf("Politikon", "Autoreferences graph")
    graph = gexf.addGraph("directed", "static", "a hello world graph")
    tree = ET.parse(INPUT_WP_XML)
    root = tree.getroot()
    # Compile each pattern once instead of re-matching the same literal
    # pattern (the original matched the post pattern twice per item).
    post_re = re.compile(r"http://politikon.es/\d\d\d\d/\d\d/\d\d/([\w*-]*)/")
    link_re = re.compile(r"http://politikon.es/\d\d\d\d/\d\d/[\d\d/]*([\w*-]*)/")
    orphan = []
    for item in root[0].findall('item'):
        m = post_re.match(item.find('link').text)
        if m:
            post_id = m.group(1)
            post_link = item.find('link').text
            r, g, b = get_color(item.find('{http://purl.org/dc/elements/1.1/}creator').text)
            graph.addNode(id=str(post_id), label=str(post_link),
                          r=str(r), g=str(g), b=str(b))
            links = get_post_links(item.find('{http://purl.org/rss/1.0/modules/content/}encoded').text)
            if links:
                for link in links:
                    link_id = link_re.match(link).group(1)
                    if link_id in orphan:
                        orphan.remove(link_id)
                    if graph.nodeExists(link_id):
                        graph.addEdge(post_id + "->" + link_id, post_id, link_id)
                    else:
                        # NOTE(review): appends post_id (not link_id), as in
                        # the original -- confirm that is the intent.
                        orphan.append(post_id)
    # Drop nodes whose links never resolved to a known post.
    for o in orphan:
        del graph._nodes[o]
    # Close the output handle (the original leaked it).
    with open(GEXF_OUTPUT_FILE, "w") as output_file:
        gexf.write(output_file)
def write_gexf(G, fpath):
    """Convert an igraph graph *G* to a gexf file at *fpath*.

    Registers the custom node/edge attributes used by add_gexf_node /
    add_gexf_edge, copies every vertex and edge across, and writes the
    dynamic (date-based) GEXF document.
    """
    gexf = Gexf("QNATool", config.project_name)
    gexf_graph = gexf.addGraph(
        'directed', 'dynamic', config.project_name, timeformat='date')
    # Custom edge attributes consumed by add_gexf_edge.
    gexf_graph.addEdgeAttribute('article_path', 0, type='string',
                                force_id='article_path')
    gexf_graph.addEdgeAttribute('sentiment', 0, force_id='sentiment')
    gexf_graph.addEdgeAttribute('sentences', 0, type='string',
                                force_id='sentences')
    # Custom node attributes consumed by add_gexf_node.
    gexf_graph.addNodeAttribute('community', 0, force_id='community')
    gexf_graph.addNodeAttribute('kind', 0, type='string', force_id='kind')
    gexf_graph.addNodeAttribute('frequency', 0, force_id='frequency')
    for vertex in G.vs:
        add_gexf_node(vertex.index, G, gexf_graph)
    for source_id, target_id in G.get_edgelist():
        add_gexf_edge(source_id, target_id, G, gexf_graph)
    # Close the handle deterministically (the original leaked it).
    with open(fpath, 'w') as f:
        gexf.write(f)
def build_gexf(edges, out_name, p_sample = 1): if not Gexf_loaded: print 'Could not load Gexf from module gexf.' return gexf = Gexf("snikolov", out_name) graph = gexf.addGraph('directed', 'dynamic', out_name) end = str(max([edge[2] for edge in edges])) for (src, dst, time) in edges: if np.random.rand() > p_sample: continue # Assumes time is in epoch seconds #d = datetime.datetime.fromtimestamp(int(time)) #date = d.isoformat() start = str(time) if src != -1: if not graph.nodeExists(src) or start < graph._nodes[src].start: graph.addNode(id = src, label = '', start = start, end = end) if not graph.nodeExists(dst) or start < graph._nodes[dst].start: graph.addNode(id = dst, label = '', start = start, end = end) graph.addEdge(id = str(src) + ',' + str(dst), source = src, target = dst, start = start, end = end) else: if not graph.nodeExists(dst): graph.addNode(id = dst, label = '', start = start, end = end) out = open('/Users/snikolov/projects/twesearch/data/graphs/' + out_name + '.gexf', 'w') gexf.write(out) out.close()
def main():
    """Convert InfoPath output into a GEXF snapshot at one timestep.

    Usage: parse_output.py infopath-output.txt timesteps.txt timestamp
    The InfoPath file starts with "uid,uname" lines, then edge lines of the
    form "src,dst,ts1,w1,ts2,w2,...".  The written graph contains the edges
    (and their endpoint nodes) active at the requested timestep.
    """
    if len(sys.argv) != 4:
        sys.exit("Format: parse_output.py infopath-output.txt timesteps.txt timestamp")
    infopath_output_file = sys.argv[1]
    timesteps_file = sys.argv[2]
    ts_to_gen = int(sys.argv[3])
    out_file = "%s-at-%d.gexf" % (infopath_output_file.split('.')[0], ts_to_gen)
    fin = open(infopath_output_file, 'r')
    fts = open(timesteps_file, 'r')
    fout = open(out_file, 'w')
    gexf = Gexf('tribhu_infopath', '31-03-2014')
    graph = gexf.addGraph('directed', 'dynamic', '31-03-2014')
    # Skip past the user list; a line without a comma separates it from
    # the edge section.
    for line in fin:
        if ',' not in line:
            break
        uid, uname = line.split(',')
        uid = int(uid)
        # graph.addNode(str(uid), uname[-1])
    timesteps = map(lambda x: float(x.strip()), fts.readlines())
    first_ts = timesteps[0]
    last_ts = timesteps[-1]
    #
    # Create a dict: timestamp -> [ (source, target, weight), ... ]
    #
    ts_to_edges_dct = defaultdict(list)
    for line in fin:
        vals = map(float, line.split(','))
        source, target = map(int, vals[0:2])
        # <timestamp, weight> pairs start at index 2.
        # BUG FIX: the original ranged over len(vals[2:]) - 1 == len(vals) - 3,
        # which silently dropped the last pair of every line.
        edge_weights = [(int(vals[i]), float(vals[i + 1]))
                        for i in range(2, len(vals) - 1, 2)]
        for ts, weight in edge_weights:
            ts_to_edges_dct[ts] += [(source, target, weight), ]
    # Dump the requested timestep into the output file.
    added_nodes = set()
    for source, target, weight in ts_to_edges_dct[ts_to_gen]:
        if source not in added_nodes:
            added_nodes.add(source)
            graph.addNode(str(source), str(source))
        if target not in added_nodes:
            added_nodes.add(target)
            graph.addNode(str(target), str(target))
        # BUG FIX: the original used `ts`, a leftover loop variable from the
        # parsing phase; every edge written here belongs to ts_to_gen.
        edge_id = "%d_to_%d_at_%f" % (source, target, ts_to_gen)
        graph.addEdge(edge_id, str(source), str(target), weight=weight, label=edge_id)
    gexf.write(fout)
    fin.close()
    fts.close()
    fout.close()
def perm_to_app_graph_all(apps, p_maps, categories):
    # Build one global permission->app bipartite GEXF graph across all apps.
    # Nodes are tagged 'p' (permission) or 'a' (app); edges are tagged with
    # the app's editors'-choice flag.
    print 'perm_to_app_graph start ', 'all'
    gexf_a = Gexf('Jianhua Shao', 'all')
    graph_a = gexf_a.addGraph('directed', 'static', 'all')
    attr_node_a = graph_a.addNodeAttribute('node_type', 'app', 'string')
    attr_edge_a = graph_a.addEdgeAttribute('edge_type', 'no', 'string')
    # One node per permission.
    for p_id in p_maps:
        p_label = p_maps[p_id]
        n_a = graph_a.addNode(p_id, '%s_%s'%(str(p_id), p_label))
        n_a.addAttribute(attr_node_a, 'p')
    app_ids_a = {}
    i_t = len(apps)
    i_i = 0
    p = 0
    for app_id in apps:
        # p_percent: progress-reporting helper defined elsewhere in this file.
        p, i_i = p_percent(p, i_i, i_t, 10)
        app = apps[app_id]
        # Skip apps without developer/permission data, with <= 5 max
        # installs, or a zero average rating.
        if not app.has_key('developer'):
            continue
        developer_href = app['developer']
        if developer_href == '' or developer_href == None:
            continue
        if not app.has_key('perms'):
            continue
        installs = app['installs']
        # installs_c: parses the raw installs field -- helper not shown here.
        installs, install_min, install_max, install_average = installs_c(installs)
        if int(install_max) <= 5:
            continue
        rating_average = app['rating_average']
        if float(rating_average) == 0:
            continue
        category = app['category'].lower().strip()
        category_id = categories[category]
        award_editor = 'no'
        award_developer = 'no'
        if app.has_key('awards'):
            for award in app['awards']:
                award = award.strip().lower()
                if award == 'top developer':
                    award_developer = 'yes'
                if award == "editors' choice":
                    award_editor = 'yes'
        # Add each app node at most once.
        if not app_ids_a.has_key(app_id):
            app_ids_a[app_id] = 1
            n_a = graph_a.addNode(app_id, app_id)
            n_a.addAttribute(attr_node_a, 'a')
        ps = app['perms']
        # One edge per (permission, app) pair.
        for p_id in ps:
            e_a = graph_a.addEdge('%s_%s'%(str(p_id), app_id), p_id, app_id)
            e_a.addAttribute(attr_edge_a, award_editor)
    output_file_a = open('./txt/graph/all.gexf', 'w')
    gexf_a.write(output_file_a)
    print 'perm_to_app_graph end all '
def generateXml(G, layers):
    """Write graph *G* to app/static/data/xml/data.xml as static GEXF.

    Every node is labeled with a fake IPv6 address.  Each node's
    'modularity_class' attribute is 1 + the index of the first layer in
    *layers* containing it, or 0 when it appears in no layer.
    """
    gexf = Gexf("lzu.edu", "network based on IPv6")
    graph = gexf.addGraph("undirected", "static", "network based on IPv6")
    # atr1 = graph.addNodeAttribute('ip address',type='string',defaultValue='true')
    atr1 = graph.addNodeAttribute(force_id='modularity_class',
                                  title='Modularity Class', type='integer')
    f = faker.Faker(locale='zh-CN')
    print('ipv6:{}'.format(f.ipv6()))
    nodes = list(G.nodes)
    edges = list(G.edges)
    print('nodes:{}'.format(nodes))
    print('edges:{}'.format(edges))
    # (The original also flattened `layers` into an unused `activate_nodes`
    # list; that dead computation has been removed.)
    for node in nodes:
        tmp = graph.addNode(node, f.ipv6())
        # 1-based index of the first layer containing the node, 0 if none.
        attribute_flag = 0
        for j, layer in enumerate(layers):
            if node in layer:
                attribute_flag = j + 1
                break
        tmp.addAttribute(atr1, str(attribute_flag))
    for i, edge in enumerate(edges):
        graph.addEdge(str(i), str(edge[0]), str(edge[1]))
    xml_file = os.getcwd() + os.sep + 'app' + os.sep + 'static' + os.sep + \
        'data' + os.sep + 'xml' + os.sep + 'data.xml'
    # Close the handle deterministically (the original leaked it).
    with open(xml_file, "wb") as output_file:
        gexf.write(output_file)
def getgexf(g):
    """Serialize graph *g* (networkx-style .nodes()/.edges()) to graph.gexf.

    Nodes get sequential integer ids with the original node object as the
    label; edges likewise get sequential ids.
    """
    gexf = Gexf('hxq', 'test graph')
    # NOTE(review): pygexf's addGraph normally takes (type, mode, label);
    # a single positional here is kept as in the original -- confirm intent.
    graph = gexf.addGraph('undirected graph')
    # BUG FIX: the original incremented a misspelled name ('node_couter'),
    # raising NameError on the second node.
    for node_counter, node in enumerate(g.nodes()):
        graph.addNode(node_counter, node)
    # BUG FIX: the original never incremented edge_counter, so every edge
    # shared id 0.
    for edge_counter, (start, end) in enumerate(g.edges()):
        graph.addEdge(edge_counter, start, end)
    # Close the handle deterministically (the original leaked it).
    with open('graph.gexf', 'w') as output_file:
        gexf.write(output_file)
def convert(self,nodes,ties):
    """Dump *nodes* and *ties* into the GEXF file configured in `config`.

    nodes: mapping of group -> {label: {'id': ...}}; every item becomes a
    node.  ties: sequence of (source, target) pairs; the pair's index is
    used as the edge id.
    """
    gexf = Gexf(config.CREATOR, config.DESCRIPTION)
    graph = gexf.addGraph(config.DEFAULTEDGETYPE, config.MODE, config.LABEL)
    # Walk every node group and register each item under its stored id.
    for group in nodes.keys():
        for label in nodes[group]:
            graph.addNode(nodes[group][label]['id'], label)
    # The tie's position doubles as its edge id.
    for idx, tie in enumerate(ties):
        graph.addEdge(idx, tie[0], tie[1])
    out = open(config.OUTPUT_FILE,'w')
    gexf.write(out)
    out.close()
def convert(self, nodes, ties):
    """Write the given nodes and ties to config.OUTPUT_FILE as GEXF."""
    doc = Gexf(config.CREATOR, config.DESCRIPTION)
    graph = doc.addGraph(config.DEFAULTEDGETYPE, config.MODE, config.LABEL)
    # Register one node per item, keyed by the id stored in the mapping.
    for group_key in nodes.keys():
        group = nodes[group_key]
        for item_label in group:
            graph.addNode(group[item_label]["id"], item_label)
    # Edge id == position of the tie in the input sequence.
    edge_id = 0
    for tie in ties:
        graph.addEdge(edge_id, tie[0], tie[1])
        edge_id += 1
    handle = open(config.OUTPUT_FILE, "w")
    doc.write(handle)
    handle.close()
def derived_x_to_y():
    # Export the thing-derivation relation as both a Graphviz .gv file and
    # a GEXF graph.  Edge attribute "c_days" holds the number of days
    # between the two things' creation dates.
    txt = open("./dot/derived_x_to_y.gv", "w")
    txt.write("//%s\n" % (str(datetime.now())))
    txt.write("digraph graphname {\n")
    gexf = Gexf("Jianhua Shao", "Thingiver derivation mapping")
    graph = gexf.addGraph("directed", "static", "derivation_x_y")
    # attr_node = graph.addNodeAttribute('url', '', 'string')
    attr_edge = graph.addEdgeAttribute("c_days", "", "string")
    # sql = sql_derived_x_to_y
    sql = sql_derived_x_to_y_created_time
    param = ()
    c = conn.cursor()
    c.execute(sql, param)
    for r in c.fetchall():
        print r
        x = r[1]
        y = r[2]
        x_ctime = r[3]
        y_ctime = r[4]
        # NOTE(review): strip() removes any of the characters in
        # "\n\t/thing:" from both ends -- it also eats those letters if an
        # id starts/ends with them; confirm the ids are numeric.
        x = x.strip("\n\t/thing:")
        # x = int(x)
        y = y.strip("\n\t/thing:")
        # y = int(y)
        # print type(x), type(y)
        # print x, y
        # Creation dates arrive formatted like "Jan 01, 2014".
        x_ctime = datetime.strptime(x_ctime, "%b %d, %Y")
        y_ctime = datetime.strptime(y_ctime, "%b %d, %Y")
        duration = (y_ctime - x_ctime).days
        # print duration
        dot_line = "\t{%s} -> {%s} [label=%s];\n" % (x, y, str(duration))
        print dot_line
        txt.write(dot_line)
        n_x = graph.addNode(str(x), str(x))
        n_y = graph.addNode(str(y), str(y))
        # n_x.addAttribute(attr_node, 'string')
        # n_y.addAttribute(attr_node, 'string')
        e = graph.addEdge("%s_%s" % (str(x), str(y)), x, y)
        e.addAttribute(attr_edge, str(duration))
        # print e
    c.close()
    txt.write("}")
    txt.close()
    gexf_file = open("./dot/derived_x_to_y.gexf", "w")
    gexf.write(gexf_file)
    print "finish"
def derived_x_to_y():
    # Duplicate of the same exporter elsewhere in this file (single-quote
    # variant): dumps the thing-derivation relation to a Graphviz .gv file
    # and a GEXF graph, with the day gap between creation dates as the
    # 'c_days' edge attribute.
    txt = open('./dot/derived_x_to_y.gv', 'w')
    txt.write('//%s\n' % (str(datetime.now())))
    txt.write('digraph graphname {\n')
    gexf = Gexf('Jianhua Shao', 'Thingiver derivation mapping')
    graph = gexf.addGraph('directed', 'static', 'derivation_x_y')
    #attr_node = graph.addNodeAttribute('url', '', 'string')
    attr_edge = graph.addEdgeAttribute('c_days', '', 'string')
    #sql = sql_derived_x_to_y
    sql = sql_derived_x_to_y_created_time
    param = ()
    c = conn.cursor()
    c.execute(sql, param)
    for r in c.fetchall():
        print r
        x = r[1]
        y = r[2]
        x_ctime = r[3]
        y_ctime = r[4]
        # NOTE(review): strip() removes any of the characters in
        # '\n\t/thing:' from both ends, not the literal prefix -- confirm
        # the ids are numeric so no real characters are lost.
        x = x.strip('\n\t/thing:')
        #x = int(x)
        y = y.strip('\n\t/thing:')
        #y = int(y)
        #print type(x), type(y)
        #print x, y
        # Creation dates arrive formatted like 'Jan 01, 2014'.
        x_ctime = datetime.strptime(x_ctime, '%b %d, %Y')
        y_ctime = datetime.strptime(y_ctime, '%b %d, %Y')
        duration = (y_ctime - x_ctime).days
        #print duration
        dot_line = '\t{%s} -> {%s} [label=%s];\n' % (x, y, str(duration))
        print dot_line
        txt.write(dot_line)
        n_x = graph.addNode(str(x), str(x))
        n_y = graph.addNode(str(y), str(y))
        #n_x.addAttribute(attr_node, 'string')
        #n_y.addAttribute(attr_node, 'string')
        e = graph.addEdge('%s_%s' % (str(x), str(y)), x, y)
        e.addAttribute(attr_edge, str(duration))
        #print e
    c.close()
    txt.write('}')
    txt.close()
    gexf_file = open('./dot/derived_x_to_y.gexf', 'w')
    gexf.write(gexf_file)
    print "finish"
def developer_to_api_o():
    """Build the programmableweb developer->API GEXF graph.

    Node and edge creation is delegated to the gexf_node_* / gexf_edge_*
    helpers defined elsewhere in this module; the result is written to
    pgw_developer_to_api.gexf under `gexf_path`.
    """
    global conn
    gexf = Gexf("Jianhua Shao", "programaleweb developer to api")
    graph = gexf.addGraph("directed", "static", "ecosystem")
    attr_node = graph.addNodeAttribute("n_type", "mashup", "string")
    gexf_node_developer(developer_to_api_distinct_developer, (), graph, attr_node)
    gexf_node_api(developer_to_api_distinct_api, (), graph, attr_node)
    gexf_edge_developer_to_api(developer_to_api, (), graph)
    # Close the output handle (the original leaked it).
    output_file = open(gexf_path + "pgw_developer_to_api.gexf", 'w')
    gexf.write(output_file)
    output_file.close()
def gen_gexf(ofile='./../gexf/test2.gexf'):
    """Write a minimal two-node "hello world" GEXF file to *ofile*."""
    # The original also imported sys and pprint here; both were unused.
    from gexf import Gexf

    # test helloworld.gexf
    gexf = Gexf("Gephi.org", "A Web network")
    graph = gexf.addGraph("directed", "static", "A Web network")
    atr2 = graph.addNodeAttribute('modularity_class', 'Modularity Class', 'integer')
    tmp = graph.addNode("0", "A")
    tmp.addAttribute(atr2, "0")
    tmp = graph.addNode("1", "B")
    tmp.addAttribute(atr2, "0")
    graph.addEdge("0", "0", "1", weight='1')
    # Close the handle deterministically (the original leaked it).
    with open(ofile, "w") as output_file:
        gexf.write(output_file)
def perm_to_app_graph_each(apps_t, p_maps, categories): for category in apps_t: print 'perm_to_app_graph start ', category category_id = categories[category] apps = apps_t[category] gexf = Gexf('Jianhua Shao', category) graph = gexf.addGraph('directed', 'static', category) attr_node = graph.addNodeAttribute('node_type', 'app', 'string') attr_edge = graph.addEdgeAttribute('edge_type', 'no', 'string') #attr_node_app_award = graph.addNodeAttribute('app_award', 'no', 'string') for p_id in p_maps: p_label = p_maps[p_id] n = graph.addNode(p_id, '%s_%s'%(str(p_id), p_label)) n.addAttribute(attr_node, 'p') app_ids = {} for app_id in apps: app = apps[app_id] award_editor = 'no' award_developer = 'no' if app.has_key('awards'): for award in app['awards']: award = award.strip().lower() if award == 'top developer': award_developer = 'yes' if award == "editors' choice": award_editor = 'yes' if not app_ids.has_key(app_id): app_ids[app_id] = 1 n = graph.addNode(app_id, app_id) n.addAttribute(attr_node, 'a') ps = app['perms'] for p_id in ps: e = graph.addEdge('%s_%s'%(str(p_id), app_id), p_id, app_id) e.addAttribute(attr_edge, award_editor) output_file = open('./txt/graph/%s_%s.gexf'%(str(category_id), category), 'w') gexf.write(output_file) print 'perm_to_app_graph end ', category
# Fragment of a script building a small family-relation GEXF graph.
# NOTE(review): `gexf`, `graph`, `tmp`, `atr1` and `atr2` are created
# before this excerpt; atr1/atr2 appear to be node attributes (a numeric
# value and a true/false flag) -- confirm against the full file.
tmp.addAttribute(atr2, 'false')
tmp = graph.addNode("3", "刘星")
tmp.addAttribute(atr1, "1231231")
tmp.addAttribute(atr2, 'false')
tmp = graph.addNode("4", "夏雨")
tmp.addAttribute(atr1, "2314889")
tmp.addAttribute(atr2, 'false')
tmp = graph.addNode("5", "夏祥")
tmp.addAttribute(atr1, "786767")
tmp.addAttribute(atr2, 'true')
tmp = graph.addNode("6", "范晓英")
tmp.addAttribute(atr1, "786767")
tmp.addAttribute(atr2, 'true')
# Directed, labeled relation edges between the nodes created above
# (labels are Chinese kinship terms: wife, husband, stepmother, ...).
graph.addEdge("0", "0", "1", label='妻子', weight='1')
graph.addEdge("1", "1", "0", label='丈夫', weight='1')
graph.addEdge("2", "0", "2", label='继母', weight='1')
graph.addEdge("3", "2", "3", label='姐姐', weight='1')
graph.addEdge("4", "3", "4", label='哥哥', weight='1')
graph.addEdge("5", "0", "3", label='母亲', weight='1')
graph.addEdge("6", "0", "4", label='继母', weight='1')
graph.addEdge("7", "6", "1", label='母亲', weight='1')
graph.addEdge("8", "5", "1", label='父亲', weight='1')
# Write the document; note the handle is not closed here.
output_file = open("home.gexf", "wb")
gexf.write(output_file)
# NOTE(review): `gexf` and `db` (a GeoIP lookup object) are created before
# this excerpt -- see the full script.
graph = gexf.addGraph("directed", "static", "The internet map")
attrid = graph.addNodeAttribute("country", "??", type="string")


def addIP(ip):
    # Register `ip` as a node exactly once (id == label == the address),
    # tagging it with its GeoIP country code, or "??" when lookup has none.
    if not graph.nodeExists(ip):
        info = db.lookup(ip)
        country = "??"
        if info.country:
            country = info.country
        n = graph.addNode(ip, ip)
        n.addAttribute(attrid, country)


n = 0
# out.csv: one link per line, addresses separated by a tab; dashes in the
# addresses are normalized to dots.
# NOTE(review): a line with more than two tab-separated fields raises
# ValueError at the unpack below -- confirm the input format.
with open("out.csv") as f:
    for line in f:
        ips = tuple(line.strip().replace("-", ".").split('\t'))
        if len(ips) < 2:
            continue
        ip1, ip2 = ips
        addIP(ip1)
        addIP(ip2)
        graph.addEdge("%s->%s" % (ip1, ip2), ip1, ip2)
        n += 1
print n  # number of edges added
with open("test.gexf", "w") as f:
    gexf.write(f)
#!/usr/bin/python
from gexf import Gexf, GexfImport

# test helloworld.gexf: build a minimal two-node directed graph.
gexf = Gexf("Paul Girard", "A hello world! file")
graph = gexf.addGraph("directed", "static", "a hello world graph")
graph.addNode("0", "hello")
graph.addNode("1", "World")
graph.addEdge("0", "0", "1")
# `with` closes the handle deterministically (the original leaked it;
# the output filename typo is preserved for compatibility).
with open("hellowrld.gexf", "w") as output_file:
    gexf.write(output_file)

# test GexfImport: round-trip an existing file through the importer.
with open("exp.gexf") as f, open("exp_bootstrap_bootstrap.gexf", "w+") as o:
    gexf_import = GexfImport(f).gexf()
    gexf_import.write(o)
def convert(team_name, channels_list, graph='mention_based_graph_info', user='******',
            pwd='FluoBySusTech', port=3306, host='10.20.13.209', dbname='rowdata'):
    # Build one "mention-based" GEXF graph per channel in channels_list.
    # Nodes are people (attributes: channels, teams, random weight); edges
    # are mention messages, weighted by TextBlob sentiment polarity.
    # NOTE(review): the `graph` parameter is shadowed/unused -- the table
    # name is re-hardcoded as `m` below; confirm before relying on it.
    # NOTE(security): channel/person ids are concatenated into SQL strings
    # throughout -- switch to parameterized queries if inputs are untrusted.
    from gexf import Gexf
    from textblob import TextBlob
    import random
    import pymysql
    import pandas as pd
    try:
        con = pymysql.Connect(
            host=host,    # database server address
            port=port,    # mysql server port
            user=user,    # mysql user
            passwd=pwd,   # password
            db=dbname,    # database name
        )
        # Cursor for all queries below.
        cur = con.cursor()
    except pymysql.Error as e:
        print("Error %d: %s" % (e.args[0], e.args[1]))
    # team <-> channel relation, transposed into a two-column DataFrame.
    cur.execute('select * from team_channel_relation ')
    team_to_channel = cur.fetchall()
    team_to_channel = list(map(list, zip(*team_to_channel)))
    team = team_to_channel[0][:]
    channel = team_to_channel[1][:]
    team_to_channel = {'team': team, 'channel': channel}
    team_to_channel = pd.DataFrame(team_to_channel)
    del team
    del channel
    for channel_file in channels_list:
        gexf = Gexf("Gephi.org", "A Web network")
        output = gexf.addGraph("directed", "static", "A Web network")
        cur.execute('select * from people_channel_relation where channel_id = \'' + channel_file + '\' ')
        person_and_channel = cur.fetchall()
        if len(person_and_channel) == 0:
            # No members: still emit an (empty) graph file for the channel.
            output_file = open(".\graphdata_of_" + channel_file + ".gexf", "wb")
            gexf.write(output_file)
            output_file.close()
        else:
            # person <-> channel relation for this channel, as a DataFrame.
            person_and_channel = list(map(list, zip(*person_and_channel)))
            person = person_and_channel[0][:]
            channel = person_and_channel[1][:]
            person_and_channel = {'person': person, 'channel': channel}
            person_and_channel = pd.DataFrame(person_and_channel)
            del person
            del channel
            person_list = person_and_channel['person']
            # print(person_and_channel)
            channel_node = output.addNodeAttribute(force_id="Channel", title="channel", type="String")
            team_node = output.addNodeAttribute(force_id="Team", title="team", type="String")
            weight_node = output.addNodeAttribute(force_id="Weight", title="weight", type="float")
            # people_id would map person id -> display name; the population
            # code below is commented out, so lookups fall back to "Null".
            people_id = dict()
            # id_list = ['people']
            # for id in id_list:
            #     cur.execute('select * from ' + id)
            #     data = cur.fetchall()
            #     for i in data:
            #         exec(id + '_id[\'' + i[0] + '\']=i[1]')
            # print('id', id)
            person_set = set(person_list)
            # All channels each member belongs to (across the workspace).
            person_to_channel = []
            for tem_person in person_set:
                cur.execute('select * from people_channel_relation where people_id = \'' + tem_person + '\' ')
                person_to_channel = person_to_channel + list(cur.fetchall())
            person_to_channel = list(map(list, zip(*person_to_channel)))
            person = person_to_channel[0][:]
            channel = person_to_channel[1][:]
            person_to_channel = {'person': person, 'channel': channel}
            person_to_channel = pd.DataFrame(person_to_channel)
            # print(person_to_channel)
            cc = 0
            num2333 = len(person_set)
            # One node per member, annotated with all their channels/teams.
            for tem_id in person_set:
                # print(cc / num2333)
                try:
                    tem_name = people_id[tem_id]
                except KeyError:
                    tem_name = "Null"
                tem_channel_list = set(person_to_channel[person_to_channel['person'] == tem_id]['channel'])
                tmp_node = output.addNode(tem_id, tem_name)
                # NOTE(review): node weight is random, not data-derived.
                tmp_node.addAttribute(weight_node, str(int(100 * random.random())))
                tem_team_list = set()
                for tem_channel in tem_channel_list:
                    # cur.execute('select team_id from team_channel_relation where channel_id = \'' + tem_channel + '\'')
                    # tem_team_list = cur.fetchall()
                    tem_team_list = tem_team_list | set(
                        team_to_channel[team_to_channel['channel'] == tem_channel]['team'])
                for tem_team in tem_team_list:
                    tmp_node.addAttribute(team_node, tem_team)
                for tem_channel in tem_channel_list:
                    tmp_node.addAttribute(channel_node, tem_channel)
                cc = cc + 1
            # Mention records for this channel become edges.
            m = 'mention_based_graph_info'
            cur.execute('select * from ' + m + ' where channel_id = \'' + channel_file + '\' ')
            data = cur.fetchall()
            msg_att = output.addEdgeAttribute(force_id="Message", title="message", type='String', defaultValue='None')
            weight_att = output.addEdgeAttribute(force_id="Weight", title="weight", type='float', defaultValue='0')
            date_att = output.addEdgeAttribute(force_id="Date", title="date", type='float', defaultValue='None')
            channel_att = output.addEdgeAttribute(force_id="Channel", title="channel", type='String', defaultValue='None')
            team_att = output.addEdgeAttribute(force_id="Team",
                                               title="team", type='String', defaultValue='None')
            cc = 0
            numhehe = len(data)
            for tem_m in data:
                # print(cc / numhehe)
                sender, receiver, text, channel_id, team_id, ts = tem_m
                # Edge weight = sentiment polarity of the message text.
                blob = TextBlob(text)
                weight = str(blob.sentiment.polarity)
                # If addEdge fails because an endpoint node is missing, add
                # placeholder node(s) labelled 'Null' and retry.
                try:
                    tem_edge = output.addEdge(sender + receiver + str(cc), sender, receiver, weight=weight)
                except Exception:
                    try:
                        tmp_node = output.addNode(receiver, 'Null')
                        tem_edge = output.addEdge(sender + receiver + str(cc), sender, receiver, weight=weight)
                    except Exception:
                        tmp_node = output.addNode(sender, 'Null')
                        tem_edge = output.addEdge(sender + receiver + str(cc), sender, receiver, weight=weight)
                cc = cc + 1
                tem_edge.addAttribute(msg_att, text)
                tem_edge.addAttribute(weight_att, weight)
                tem_edge.addAttribute(date_att, str(ts))
                tem_edge.addAttribute(team_att, team_id)
                tem_edge.addAttribute(channel_att, channel_id)
            output_file = open(dir_path + "/mention_based/{}_{}.gexf".format(team_name, channel_file), "wb")
            gexf.write(output_file)
            output_file.close()
# NOTE(review): `db`, `collection`, `mapcollection`, `start`, `end` and
# `output` are defined before this excerpt.
# Run the map/reduce only if its output collection does not exist yet.
if not mapcollection in db.collection_names():
    mapF = Code(open('../mapReduce/mapGraph.js','r').read())
    reduceF = Code(open('../mapReduce/reduceGraph.js','r').read())
    collection.map_reduce(mapF,reduceF,query=getDateQuery(start,end), out=mapcollection)
gexf = Gexf(creator="lomo", description="Relations")
graph = gexf.addGraph(type="directed", mode="static", label="relations")
giid = graph.addNodeAttribute("Global Indegree", "0", type="float")
goid = graph.addNodeAttribute("Global Outdegree", "0", type="float")
gort = graph.addNodeAttribute("Retweets", "0", type="float")
gomt = graph.addNodeAttribute("Mentions", "0", type="float")
userMap = {}
userNodeMap = {}
# One node per user from the map/reduce output, highest indegree first.
for user in db[mapcollection].find().sort([('value.indegree', -1)]):
    userMap[user['_id']] = user['value']
    node = graph.addNode(user['_id'], user['_id'])
    node.addAttribute(giid, str(user['value']['indegree']))
    node.addAttribute(goid, str(user['value']['outdegree']))
    node.addAttribute(gort, str(user['value']['rts']))
    node.addAttribute(gomt, str(user['value']['mts']))
    userNodeMap[user['_id']] = node
# Edges only between users that both appear in the output; self-links skipped.
for name in userMap.keys():
    outlinks = userMap[name]['outlinks']
    for link in outlinks:
        if link != name and link in userMap.keys():
            graph.addEdge(name + ":" + link, userNodeMap[name].id, userNodeMap[link].id)
# Write the document; note the handle is not closed here.
file = open(output, 'w')
gexf.write(file)
        # (Continuation of a participant-adding helper whose `def` line is
        # above this excerpt.)
        node = Node(graph, id, db_node['name'],
                    datetime.utcfromtimestamp(db_node['timeNominated']).isoformat())
        # Tag the node with how the challenge ended, stamped with when.
        if 'didDonate' in db_node.properties and db_node['didDonate']:
            node.addAttribute('challenge_status', 'donated',
                              datetime.utcfromtimestamp(db_node['donationDate']).isoformat())
        elif 'timeCompleted' in db_node.properties:
            node.addAttribute('challenge_status', 'completed',
                              datetime.utcfromtimestamp(db_node['timeCompleted']).isoformat())
        graph.nodes[id] = node


def get_nominations():
    # All NOMINATED relationships from the graph database.
    return db.match(rel_type='NOMINATED')


def add_nominations_to_graph(graph):
    # One edge per nomination, nominator -> nominee, dated by the
    # nomination timestamp.
    for db_nomination in get_nominations():
        id = str(db_nomination._id)
        source_id = str(db_nomination.start_node._id)
        target_id = str(db_nomination.end_node._id)
        graph.addEdge(id, source_id, target_id,
                      start=datetime.utcfromtimestamp(db_nomination['timeNominated']).isoformat(),
                      label='NOMINATED')


graph = init_graph()
gexf = Gexf('Brad Ross', 'A Network of Ice Bucket Challenges')
# Attach the pre-built graph to the Gexf document before populating it.
gexf.graphs.append(graph)
add_participants_to_graph(graph)
add_nominations_to_graph(graph)
# Write with statistics; note the handle is not closed here.
output_file = open('nomination_graph.gexf', 'w')
gexf.write(output_file, print_stat=True)
def create_gexf_from_db(self):
    """Export this object's node/edge DB tables to MY_GEXF.gexf.

    Column names are discovered from INFORMATION_SCHEMA so any column
    beyond the reserved GEXF ones becomes a custom string attribute.
    """
    # Discover the column names of the node and edge tables.
    # NOTE(security): table/schema names are concatenated into SQL; they
    # come from this object's configuration -- keep them trusted or switch
    # to parameterized identifiers.
    self.cursor.execute(
        "select string_agg(column_name,',') as str from INFORMATION_SCHEMA.COLUMNS where table_name = '"
        + self.db_table_nodes + "' and table_schema = '" + self.db_scheme + "';")
    self.column_nodes = next(self.cursor)[0]
    self.column_nodes_arr = self.column_nodes.split(',')
    self.cursor.execute(
        "select string_agg(column_name,',') as str from INFORMATION_SCHEMA.COLUMNS where table_name = '"
        + self.db_table_edges + "' and table_schema = '" + self.db_scheme + "';")
    self.column_edges = next(self.cursor)[0]
    self.column_edges_arr = self.column_edges.split(',')
    # Load node rows as dicts keyed by column name.
    # BUG FIX: the original ran this identical query+loop twice in a row,
    # appending every node to nodes_arr_of_dict two times.
    self.cursor.execute("select " + self.column_nodes + " from "
                        + self.db_scheme + "." + self.db_table_nodes + ";")
    for row in self.cursor:
        row_dict = {}
        for i, field in enumerate(self.column_nodes_arr):
            row_dict[field] = str(row[i])
        self.nodes_arr_of_dict.append(row_dict)
    # Load edge rows the same way.
    self.cursor.execute("select " + self.column_edges + " from "
                        + self.db_scheme + "." + self.db_table_edges + ";")
    for row in self.cursor:
        row_dict = {}
        for i, field in enumerate(self.column_edges_arr):
            row_dict[field] = str(row[i])
        self.edges_arr_of_dict.append(row_dict)
    # GEXF operations.
    gexf = Gexf("DAKOKS", "topology_tn_region")
    graph = gexf.addGraph("undirected", "static", "topology_tn_region_graph")
    # Every non-reserved column becomes a custom string attribute.
    for attr in self.column_nodes_arr:
        if attr not in ['id', 'size', 'r', 'g', 'b', 'label', 'x', 'y']:
            graph.addNodeAttribute(title=attr, defaultValue=None, type="string")
    for attr in self.column_edges_arr:
        if attr not in ['id', 'source', 'target', 'label', 'weight', 'r', 'g', 'b']:
            graph.addEdgeAttribute(title=attr, defaultValue=None, type="string")
    for node_data in self.nodes_arr_of_dict:
        graph.addNode(node_data['id'], node_data['label'],
                      r=node_data['r'], g=node_data['g'], b=node_data['b'],
                      x=node_data['x'], y=node_data['y'])
        for attr in graph.attributes['node']:
            graph.nodes[node_data['id']].addAttribute(
                attr, node_data[graph.attributes['node'][attr]['title']])
    for edge_data in self.edges_arr_of_dict:
        graph.addEdge(edge_data['id'], edge_data['source'], edge_data['target'],
                      weight=edge_data['weight'], label=edge_data['label'],
                      r=edge_data['r'], g=edge_data['g'], b=edge_data['b'])
        for attr in graph.attributes['edge']:
            graph.edges[edge_data['id']].addAttribute(
                attr, edge_data[graph.attributes['edge'][attr]['title']])
    # Close the output handle (the original leaked it).
    with open("MY_GEXF.gexf", "w") as output_file:
        gexf.write(output_file)
def mkgexfile(filepath, pathinfo):
    """Write a supplier -> warehouse -> consumer traceability GEXF graph.

    pathinfo = (prodtnames, repo, consumers):
      prodtnames -- factory records: (product name, leave-factory time)
      repo       -- warehouse name / arrival info
      consumers  -- consumer records: (product name, purchase time)
    """
    gexf = Gexf("NJFE_wlw", "A traceability network")
    graph = gexf.addGraph("directed", "", "Back or Foward traceability")
    atr1 = graph.addNodeAttribute('commodity_name', mode="dynamic", defaultValue='', type='string', force_id="commodity_name")
    atr2 = graph.addNodeAttribute('time', mode="dynamic", defaultValue='', type='string', force_id="time")
    atr3 = graph.addNodeAttribute('nodename', mode="dynamic", type='string', defaultValue='', force_id="nodename")  # node name
    atr4 = graph.addNodeAttribute('nodetype', mode="dynamic", type='integer', defaultValue='', force_id="nodetype")  # supplier / repertory / consumer
    prodtnames = pathinfo[0]  # factory info: product name, leave-factory time
    repo = pathinfo[1]        # warehouse info
    consumers = pathinfo[2]   # consumer info: product name, purchase time
    # Supplier nodes.  str(i) replaces the backtick-repr syntax removed in
    # Python 3; output is identical for int indices.
    for i, p in enumerate(prodtnames):
        tmp = graph.addNode('s_' + str(i), "suppliers_%s" % i)
        tmp.addAttribute(atr1, p[0])  # product name
        tmp.addAttribute(atr2, p[1])  # leave-factory date
        tmp.addAttribute(atr4, "0")   # node type: supplier
    # Warehouse node (point of sale).
    tmp = graph.addNode("R_1", "Repertory")
    tmp.addAttribute(atr4, "1")  # node type: repertory
    tmp.addAttribute(atr3, repo)
    # Consumer nodes.
    for i, c in enumerate(consumers):
        tmp = graph.addNode("c_" + str(i), "consumer_%s" % i)
        tmp.addAttribute(atr1, c[0])  # product name
        tmp.addAttribute(atr2, c[1])  # purchase date
        tmp.addAttribute(atr3, "consumer_%s" % i)  # node name
        tmp.addAttribute(atr4, "2")   # node type: consumer
    # Each supplier feeds the warehouse; the warehouse feeds each consumer.
    # Edge ids are sequential across both groups.
    for i, _ in enumerate(prodtnames):
        graph.addEdge(str(i), 's_' + str(i), "R_1", weight='1')
    skiplen = len(prodtnames)
    for i, _ in enumerate(consumers):
        graph.addEdge(str(i + skiplen), "R_1", "c_" + str(i), weight='1')
    # Close the output handle (the original leaked it).
    output_file = open(filepath, "w")
    gexf.write(output_file)
    output_file.close()
# Build "The internet map": a directed GEXF graph of IP-to-IP links read
# from out.csv, each node tagged with its GeoIP country code.
db = geoip.Database('GeoIP.dat')

gexf = Gexf("Alexander Konovalov", "The internet map")
graph = gexf.addGraph("directed", "static", "The internet map")
attrid = graph.addNodeAttribute("country", "??", type="string")


def addIP(ip):
    # Register `ip` as a node exactly once (id == label == the address),
    # with its GeoIP country code, or "??" when the lookup has none.
    if not graph.nodeExists(ip):
        info = db.lookup(ip)
        country = "??"
        if info.country:
            country = info.country
        n = graph.addNode(ip, ip)
        n.addAttribute(attrid, country)


n = 0
# out.csv: one link per line, addresses separated by a tab; dashes in the
# addresses are normalized to dots.
# NOTE(review): a line with more than two tab-separated fields raises
# ValueError at the unpack below -- confirm the input format.
with open("out.csv") as f:
    for line in f:
        ips = tuple(line.strip().replace("-", ".").split('\t'))
        if len(ips) < 2:
            continue
        ip1, ip2 = ips
        addIP(ip1)
        addIP(ip2)
        graph.addEdge("%s->%s" % (ip1, ip2), ip1, ip2)
        n += 1
print n  # number of edges added
with open("test.gexf", "w") as f:
    gexf.write(f)
# NOTE(review): `dicL`, `graph`, `gexf`, `idLinkType`, `idTotalEdgeDuration`,
# `lsArgs` and `start_time` are defined before this excerpt.
id = 0
# dicL maps "src_dst" -> parallel lists; index i describes one contact:
# [0][i] start time, [1][i] end time, [2][i] a value passed straight to
# addEdge, [3][i] link type.  Contacts with zero duration are skipped.
for item in dicL:
    for i in range(len((dicL[item])[0])):
        if ((dicL[item])[0])[i] != ((dicL[item])[1])[i]:
            id += 1
            srcdst = item.split('_')
            edgeDuration = ((dicL[item])[1])[i] - ((dicL[item])[0])[i]
            e = graph.addEdge(str(id), str(srcdst[0]), str(srcdst[1]),
                              ((dicL[item])[2])[i], str(dicL[item][0][i]),
                              str(((dicL[item])[1])[i]), str(item))
            e.addAttribute(idLinkType, dicL[item][3][i])
            e.addAttribute(idTotalEdgeDuration, str(edgeDuration))
import os
# Output folder: Grafos/<input file name without extension>.
dir = ('Grafos/' + (lsArgs[1].split('.'))[0]).replace('arq\\', '')
try:
    os.mkdir(dir)
except OSError:
    print 'Pasta ja existe'
# Write the document; note the handle is not closed here.
gexf_file = open(
    dir + "/" + ((lsArgs[1].split('.'))[0]).replace('arq\\', '') + ".gexf", "w")
gexf.write(gexf_file)
print("::.. %s seconds ..::" % (time.time() - start_time))