def get_edge_IP_URL():
    """Create a node pair (reqIP, reqURL) plus an edge for every row in bot.request.

    Streams the nodes/edges to the module-level ``stream`` (Gephi).

    :return: 1 if at least one record was found, 0 otherwise (or on error).
    """
    # Bind before the try so the except handler can always reference it
    # (the original raised NameError if connect() failed first).
    sql = 'select reqIP,reqURL from bot.request '
    try:
        cnx = mysql.connector.connect(user='******', password='******',
                                      host='127.0.0.1', database='bot')
        try:
            cursor = cnx.cursor()
            if VERBOSE:
                print(sql)
            cursor.execute(sql)
            res = cursor.fetchall()
            # Iterate the fetched rows directly.  The original
            # `while cursor:` never terminates (a cursor object is always
            # truthy); it only stopped when res[i] raised IndexError, which
            # the bare except swallowed, making the success path unreachable.
            for i, (req_ip, req_url) in enumerate(res):
                node_a = graph.Node(str(req_ip), size=3, x=10, y=10 * i)
                node_b = graph.Node(str(req_url), size=3, x=1000, y=10 * i)
                stream.add_node(node_a)
                stream.add_node(node_b)
                stream.add_edge(graph.Edge(node_a, node_b))
            if res:
                print('Records Found')
                return 1
            return 0
        finally:
            cnx.close()  # always release the DB connection (was leaked)
    except Exception:  # narrowed from bare except
        if VERBOSE:
            print('\t', 'Error: get_edge_IP_URL (' + sql + ')',
                  sys.exc_info()[0])
        return 0
def export_gephi(self):
    """Export the DNS resolutions of the selected host to Gephi.

    Creates one node per queried domain and one node per resolved IP,
    connected by an edge, streamed over a local Gephi websocket
    (port 8080, workspace1).  On any failure the status label is updated.
    """
    try:
        # sel_host looks like "<ip>_<suffix>"; only the IP part keys the parser.
        host_ip = self.sel_host.split("_")[0]
        print(host_ip)
        stream = streamer.Streamer(
            streamer.GephiWS(hostname="localhost", port=8080,
                             workspace="workspace1"))
        # Hoist the deep attribute chain out of the loops.
        domains = self.obj_dns_parser.csv_obj.h.h.hosts[host_ip].domain
        for domain in domains:
            node_b = graph.Node(domain, size=1)
            txn_map = domains[domain].list
            for txn_id in txn_map:
                for resp in txn_map[txn_id].response:
                    node_a = graph.Node(resp.resolved_ip, size=2)
                    stream.add_node(node_a)
                    stream.add_node(node_b)
                    stream.add_edge(graph.Edge(node_a, node_b))
    except Exception:  # narrowed from bare except (kept as status-label report)
        self.lbl_status.config(text="Failed in export_gephi.")
def addFolder(path):
    """Stream every triple found in <path>/all_data to Gephi.

    Each line is tab-separated into three ':'-delimited fields
    (subject, object, relation); nodes are created for subject and object
    and an edge carrying the relation links them.

    :raises: re-raises any error after printing it (e.g. on a .DS_Store file).
    """
    try:
        abspath = os.path.join(path, 'all_data')
        if os.path.isfile(abspath):
            print(abspath)
            with open(abspath, 'r') as f:
                contents = f.read().splitlines()
            for line in contents:
                # split the row into its three tab-separated columns
                items = re.split(r'\t+', line)
                labels = re.split(r':+', items[0])
                labels_1 = re.split(r':+', items[1])
                labels_2 = re.split(r':+', items[2])
                # add both endpoint nodes, typed by their second field
                stream.add_node(graph.Node(labels[2], type=labels[1]),
                                graph.Node(labels_1[2], type=labels_1[1]))
                # the edge carries the relation label
                stream.add_edge(graph.Edge(labels[2], labels_1[2],
                                           custom_property=labels_2[1]))
            print('success')
    except Exception:  # narrowed from bare except; e.g. the .DS_Store file
        print("Unexpected error:", sys.exc_info()[0])
        raise
def get_edge_IP_URL_excel():
    """Create nodes for each (IP, URL) pair read from a request CSV export.

    Column 1 is the requesting IP, column 2 the requested URL; the URL's
    registrable (second-level) label is used as the URL node id.  Node
    positions come from the module-level point lists ``p`` and ``p2``.

    :return: 1 if at least one record was processed, 0 otherwise (or on error).
    """
    try:
        csv_path = 'E:\\PhD\\python\\traffic\\20160421_150521.pcap_req.csv'
        # `with` closes the file deterministically (the original leaked it).
        results = 0
        with open(csv_path, 'r') as req_infile:
            req_reader = csv.reader(req_infile, delimiter=',', quotechar='"',
                                    quoting=csv.QUOTE_ALL)
            for i, res in enumerate(req_reader):
                node_a = graph.Node(str(res[1]), size=3, x=p[i][0], y=p[i][1])
                parts = str(res[2]).split('.')
                # A two-letter final label suggests a country-code TLD
                # (e.g. example.co.uk): take the third label from the end.
                if len(parts[-1]) == 2:
                    label = parts[-3]
                else:
                    label = parts[-2]
                node_b = graph.Node(label, size=10, x=p2[i][0], y=p2[i][1])
                stream.add_node(node_a)
                stream.add_node(node_b)
                stream.add_edge(graph.Edge(node_a, node_b))
                results += 1
        # `results` was never assigned in the original, so this line raised
        # NameError (silently absorbed by the bare except).
        if results > 0:
            print('Records Found')
            return 1
        return 0
    except Exception:  # narrowed from bare except
        if VERBOSE:
            print('\t', 'Error: get_edge_IP_URL_Excel ', sys.exc_info())
        return 0
def received_message(self, m):
    """Handle one blockchain websocket message.

    Streams the transaction node, a wallet node per input/output address,
    and value-weighted edges between wallets and the transaction.
    """
    payload = json.loads(str(m))
    print(json.dumps(payload, sort_keys=True, indent=4))
    tx = payload['x']

    # Node representing the transaction itself, placed randomly.
    tx_node = graph.Node(tx['hash'], blue=1,
                         x=random.randint(0, 500), y=random.randint(0, 500))
    # Copy selected transaction fields onto the node.
    for key in ['vin_sz', 'vout_sz', 'lock_time', 'relayed_by',
                'tx_index', 'time']:
        tx_node.property[key] = tx[key]
    # Renamed so Gephi does not treat it as the visual "size" attribute.
    tx_node.property['transaction_size'] = tx['size']
    tx_node.property['type'] = 'Transaction'

    # Incoming flows: one wallet node and one weighted edge per input.
    in_nodes = [graph.Node(inp['prev_out']['addr'], red=1,
                           x=random.randint(0, 500),
                           y=random.randint(0, 500),
                           type="Wallet", time=tx["time"])
                for inp in tx['inputs']]
    in_edges = [graph.Edge(inp['prev_out']['addr'], tx_node, True,
                           weight=inp['prev_out']['value'],
                           type=inp['prev_out']['type'])
                for inp in tx['inputs']]

    # Outgoing flows: one wallet node and one weighted edge per output.
    out_nodes = []
    out_edges = []
    for out in tx['out']:
        out_nodes.append(graph.Node(out['addr'], red=1, type="Wallet",
                                    time=tx["time"],
                                    x=random.randint(0, 500),
                                    y=random.randint(0, 500)))
        out_edges.append(graph.Edge(tx_node, out['addr'], True,
                                    weight=out['value'], type=out['type']))

    t.add_node(tx_node, *in_nodes, *out_nodes)
    t.add_edge(*in_edges, *out_edges)
def main():
    """Stream a minimal two-node graph (A--B) to a local Gephi instance."""
    # The original constructed a configured GephiWS (workspace0) and threw
    # it away, then streamed over a second, default-configured websocket.
    # Reuse the configured connection instead of leaking it.
    ws = streamer.GephiWS(hostname='localhost', port=8080,
                          workspace='workspace0')
    stream = streamer.Streamer(ws)
    node_a = graph.Node("A")
    node_b = graph.Node("B")
    stream.add_node(node_a, node_b)
    stream.add_edge(graph.Edge(node_a, node_b))
def add_to_graph(origin, linked_articles):
    """Push the origin article and edges to each linked article to Gephi."""
    origin_node = graph.Node(origin.id, label=origin.title,
                             namespace=origin.namespace)
    # change_node updates the origin if it is already in the workspace.
    stream.change_node(origin_node)
    for linked in linked_articles:
        target = graph.Node(linked.id, label=linked.title,
                            namespace=linked.namespace)
        stream.add_node(target)
        stream.add_edge(graph.Edge(origin_node, target))
    stream.commit()
def GetUniqueIP():
    """Create one Gephi node per distinct requesting IP in bot.request.

    Streams the nodes to the module-level ``stream``.

    :return: 1 if at least one record was found, 0 otherwise (or on error).
    """
    global stream
    # Bind before the try so the except handler can always reference it.
    sql = 'select distinct reqIP from bot.request;'
    try:
        cnx = mysql.connector.connect(user='******', password='******',
                                      host='127.0.0.1', database='bot')
        try:
            cursor = cnx.cursor()
            if VERBOSE:
                print(sql)
            cursor.execute(sql)
            res = cursor.fetchall()
            # Iterate rows directly: the original `while cursor:` spun until
            # res[i] raised IndexError (a cursor is always truthy), so the
            # "Records Found" path below was unreachable.
            for i, row in enumerate(res):
                # NOTE(review): row is a 1-tuple, so the node id renders as
                # "('1.2.3.4',)" — row[0] may have been intended; preserved
                # as-is to keep node ids stable.
                stream.add_node(graph.Node(str(row), size=10,
                                           x=10 * i, y=5 * i))
            if res:
                print('Records Found')
                return 1
            return 0
        finally:
            cnx.close()  # always release the DB connection (was leaked)
    except Exception:  # narrowed from bare except
        if VERBOSE:
            print('\t', 'Error: GetUniqueIP (' + sql + ')', sys.exc_info()[0])
        return 0
def gephi_start(self, threshold=28, workspace='chemspace'):
    """Open a Gephi stream and send the whole chemical-space graph.

    Each node gets a `distance` (similarity to the base fingerprint) and a
    `ranking` (its position when sorted by that distance).  Edges below the
    distance threshold are streamed undirected, weighted inversely to the
    normalized edge distance.

    :param threshold: maximum edge distance to include (default 28).
    :param workspace: Gephi workspace name to stream into.
    """
    self.stream = streamer.Streamer(streamer.GephiWS(workspace=workspace))
    # Fingerprint of the reference compound everything is compared against.
    base = self.fingerprints_df[self.base]
    # [similarity, node-id] pairs; list layout so sort() orders by similarity
    # first and node-id second.
    distances = [[
        ChemicalSpaceGraph.similarity(self.fingerprints_df[x], base), x
    ] for x in self.nodes]
    distances.sort()
    # node-id -> [rank index, similarity value]
    rankings = {x[1]: [i, x[0]] for i, x in enumerate(distances)}
    nodes = [
        graph.Node(x, distance=rankings[x][1], ranking=rankings[x][0])
        for x in self.nodes
    ]
    self.stream.add_node(*nodes)
    # Normalize edge weights by the largest stored edge distance.
    similarities = pd.Series(list(self.edges.values()))
    m = similarities.max()
    # Keep only edges at or under the threshold; weight is inverted so that
    # closer compounds get heavier edges.
    edges = [
        graph.Edge(x, y, directed=False,
                   weight=1 - self.edges[(x, y)] / m,
                   label=self.edges[(x, y)])
        for x, y in self.edges if self.edges[(x, y)] <= threshold
    ]
    self.stream.add_edge(*edges)
def add_node(subreddit):
    """Build a Gephi node for a subreddit.

    Size comes from the normalized subscriber count in the module-level
    ``data`` frame; color is derived deterministically from the category
    name, except NSFW subreddits which are pure blue.  Subreddits missing
    from ``data`` fall back to size 1, empty category, SFW.

    :return: a graph.Node ready to stream.
    """
    try:
        size = data.loc[subreddit].normed_subscribers
        category = data.loc[subreddit].category
        nsfw = data.loc[subreddit].over_18
    except KeyError:
        # Unknown subreddit: neutral defaults.
        size = 1
        category = ""
        nsfw = False
    if nsfw:
        red, green, blue = 0, 0, 1
    else:
        # Unpack three 19-bit fields (524288 == 2**19) of the category hash
        # into RGB channels.  Floor division is required for the shift: the
        # original true division (`/`) produces a float under Python 3,
        # corrupting the fields extracted for green and blue.
        # NOTE(review): str hashes are salted per process in Python 3, so
        # colors vary between runs unless PYTHONHASHSEED is fixed.
        bits = abs(hash(category))
        red = 1 - bits % 524288 / 524288.0
        bits //= 524288
        green = 1 - bits % 524288 / 524288.0
        bits //= 524288
        blue = 1 - bits % 524288 / 524288.0
    return graph.Node(subreddit, label=subreddit, red=red, green=green,
                      blue=blue, size=size)
def addSpecificNode(path, element_name):
    """Stream every transE triple whose subject equals element_name to Gephi.

    Reads <path>/transE, where each tab-separated line holds three
    '_'-delimited fields; nodes are created for subject and object and an
    edge carries the relationship label.  A progress fraction is printed
    every 1000 lines.

    :raises: re-raises any outer error after printing it.
    """
    try:
        abspath = os.path.join(path, 'transE')
        if os.path.isfile(abspath):
            print(abspath)
            with open(abspath, 'r') as f:
                contents = f.read().splitlines()
            length = len(contents)  # fixed `legnth` typo (local name only)
            print(length)
            # enumerate replaces the hand-maintained float counter.
            for index, line in enumerate(contents):
                items = re.split(r'\t+', line)
                labels = re.split(r'_+', items[0])
                labels_1 = re.split(r'_+', items[1])
                labels_2 = re.split(r'_+', items[2])
                # Node names are everything after the first two '_' fields.
                node_name = ''.join(labels[2:])
                node_name_1 = ''.join(labels_1[2:])
                if node_name == element_name:
                    # Best-effort: skip malformed rows without aborting.
                    try:
                        stream.add_node(graph.Node(node_name,
                                                   type=labels[1]))
                        stream.add_node(graph.Node(node_name_1,
                                                   type=labels_1[1]))
                        stream.add_edge(graph.Edge(node_name, node_name_1,
                                                   relationship=labels_2[0]))
                        if index % 1000 == 0:
                            print(index / length)  # fraction processed
                    except Exception:  # narrowed from bare except
                        pass  # item omitted
            print('success')
    except Exception:  # narrowed from bare except; e.g. the .DS_Store file
        print("Unexpected error:", sys.exc_info()[0])
        raise
def add_to_graph(origin, related):
    """Send the origin channel and its related channels to the Gephi stream."""
    origin_node = graph.Node(origin.id, label=origin.name, url=origin.url,
                             subscriptions=origin.subscriptions,
                             views=origin.views)
    # change_node updates the origin if it already exists in the workspace.
    stream.change_node(origin_node)
    for channel in related:
        # Related channels start with zeroed stats; they get real values
        # once visited as an origin themselves.
        related_node = graph.Node(channel.id, label=channel.name,
                                  url=channel.url, subscriptions=0, views=0)
        stream.add_node(related_node)
        stream.add_edge(graph.Edge(origin_node, related_node))
    stream.commit()
def visualize_url_flux(self):
    """Stream candidate fast-flux URLs to Gephi.

    A URL resolving to at least 10 IPs whose list is not recognized by
    check_ip_list is drawn as a node connected to each of its IPs.
    """
    # (The original kept an `i` counter that was never used — removed.)
    for url in self.url:
        ips = list(self.url[url].list)  # materialize the resolved-IP keys
        if len(ips) < 10:
            continue  # too few resolutions to suggest flux
        if not self.check_ip_list(ips):
            url_node = graph.Node(url)
            stream.add_node(url_node)
            for ip in ips:
                ip_node = graph.Node(ip)
                stream.add_node(ip_node)
                stream.add_edge(graph.Edge(url_node, ip_node))
def visualize_ip_flux(self):
    """Stream candidate domain-flux IPs to Gephi.

    An IP serving at least 10 URLs whose list is not recognized by
    check_domain_list is drawn as a node connected to each of its URLs.
    """
    # (The original kept an unused `i` counter, and its comment claimed a
    # minimum of 20 URLs while the code checks 10 — both corrected.)
    for ip in self.ip:
        urls = list(self.ip[ip].list)  # materialize the served-URL keys
        if len(urls) < 10:
            continue  # too few URLs to suggest flux
        if not self.check_domain_list(urls):
            ip_node = graph.Node(ip)
            stream.add_node(ip_node)
            for url in urls:
                url_node = graph.Node(url)
                stream.add_node(url_node)
                stream.add_edge(graph.Edge(ip_node, url_node))
def addfnode(self, fname):
    """Return the file node for fname, creating and caching it on first use."""
    # Guard clause: reuse the cached node if we have already built it.
    if fname in self.nodedict:
        return self.nodedict[fname]
    fresh = graph.Node(fname, size=self.node_size,
                       x=self.r_seed * rn.random(),
                       y=self.r_seed * rn.random(),
                       color="#8080ff", type="f")
    self.nodedict[fname] = fresh  # remember it for subsequent calls
    return fresh
def addnodes(self, pname, fnodenamelist):
    """Stream a package node plus its file nodes and connecting edges.

    The package node grows with the number of files it contains; each
    package->file edge gets a random weight.
    """
    package_size = self.node_size + len(fnodenamelist) * self.size_increment
    package_node = graph.Node(pname, size=package_size,
                              x=self.r_seed * rn.random(),
                              y=self.r_seed * rn.random(),
                              color="#ff8080", type="p")
    self.stream.add_node(package_node)
    for file_name in fnodenamelist:
        file_node = self.addfnode(file_name)  # cached per file name
        self.stream.add_node(file_node)
        self.stream.add_edge(graph.Edge(package_node, file_node,
                                        weight=rn.random()))
def received_message(self, m):
    """Stream one transaction: a wallet node per input and output address,
    plus an edge for every input/output combination."""
    payload = json.loads(str(m))
    print(json.dumps(payload, sort_keys=True, indent=4))
    tx = payload['x']

    def wallet(addr):
        # Random placement inside a 500x500 area.
        return graph.Node(addr, x=random.randint(0, 500),
                          y=random.randint(0, 500))

    # Sending wallets (inputs) and receiving wallets (outputs).
    senders = [wallet(inp['prev_out']['addr']) for inp in tx['inputs']]
    receivers = [wallet(out['addr']) for out in tx['out']]

    # Graph all the things!
    t.add_node(*senders, *receivers)
    t.add_edge(*[graph.Edge(s, r, True)
                 for s, r in itertools.product(senders, receivers)])
def insertNode(self, location=None, pop_size=500):
    """Emit this individual as a GraphStream node via the hub.

    :param location: optional (x, y) pair overriding the random placement.
    :param pop_size: half-extent of the random coordinate range.
    :return: the hub's response to emitNode.
    """
    x_extent = y_extent = pop_size
    attributes = {
        'gen': self.gen,
        'fitness': sum(self.fitness),
        'parents': compact_str(self.parents),
        'x': random.randint(-x_extent, x_extent),
        'y': random.randint(-y_extent, y_extent)
    }
    if location is not None:
        # isinstance (instead of `type(...) in [...]`) also accepts
        # tuple/list subclasses, which is strictly more permissive.
        if isinstance(location, (tuple, list)) and len(location) == 2:
            attributes['x'], attributes['y'] = location
        # else, keep the randomized coordinates set above
    if self.attributes:
        # Need to flatten attributes: the GraphStream API rejects `actions`
        # if node attribute-values are compound JSON types (like [] or {}).
        for k in self.attributes:
            # Bug fix: the original called attributes.update(k, value) —
            # dict.update() does not take a key/value pair of positionals
            # and raises at runtime.  Plain assignment does the intended
            # (correctly overwriting) flatten.
            attributes[k] = compact_str(self.attributes[k])
    node = graph.Node(str(self.index), **attributes)
    response = self.hub.emitNode(node)
    # Returning the response is backward-compatible (previously None).
    return response
# Minimal gephistreamer example: two nodes with custom properties, one edge.
from gephistreamer import graph
from gephistreamer import streamer

stream = streamer.Streamer(streamer.GephiWS())

# A property can be supplied at construction time...
node_a = graph.Node("A", custom_property=1)

# ...or attached afterwards through the node's property dict.
node_b = graph.Node("B")
node_b.property['custom_property'] = 2

# Nodes can be streamed one by one or unpacked from a list:
#   l = [node_a, node_b]; stream.add_node(*l)
stream.add_node(node_a, node_b)

# Edges accept node objects or plain ids:
#   graph.Edge("A", "B", custom_property="hello")
edge_ab = graph.Edge(node_a, node_b, custom_property="hello")
stream.add_edge(edge_ab)
sorted((this_subreddit, re.split('[^a-zA-Z0-9_]', body)[0].lower()))) subreddit_edges[subreddit_tuple] += 1 # if this_subreddit not in subreddits_subscribers: # subreddits_subscribers[this_subreddit] = r.subreddit(this_subreddit).subscribers # if referenced_subreddit not in subreddits_subscribers: # subreddits_subscribers[referenced_subreddit] = r.subreddit(referenced_subreddit).subscribers stream = streamer.Streamer(streamer.GephiWS(workspace="workspace1")) for key, value in subreddit_edges.items(): try: size_a = data.loc[key[0]].norm_subs except KeyError: size_a = 1 try: size_b = data.loc[key[1]].norm_subs except KeyError: size_b = 1 subreddit_a = graph.Node(key[0], Label=key[0], size=size_a) subreddit_b = graph.Node(key[1], Label=key[1], size=size_b) connection = graph.Edge(subreddit_a, subreddit_b, directed=False, weight=value) stream.add_node(subreddit_a, subreddit_b) stream.add_edge(connection)
# Basic imports
from gephistreamer import graph
from gephistreamer import streamer
import itertools
import random
import time

# Same code as the GephiREST example, but over a websocket client that
# keeps its connection open until the script exits -- much faster than REST.
stream = streamer.Streamer(streamer.GephiWS())

# Every ordered pair of distinct characters, in random order.
pairs = list(itertools.permutations('ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890', 2))
random.shuffle(pairs)

for source, target in pairs:
    source_node = graph.Node(source)
    target_node = graph.Node(target)
    stream.add_node(source_node, target_node)
    # time.sleep(0.5)  # Make it slower :D
    stream.add_edge(graph.Edge(source_node, target_node))

# The script may run too fast for the last actions to be sent properly,
# so give the streamer a moment before exiting.
time.sleep(1)