def createGraph(results): entities = GdfEntries(Evidence) counter = 1 for i in range(0, len(results), 2): entities.add_node(Evidence(name = results[i], label = counter, filename = results[i] , timestamp = results[i+1], labelvisible = True)) counter += 1 for i in range(0, len(results)-2, 2): for j in range(i+2, len(results), 2): timestamp1 = results[i+1] timestamp2 = results[j+1] if ruleSearchFile.checkTimestamps(timestamp1, timestamp2) == 1: entities.link(results[i], results[j], directed = False) for i in range(0, len(results)-2, 2): timestamp1 = results[i+1] timestamp2 = results[i+3] if ruleSearchFile.checkTimestamps(timestamp1, timestamp2) != 1: entities.link(results[i], results[i+2]) print entities.dumps() f.write(entities.dumps()) f.close()
# coding: utf-8 from gdflib import GdfEntries, Node import csv import re entities = GdfEntries() links = dict() def strip_digits(s): return re.sub("\d+", "", s) def removeWords(w): return w.lower() not in [ "-", "—", ",", ".", "%", ":", "sr", "srs", "sra", "sras", "v.exa", "a", "o", "as", "os", "e", "do", "da", "de", "dos", "das", "na", "no", "nas", "nos", "com", "quem", "um", "uma", "uns", "umas", "esta", "essa", "este", "esse", "estas", "essas", "estes", "esses", "isso", "isto", "em", "para", "se", "mas", "ou", "que", "ao", "à", "aos", "às", "até", "por", "portanto", "pelo", "pelos", "pela", "pelas", "sua", "suas", "seu", "seus", "ali", "aqui", "é", "tem", "são", "foi", "está", "vai", "era", "tinha", "tendo", "palmas", "já" ] def removePonctuations(w): w = w.replace("...", "") w = w.replace("(", "") w = w.replace(")", "") if len(w) > 1:
def test_entries(self):
    """Smoke test: build a two-node graph, add a red edge, serialize it."""
    graph = GdfEntries(Product, Edge)
    for node_name in ('Somenode', 'Otherone'):
        graph.add_node(Product(name=node_name, company='Unknown'))
    graph.link('Somenode', 'Otherone', color=(255,0,0))
    graph.dumps()
def test_entries(self):
    """Exercise the full entries API: add two products, link, dump."""
    entries = GdfEntries(Product, Edge)
    first = Product(name='Somenode', company='Unknown')
    second = Product(name='Otherone', company='Unknown')
    entries.add_node(first)
    entries.add_node(second)
    entries.link('Somenode', 'Otherone', color=(255, 0, 0))
    entries.dumps()
def createGraph(results): entities = GdfEntries(Evidence) counter = 1 for i in range(0, len(results), 2): entities.add_node( Evidence(name=results[i], label=counter, filename=results[i], timestamp=results[i + 1], labelvisible=True)) counter += 1 for i in range(0, len(results) - 2, 2): for j in range(i + 2, len(results), 2): timestamp1 = results[i + 1] timestamp2 = results[j + 1] if ruleSearchFile.checkTimestamps(timestamp1, timestamp2) == 1: entities.link(results[i], results[j], directed=False) for i in range(0, len(results) - 2, 2): timestamp1 = results[i + 1] timestamp2 = results[i + 3] if ruleSearchFile.checkTimestamps(timestamp1, timestamp2) != 1: entities.link(results[i], results[i + 2]) print entities.dumps() f.write(entities.dumps()) f.close()
class LinkingPerformer(object):
    """Callable worker mapped over the friend list by ``Pool.map``.

    For one friend, fetches that friend's friends-of-a-friend and pushes
    (user_id, foaf_id) pairs onto the module-level ``results_queue``
    (defined elsewhere in this file).
    """

    def __init__(self, token):
        # Facebook access token reused for every API call this worker makes.
        self.token = token

    def __call__(self, friend):
        user_id = friend['id']
        print 'Fetching data of User %s...' % user_id
        # get_foaf is a module-level helper (not visible in this chunk).
        for foaf in get_foaf(user_id, self.token):
            results_queue.put((user_id, foaf['id']))


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Fetch Facebook Network Graph')
    parser.add_argument('token', help='Your facebook access token')
    args = parser.parse_args()
    # Graph accumulator; presumably dumped after linking — continuation
    # not visible in this chunk, confirm downstream usage.
    entries = GdfEntries()
    print 'Fetching your friends...'
    # NOTE(review): ``foaf`` and the ``count`` from enumerate below are
    # unused in the visible code — likely consumed later; verify before
    # removing.
    foaf = {}
    friends = get_friends(args.token)
    for count, user in enumerate(friends):
        user_id = user['id']
        # One node per friend, keyed by Facebook user id.
        node = Node(name=user_id, label=user['name'])
        entries.add_node(node)
    perform_linking = LinkingPerformer(args.token)
    # 15 parallel workers fan out the per-friend FOAF fetches.
    pool = Pool(15)
    pool.map(perform_linking, friends)