Example #1
 def delete(self,
            user_id,
            facebook=True,
            graph=None,
            app_name=None,
            *args,
            **kwargs):
     if not graph:
         graph = get_graph(request=None,
                           app_name=app_name)  # Method needs static graph
     target = '%s_%s' % (self.id, user_id)
     try:
         super(Request, self).delete(facebook=facebook,
                                     graph=graph,
                                     target=target,
                                     *args,
                                     **kwargs)
     except GraphAPIError:
         graph = get_graph(request=None, app_name=app_name)
         try:
             super(Request, self).delete(facebook=facebook,
                                         graph=graph,
                                         target=target,
                                         *args,
                                         **kwargs)
         except GraphAPIError:
             super(Request, self).delete(facebook=False,
                                         graph=None,
                                         *args,
                                         **kwargs)
Example #2
 def delete(self, facebook=True, graph=None, app_name=None, *args, **kwargs):
     if not graph:
         graph = get_graph(request=None, app_name=app_name) # Method needs static graph
     try:
         super(Request, self).delete(facebook=facebook, graph=graph, *args, **kwargs)
     except GraphAPIError:
         graph = get_graph(request=None, app_name=app_name)
         try:
             super(Request, self).delete(facebook=facebook, graph=graph, *args, **kwargs)
         except GraphAPIError:
             super(Request, self).delete(facebook=False, graph=None, *args, **kwargs)
Example #3
def extractLinkRatios(years,resource):
  ratios = {}
  for y in years[0:-1]:
    ratios[y] = []
    [G1,G2] = [get_graph(y,resource),get_graph(y+1,resource)]
    for e in G1.edges(data=True): #e is current year edge
      try:
        pastEdgeData = G2[e[0]][e[1]]  # raises KeyError if the edge is absent in year y+1
        ratios[y].append((e[0],e[1],float(e[2]['weight'])/pastEdgeData['weight'],float(e[2]['weight']),pastEdgeData['weight']))
      except KeyError:
        pass  # edge not present in the following year's graph
  return ratios
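
Assuming get_graph returns a weighted networkx graph per year, each list entry is (src, dst, weight_y / weight_y+1, weight_y, weight_y+1). A minimal reading sketch with placeholder arguments:

ratios = extractLinkRatios(range(1990, 1995), 'oil')  # 'oil' is a hypothetical resource name
for src, dst, ratio, w_now, w_next in ratios[1990]:
    print src, dst, ratio  # Python 2 print, matching the module's style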
Example #4
 def get_page_access_token(self, request, queryset):
     graph = get_graph(request, force_refresh=True, prefer_cookie=True)
     response = graph.request('me/accounts/')  #&fields=id,access_token
     if response and response.get('data', False):
         data = response['data']
         message = {'count': 0, 'message': u''}
         accounts = {}
         for account in data:
             accounts[int(account['id'])] = account
         for page in queryset:
             if accounts.get(page._id, None):
                 if accounts[page._id].get('access_token', False):
                     queryset.filter(id=page._id).update(
                         _access_token=accounts[page._id]['access_token'])
                     message[
                         'message'] = u'%sSet access token for page %s\n' % (
                             message['message'], page._name)
                 else:
                     message[
                         'message'] = u'%sDid not get access token for page %s\n' % (
                             message['message'], page._name)
             else:
                 message[
                     'message'] = u'%sYou are not admin for page %s\n' % (
                         message['message'], page._name)
         self.message_user(request, '%s\n' % message['message'])
     else:
         self.message_user(request, 'There was an error: %s' % response)
Example #5
 def send_to_facebook(self, app_name=None, graph=None):
     if not graph:
         graph = get_graph(request=None, app_name=app_name)
     if self.score < 0:
         raise AttributeError('The score must be an integer >= 0.')
     return graph.request('%s/scores' % self.user.id, '',
                          {'score': str(self.score)})
Example #6
 def get_from_facebook(self, graph=None, save=False, args=None):
     """ Updates the local fields with data from facebook. Use this function."""
     if not graph:
         graph = get_graph()
     target = str(self._id)
     if args:
         target = '%s?%s' % (target, args)
     try:
         response = graph.request(target)
         if response and save:
             self.save_from_facebook(response)
         elif save:
             self._graph = {
                 'django-facebook-error':
                 'The query returned nothing. Maybe the object is not published, accessible?',
                 'response': response,
                 'access_token': graph.access_token
             }
             self.save()
         else:
             return response
     except GraphAPIError:
         logger.warning('Error in GraphAPI')
         if save:
             self.save()
         return None
Example #7
 def get_from_facebook(self, graph=None, save=settings.DEBUG, quick=True):
     """ Only saves the request to the db if DEBUG is True."""
     if quick and save and self._graph:
         return self
     if not graph:
         graph = get_graph()  # get app graph only
     super(Request, self).get_from_facebook(graph=graph, save=True)
Example #8
    def send_to_facebook(self,
                         object='me',
                         save=False,
                         graph=None,
                         message=None,
                         app_name=None):

        if not graph:
            graph = get_graph(app_name=app_name)
        if not message:
            message = self.message
        app_dict = get_app_dict(app_name)

        #response = post_image(graph.access_token, self.image.file, message, object=object)
        #response = graph.put_photo(self.image.file, message=message)
        image_url = 'http://%s%s' % (app_dict['DOMAIN'], self.image.url)
        logger.debug('image_url: %s' % image_url)
        response = graph.put_photo_url(image_url, message, object)
        logger.debug('response: %s' % response)

        if save:
            self.fb_id = response['id']
            self.slug = response['id']
            self.save()
        return response['id']
Example #9
def glr_method(grammar_path, graph_path, out=None, test=False):
    graph = get_graph(graph_path)
    grammar = get_grammar_automata(grammar_path)
    size = len(graph)
    changed = True

    while changed:
        changed = False
        for start in grammar.s:
            for i in range(size):
                for start_grammar in grammar.s[start]:
                    changed |= dfs(graph, grammar, start, [(i, start_grammar)])

    res = []
    for i in range(size):
        for j in range(size):
            for N in graph[i][j]:
                if N in grammar.s:
                    res.append((i, N, j))

    if not test:
        if out is None:
            for (i, N, j) in res:
                print(str(i) + ',' + N + ',' + str(j))
        else:
            with open(out, 'w') as f:
                for (i, N, j) in res:
                    f.write(str(i) + ',' + N + ',' + str(j) + '\n')

    return res
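
Each result triple (i, N, j) states that nonterminal N derives some path from graph node i to node j. A hedged invocation sketch with placeholder file names:

triples = glr_method('grammar.txt', 'graph.txt', out='reachability.csv')  # hypothetical paths
for (i, N, j) in triples:
    print('%d reaches %d via %s' % (i, j, N))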
Example #10
 def change_view(self, request, object_id, extra_context=None):
     fb_context = {
         'facebook_apps': settings.FACEBOOK_APPS.keys(),
         'graph' : get_graph(request, force_refresh=True, prefer_cookie=True)
     }
     return super(AdminBase, self).change_view(request, object_id,
         extra_context=fb_context)
Example #11
    def save_to_facebook(self, target, graph=None):
        if not graph: graph = get_graph()

        args = {}
        cached_fields = [
            cached for cached in self._meta.get_all_field_names()
            if cached.find('_') == 0
        ]
        for fieldname in cached_fields:
            fieldclass = self._meta.get_field(fieldname)
            field = getattr(self, fieldname)

            if field:
                if isinstance(fieldclass, models.DateField):
                    args[fieldname[1:]] = field.isoformat()
                elif isinstance(fieldclass, JSONField):
                    args[fieldname[1:]] = json.dumps(field)
                elif isinstance(fieldclass, models.FileField) or isinstance(
                        fieldclass, models.ImageField):
                    raise NotImplementedError  # TODO: use code from image field here
                else:
                    args[fieldname[1:]] = field

        # graph.put_object("me", "feed", message="Hello, world")
        response = graph.put_object(parent_object=str(target),
                                    connection_name=self.Facebook.publish,
                                    **args)
        return response
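
The underscore prefix marks locally cached Graph API attributes, so _message is published as message. A minimal model sketch under that assumption (field names hypothetical):

from django.db import models

class Note(Base):  # Base as in the example above
    # Underscore-prefixed fields cache Graph API attributes of the same name.
    _message = models.TextField(blank=True)          # published as 'message'
    _created_time = models.DateTimeField(null=True)  # published as 'created_time'

    class Facebook:
        publish = 'feed'  # connection name used by save_to_facebook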
Example #12
    def __init__(self, channels, random_args, activation, N):
        """
        Keras Layer representing the randomly wired layer.
        Generates a graph and the operations for each node.

        Arguments:
            channels: number of filters per node.
            random_args: dict with parameters for the graph
                generation and the stride of the convolution.
            activation: activation function. Usually ReLU.
            N: number of nodes in the layer.
        """

        super(RandLayer, self).__init__()

        self.graph, self.graph_order, self.start_node, self.end_node = get_graph(
            random_args, N)
        self.triplets = {}
        self.aggregations = {}

        for node in self.graph_order:
            if node in self.start_node:
                self.triplets[node] = Triplet(channels=channels,
                                              activation=None,
                                              strides=random_args['stride'])
            else:
                in_degree = self.graph.in_degree[node]
                if in_degree > 1:
                    self.aggregations[node] = Aggregation(input_dim=in_degree)
                self.triplets[node] = Triplet(channels=channels,
                                              activation=activation)

        self.unweighted_average = tf.reduce_mean
Example #13
def trade_reciprocity(years,resource):
  corrmeans = []
  for year in years:
    G = get_graph(year,resource)
    corrcoeffs = []
    [xs,ys] = [[],[]]
    for country in G.nodes():
      for e in G.edges(country):
        try:
          [x1,y1] = [G[e[0]][e[1]],G[e[1]][e[0]]]
          #print [x1,y1]
          xs.append(x1['weight'])
          ys.append(y1['weight'])
        except KeyError:
          pass  # reciprocal edge missing
    if len(xs)>1:
      cc = np.corrcoef([xs,ys])
      corrcoeffs.append(cc[0][1])
    #print corrcoeffs
    corrmeans.append(np.mean(corrcoeffs))
    print [year,np.mean(corrcoeffs)]
  write({'means':corrmeans, 'years':years},get_results_directory(resource),'meanReciprocityCorrelation')
  plt.clf()
  plt.plot(years,corrmeans)
  plt.title('Mean Correlation of Import/Export By Year')
  plt.xlabel('Year')
  plt.ylabel('Mean Correlation of Import/Export')
  directory = get_images_directory(resource)
  plt.savefig(directory+'meanReciprocityCorrelation.png')
  plt.clf()
  return 0
Example #14
 def get_from_facebook(self, graph=None, save=settings.DEBUG, quick=True):
     """ Only saves the request to the db if DEBUG is True."""
     if quick and save and self._graph:
         return self
     if not graph:
         graph = get_graph() # get app graph only
     super(Request, self).get_from_facebook(graph=graph, save=True)
Example #15
def getEdgeFeatureCSV(years,resource):
  gs={}
  global countries
  cList =[]
  As=[]
  
  for y in years:
    gs[y]=get_graph(y, resource)
    e = get_graph(y,'essex')
    cList.append(set(gs[y].nodes()))
    cList.append(set(e.nodes()))
    #print 'For the year'
    #pprint(gs[y].nodes())
    #pprint(e.nodes())
  #pprint(cList)
  countries = list(set.intersection(*cList))
  print 'Countries included so far:'
  print countries

  edgefeatureYears=years[:-1]
  nodefeatureYear=years[-2]
  nodefeatureYears=[nodefeatureYear]

  nodefeatures=node_feature_extraction(nodefeatureYears,nodefeatureDict,resource)
  edgefeatures=edge_feature_extraction(edgefeatureYears, countries, edgefeatureDict,resource)
  nodeToEdgeFeatures=convertNodalFeaturesToEdgeFeatures(countries, nodefeatureYears, nodefeatures)
  edgefeatures.update(nodeToEdgeFeatures)

  features=edgefeatures.keys()
  check_path('data/raw/features/'+resource[0]+'/edgedata/')
  filename=open('data/raw/features/'+resource[0]+'/edgedata/'+str(years[-1])+'.csv', 'wb')
  writer = csv.writer(filename)
  fnames=copy.deepcopy(features)
  fnames.append("t")
  fnames.insert(0, "edge")
  writer.writerow(fnames)
  for c1 in countries:
    for c2 in countries:
      row=[c1+"_"+c2, ]
      for f in features:
        row.append(edgefeatures[f][(c1, c2)])
      try:
        t=gs[years[-1]][c1][c2]["weight"]
      except KeyError:  # no trade edge recorded for this pair
        t=0
      row.append(t)
      writer.writerow(row)
Example #16
 def change_view(self, request, object_id, extra_context=None):
     fb_context = {
         'facebook_apps': settings.FACEBOOK_APPS.keys(),
         'graph': get_graph(request, force_refresh=True, prefer_cookie=True)
     }
     return super(AdminBase, self).change_view(request,
                                               object_id,
                                               extra_context=fb_context)
Example #17
def gll_method(grammar_filename, graph_filename, out=None, test=False):
    graph = utils.get_graph(graph_filename)
    grammar = utils.get_grammar_automata(grammar_filename)
    gss = defaultdict(lambda: defaultdict(set))
    popped = defaultdict(list)
    q = set()
    used = set()
    res = []
    fins = []

    for it in grammar.f.values():
        for element in it:
            fins.append(element)

    for i in range(len(graph)):
        for j in grammar.s:
            for k in grammar.s[j]:
                q.add((i, k, (j, i)))

    while q:
        conf = q.pop()
        if (conf[0], conf[1], conf[2]) in used:
            continue
        used.add((conf[0], conf[1], conf[2]))

        if conf[1] in fins:
            q.update(
                (conf[0], x, j) for j in gss[conf[2]] for x in gss[conf[2]][j])
            res.append((conf[2][1], conf[2][0], conf[0]))
            popped[conf[2]].append(conf[0])
        for i, labels_grammar in enumerate(grammar.g[conf[1]]):
            for j, lbls_graph in enumerate(graph[conf[0]]):

                for lbl_grammar in labels_grammar:
                    if lbl_grammar not in grammar.t:
                        gss[(lbl_grammar, conf[0])][conf[2]].add(i)
                        gss_node = (lbl_grammar, conf[0])
                        for st in grammar.s[lbl_grammar]:
                            q.add((conf[0], st, gss_node))

                        if gss_node in popped:
                            for v in popped[gss_node]:
                                if (v, i, conf[2]) not in used:
                                    q.add((v, i, conf[2]))
                    for lbl_graph in lbls_graph:
                        if lbl_grammar == lbl_graph and lbl_grammar in grammar.t:
                            q.add((j, i, conf[2]))

    if not test:
        if out is None:
            for (i, N, j) in res:
                print(str(i) + ',' + N + ',' + str(j))
        else:
            with open(out, 'w') as f:
                for (i, N, j) in res:
                    f.write(str(i) + ',' + N + ',' + str(j) + '\n')

    return res
Example #18
 def update_rsvp_status(self, user_id, access_token=None):
      if not access_token: access_token = get_graph().access_token
     response = get_FQL('SELECT rsvp_status FROM event_member WHERE uid=%s AND eid=%s' % (user_id, self.id),
                        access_token=access_token)
     if len(response):
         self.save_rsvp_status(user_id, response[0]['rsvp_status'])
         return response[0]['rsvp_status']
     else:
         return 'not invited'
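
A hedged call sketch; the method returns the RSVP string from FQL or 'not invited' (Event model and ids hypothetical):

event = Event.objects.get(id='111222333')  # hypothetical event using this method
status = event.update_rsvp_status(user_id='444555666')
print(status)  # e.g. 'attending', 'declined', or 'not invited'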
Example #19
def edge_feature_extraction(years, countries, featureDict):
  
  featureData = {}
  for year in years:
    print year
    G = get_graph(year,'essex')
    for f in featureDict:
      featureData[f+str(year)] = featureDict[f](G, countries, year)
  return featureData
Example #20
 def delete(self, facebook=False, graph=None, *args, **kwargs):
     """ Deletes the local model and if facebook is true, also the facebook instance."""
     if facebook:
         if not graph: graph = get_graph()
         graph.delete_object(str(self.id))
     try:
         # if the model is abstract, it cannot be saved, but thats ok
         super(Base, self).delete(*args, **kwargs)
     except: # AssertionError
         pass
Example #21
def linksAddedPerYear(years,resource):
  #predicates = list of pairs of lambda expressions returning true/false
  predicates = [[lambda x: x[2]['weight']>0, lambda x: x[2]['weight']>0],[lambda x: x[2]['weight']>100000, lambda x: x[2]['weight']>200000]]
  links = {}
  for y in years[0:-2]:
    [G1,G2] = [get_graph(y,resource),get_graph(y+1,resource)]
    [E1,E2] = [G1.edges(data=True),G2.edges(data=True)]
    #print [len(E1),len(G1.nodes()),len(E1)/(float(len(G1.nodes()))*len(G1.nodes()))]
    links[y] = {}
    counter = 0
    for p in predicates:
      counter += 1
      [newE1, newE2] = [filter(p[0],E1),filter(p[1],E2)]
      newE1simple = map(lambda x: (x[0],x[1]), newE1)
      #addedEdges = [x for x in newE2 if (x[0],x[1]) not in newE1simple]
      addedEdges = filter(lambda x: (x[0],x[1]) not in newE1simple, newE2)
      links[y][counter] = addedEdges
      print [y, counter, len(addedEdges)]
  return links
Example #22
def visualizeGraphs(years,resource):
  for year in years:
    G = get_graph(year,resource)
    plt.clf()
    # Visualize the network
    ecolors = map(lambda e: e[2]['weight'], G.edges(data=True))  # edge weights (unused below)
    pos=nx.spring_layout(G)
    nx.draw(G,pos=pos,node_size=80,with_labels=True)
    plt.savefig(get_images_directory(resource)+"graph"+str(year)+'.png')
  return 0
Example #23
 def update_rsvp_status(self, user_id, access_token=None):
     if not access_token: access_token = get_graph().access_token
     response = get_FQL(
         'SELECT rsvp_status FROM event_member WHERE uid=%s AND eid=%s' %
         (user_id, self.id),
         access_token=access_token)
     if len(response):
         self.save_rsvp_status(user_id, response[0]['rsvp_status'])
         return response[0]['rsvp_status']
     else:
         return 'not invited'
Example #24
def matrix_method(grammar_filename, graph_filename, out=None, test=False):
    graph = utils.get_graph(graph_filename)
    grammar_rules, epsilons = utils.get_grammar_homsky(grammar_filename)
    N = len(graph)
    A = [[[] for i in range(N)] for j in range(N)]
    terms = set()
    transitions = defaultdict(list)

    for key in grammar_rules:
        for rule in grammar_rules[key]:
            lhs = ''.join(x for x in rule)
            transitions[lhs].append(key)
            if len(rule) == 1:
                terms.add(rule[0])

    for i in range(N):
        for j in range(N):
            for element in graph[i][j]:
                if element in terms:
                    A[i][j] += transitions[element]
    for key in epsilons:
        for i in range(N):
            A[i][i].append(key)
    changed = True
    while changed:
        changed = False
        for i in range(N):
            for j in range(N):
                for k in range(N):
                    for part1 in A[i][j]:
                        for part2 in A[j][k]:
                            rule = part1 + part2
                            if rule in transitions:
                                lhs = transitions[rule]
                                for element in lhs:
                                    if element not in A[i][k]:
                                        A[i][k].append(element)
                                        changed = True
    res = []
    for i in range(N):
        for j in range(N):
            for k in A[i][j]:
                res.append((i, k, j))

    if not test:
        if out is None:
            for (i, N, j) in res:
                print(str(i) + ',' + N + ',' + str(j))
        else:
            with open(out, 'w') as f:
                for (i, N, j) in res:
                    f.write(str(i) + ',' + N + ',' + str(j) + '\n')
    return res
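
The triple-nested loop is a fixpoint closure in CYK style: whenever a production lhs -> P1 P2 exists and P1 labels cell (i, j) while P2 labels (j, k), lhs is added to (i, k). A self-contained toy run of that step (grammar and graph invented for illustration):

from collections import defaultdict

# Toy CNF grammar S -> A B, A -> a, B -> b over the two-edge graph 0 -a-> 1 -b-> 2.
transitions = defaultdict(list, {'a': ['A'], 'b': ['B'], 'AB': ['S']})
A = [[[] for _ in range(3)] for _ in range(3)]
A[0][1], A[1][2] = ['A'], ['B']

# One pass of the closure; matrix_method repeats this until nothing changes.
for i in range(3):
    for j in range(3):
        for k in range(3):
            for part1 in A[i][j]:
                for part2 in A[j][k]:
                    A[i][k].extend(transitions.get(part1 + part2, []))

print(A[0][2])  # ['S'] -- S derives the path 0 -> 2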
Example #25
    def act(self, obs):

        x, batch, ei, _ = get_graph(obs)
        out = self.memory(x, batch, ei)
        action_probas = self.actor(out)
        values = self.critic(out)

        # TODO: sample action probabilistically from policy
        # TODO: entropy/exploration ?
        action = 0

        return action
Example #26
 def delete(self, facebook=False, graph=None, target=None, *args, **kwargs):
     """ Deletes the local model and if facebook is true, also the facebook instance."""
     if facebook:
         if not graph:
             graph = get_graph()
         if not target:
             target = str(self.id)
         graph.delete_object(target)
     try:
         # if the model is abstract, it cannot be saved, but thats ok
         super(Base, self).delete(*args, **kwargs)
     except:  # AssertionError
         pass
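
A hedged call sketch (Post is a hypothetical model built on Base): omitting graph makes the method fetch an app graph itself, and target defaults to the object's own Facebook id.

post = Post.objects.get(pk=42)  # hypothetical subclass of Base
post.delete(facebook=True)      # removes the Graph object first, then the local row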
Example #27
def degreeDistributions(years, resource):
  for y in years:
    g=get_graph(y, resource) 
    degrees=[]
    for n in g.nodes():
      degrees.append(len(g[n]))
    plt.clf()
    plt.hist(degrees)
    plt.xlabel('Degree')
    plt.ylabel('Counts')
    plt.title("Degree Distribution "+str(year))
    plt.savefig(get_images_directory(resource)+'degreeHist'+str(y)+'.png')
    plt.clf()
Example #28
    def send_to_facebook(self, object="me", save=False, graph=None, message=None, app_name=None):

        if not graph:
            graph = get_graph(app_name=app_name)
        if not message:
            message = self.message

        response = post_image(graph.access_token, self.image.file, message, object=object)

        if save:
            self.fb_id = response["id"]
            self.save()
        return response["id"]
Example #29
def test_mlp_partial_simple_epoch(epoch,
                                  args,
                                  rnn,
                                  output,
                                  data_loader,
                                  save_histogram=False,
                                  sample_time=1):
    rnn.eval()
    output.eval()
    G_pred_list = []
    for batch_idx, data in enumerate(data_loader):
        x = data["x"].float()
        y = data["y"].float()
        y_len = data["len"]
        test_batch_size = x.size(0)
        rnn.hidden = rnn.init_hidden(test_batch_size)
        # generate graphs
        max_num_node = int(args.max_num_node)
        y_pred = Variable(
            torch.zeros(
                test_batch_size, max_num_node,
                args.max_prev_node)).cuda()  # normalized prediction score
        y_pred_long = Variable(
            torch.zeros(test_batch_size, max_num_node,
                        args.max_prev_node)).cuda()  # discrete prediction
        x_step = Variable(torch.ones(test_batch_size, 1,
                                     args.max_prev_node)).cuda()
        for i in range(max_num_node):
            print("finish node", i)
            h = rnn(x_step)
            y_pred_step = output(h)
            y_pred[:, i:i + 1, :] = F.sigmoid(y_pred_step)
            x_step = sample_sigmoid_supervised_simple(
                y_pred_step,
                y[:, i:i + 1, :].cuda(),
                current=i,
                y_len=y_len,
                sample_time=sample_time,
            )

            y_pred_long[:, i:i + 1, :] = x_step
            rnn.hidden = Variable(rnn.hidden.data).cuda()
        y_pred_data = y_pred.data
        y_pred_long_data = y_pred_long.data.long()

        # save graphs as pickle
        for i in range(test_batch_size):
            adj_pred = decode_adj(y_pred_long_data[i].cpu().numpy())
            G_pred = get_graph(adj_pred)  # get a graph from zero-padded adj
            G_pred_list.append(G_pred)
    return G_pred_list
Example #30
def f_export_diff(G1, countries, year):
  expDiffDat={}
  G0=get_graph(year-1, "essex")
  for c1 in countries:
    for c2 in countries:
      try:
        exp0=G0[c1][c2]["weight"]
      except KeyError:  # no export edge recorded
        exp0=0
      try:
        exp1=G1[c1][c2]["weight"]
      except KeyError:
        exp1=0
      expDiffDat[(c1, c2)]=exp1-exp0
  return expDiffDat
Example #31
def p_newEdge_degree(years,resource):
  # p = a pair of lambda predicates (current year, next year) returning True/False
  p = [lambda x: x[2]['weight']>0, lambda x: x[2]['weight']>0]
  toCounts=[]
  fromCounts=[]
  for y in years[0:-2]:
    [G1,G2] = [get_graph(y,resource),get_graph(y+1,resource)]
    [E1,E2] = [G1.edges(data=True),G2.edges(data=True)]
    [newE1, newE2] = [filter(p[0],E1),filter(p[1],E2)]
    newE1simple = map(lambda x: (x[0],x[1]), newE1)
    addedEdges = filter(lambda x: (x[0],x[1]) not in newE1simple, newE2)
    print len(addedEdges)
    for e in addedEdges:
      toCounts.append(len(G2[e[1]]))
      fromCounts.append(len(G2[e[0]]))

  plt.clf()
  plt.hist(toCounts, normed=False, bins=range(0, 200, 10))
  plt.title("p(new link to|d)")
  plt.savefig(get_images_directory(resource)+'p(d| new link to).png')
  plt.clf()
  plt.hist(fromCounts, normed=False,  bins=range(0, 200, 10))
  plt.title("p(new link from|d)")
  plt.savefig(get_images_directory(resource)+'p(d| new link from).png')
Example #32
def f_import_diff(G1, countries, year):
  impDiffDat={}
  G0=get_graph(year-1, "essex")
  for c1 in countries:
    for c2 in countries:
      try:
        imp0=G0[c2][c1]["weight"]
      except KeyError:  # no import edge recorded
        imp0=0
      try:
        imp1=G1[c2][c1]["weight"]
      except KeyError:
        imp1=0
      impDiffDat[(c1, c2)]=imp1-imp0
  return impDiffDat
Example #33
def test_vae_epoch(epoch,
                   args,
                   rnn,
                   output,
                   test_batch_size=16,
                   save_histogram=False,
                   sample_time=1):
    rnn.hidden = rnn.init_hidden(test_batch_size)
    rnn.eval()
    output.eval()

    # generate graphs
    max_num_node = int(args.max_num_node)
    y_pred = Variable(
        torch.zeros(test_batch_size, max_num_node,
                    args.max_prev_node)).cuda()  # normalized prediction score
    y_pred_long = Variable(
        torch.zeros(test_batch_size, max_num_node,
                    args.max_prev_node)).cuda()  # discrete prediction
    x_step = Variable(torch.ones(test_batch_size, 1,
                                 args.max_prev_node)).cuda()
    for i in range(max_num_node):
        h = rnn(x_step)
        y_pred_step, _, _ = output(h)
        y_pred[:, i:i + 1, :] = F.sigmoid(y_pred_step)
        x_step = sample_sigmoid(y_pred_step,
                                sample=True,
                                sample_time=sample_time)
        y_pred_long[:, i:i + 1, :] = x_step
        rnn.hidden = Variable(rnn.hidden.data).cuda()
    y_pred_data = y_pred.data
    y_pred_long_data = y_pred_long.data.long()

    # save graphs as pickle
    G_pred_list = []
    for i in range(test_batch_size):
        adj_pred = decode_adj(y_pred_long_data[i].cpu().numpy())
        G_pred = get_graph(adj_pred)  # get a graph from zero-padded adj
        G_pred_list.append(G_pred)

    # save prediction histograms, plot histogram over each time step
    # if save_histogram:
    #     save_prediction_histogram(y_pred_data.cpu().numpy(),
    #                           fname_pred=args.figure_prediction_save_path+args.fname_pred+str(epoch)+'.jpg',
    #                           max_num_node=max_num_node)

    return G_pred_list
Example #34
 def process_request(self, request):
     app_requests = []
     if request.GET.get('request_ids', None):
         fb = get_session(request)
         request_ids = urllib.unquote(request.GET.get('request_ids'))
         request_ids = request_ids.split(',')
         logger.debug('Got app request ids: %s' % request_ids)
         for id in request_ids:
             r, created = AppRequest.objects.get_or_create(id=int(id))
             if settings.DEBUG and created:
                 try:
                     graph = get_graph(request)
                     r.get_from_facebook(graph, save=True)
                 except GraphAPIError:
                     pass
             app_requests.append(r.id)
         if len(app_requests) > 0:
             fb.app_requests = app_requests
Example #36
    def send_to_facebook(self, object='me', save=False, graph=None, message=None, app_name=None):

        if not graph:
            graph = get_graph(app_name=app_name)
        if not message:
            message = self.message
        app_dict = get_app_dict(app_name)

        #response = post_image(graph.access_token, self.image.file, message, object=object)
        #response = graph.put_photo(self.image.file, message=message)
        image_url = 'http://%s%s' % (app_dict['DOMAIN'], self.image.url)
        logger.debug('image_url: %s' % image_url)
        response = graph.put_photo_url(image_url, message, object)
        logger.debug('response: %s' % response)

        if save:
            self.fb_id = response['id']
            self.slug = response['id']
            self.save()
        return response['id']
Example #37
def test_rnn_epoch(epoch, args, rnn, output, test_batch_size=16):
    rnn.hidden = rnn.init_hidden(test_batch_size)
    rnn.eval()
    output.eval()

    # generate graphs
    max_num_node = int(args.max_num_node)
    y_pred_long = Variable(
        torch.zeros(test_batch_size, max_num_node,
                    args.max_prev_node)).cuda()  # discrete prediction
    x_step = Variable(torch.ones(test_batch_size, 1,
                                 args.max_prev_node)).cuda()
    for i in range(max_num_node):
        h = rnn(x_step)
        # output.hidden = h.permute(1,0,2)
        hidden_null = Variable(
            torch.zeros(args.num_layers - 1, h.size(0), h.size(2))).cuda()
        output.hidden = torch.cat((h.permute(1, 0, 2), hidden_null),
                                  dim=0)  # num_layers, batch_size, hidden_size
        x_step = Variable(torch.zeros(test_batch_size, 1,
                                      args.max_prev_node)).cuda()
        output_x_step = Variable(torch.ones(test_batch_size, 1, 1)).cuda()
        for j in range(min(args.max_prev_node, i + 1)):
            output_y_pred_step = output(output_x_step)
            output_x_step = sample_sigmoid(output_y_pred_step,
                                           sample=True,
                                           sample_time=1)
            x_step[:, :, j:j + 1] = output_x_step
            output.hidden = Variable(output.hidden.data).cuda()
        y_pred_long[:, i:i + 1, :] = x_step
        rnn.hidden = Variable(rnn.hidden.data).cuda()
    y_pred_long_data = y_pred_long.data.long()

    # save graphs as pickle
    G_pred_list = []
    for i in range(test_batch_size):
        adj_pred = decode_adj(y_pred_long_data[i].cpu().numpy())
        G_pred = get_graph(adj_pred)  # get a graph from zero-padded adj
        G_pred_list.append(G_pred)

    return G_pred_list
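
A hedged sketch of consuming the generated list; the pickle file name is invented:

import pickle

G_pred_list = test_rnn_epoch(epoch, args, rnn, output, test_batch_size=16)
with open('graphs_pred_epoch_%d.pkl' % epoch, 'wb') as f:
    pickle.dump(G_pred_list, f)  # each item is a networkx graph built by get_graph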
Example #38
 def get_page_access_token(self, request, queryset):
     graph = get_graph(request, force_refresh=True, prefer_cookie=True)
     response = graph.request('me/accounts/')   #&fields=id,access_token
     if response and response.get('data', False):
         data = response['data']
         message = {'count': 0, 'message': u''}
         accounts = {}
         for account in data:
             accounts[int(account['id'])] = account
         for page in queryset:
             if accounts.get(page._id, None):
                 if accounts[page._id].get('access_token', False):
                     queryset.filter(id=page._id).update(_access_token=accounts[page._id]['access_token'])
                     message['message'] = u'%sSet access token for page %s\n' % (message['message'], page._name)
                 else:
                     message['message'] = u'%sDid not get access token for page %s\n' % (message['message'], page._name)
             else:
                 message['message'] = u'%sYou are not admin for page %s\n' % (message['message'], page._name)
         self.message_user(request, '%s\n' % message['message'])
     else:
         self.message_user(request, 'There was an error: %s' % response)
Example #39
def node_feature_extraction(years,featureDict):
  """
  Saves pickle of and returns a dict of feature dicts in the form
        {year:{featureName:{countryCode:featureValue}}}
  :param years: a list e.g. range(1980,2000)
  :param featureDict: a dictionary of features in the form
                    {featureName:featureFunction}
                    featureName is a string
                    featureFunction: takes a graph (and possibly a year) 
                        and outputs a dictionary {node:featureValue}
  """
  featureData = {}
  for year in years:
    print year
    G = get_graph(year,'essex')
    featureData[year] = {}
    for f in featureDict:
      featureData[year][f] = featureDict[f](G,year)
    write(featureData[year],'data/raw/essex/features/pickle/',str(year))
    f_to_csv(featureData[year],'data/raw/essex/features/csv/',str(year))
  return featureData
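
For reference, a minimal featureDict matching the documented shape, with hypothetical feature functions (networkx assumed):

import networkx as nx

# Each function takes a graph (and a year) and returns {node: featureValue}.
nodefeatureDict = {
    'degree':   lambda G, year: dict(G.degree()),
    'pagerank': lambda G, year: nx.pagerank(G),
}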
Example #40
def getEdgeFeatureCSV(years):
  gs={}
  cList=[]
  As=[]
  
  for y in years:
    gs[y]=get_graph(y, "essex")
    cList.append(set(gs[y].nodes()))
  countries = list(set.intersection(*cList))

  
  edgefeatureYears=years[:-1]
  nodefeatureYear=years[-2]
  nodefeatureYears=[nodefeatureYear]

  nodefeatures=node_feature_extraction(nodefeatureYears,nodefeatureDict)
  edgefeatures=edge_feature_extraction(edgefeatureYears, countries, edgefeatureDict)
  nodeToEdgeFeatures=convertNodalFeaturesToEdgeFeatures(countries, nodefeatureYears, nodefeatures)
  edgefeatures.update(nodeToEdgeFeatures)

  features=edgefeatures.keys()
  print features
  filename=open('edgedata.csv', 'wb')
  writer = csv.writer(filename)
  fnames=copy.deepcopy(features)
  fnames.append("t")
  fnames.insert(0, "edge")
  writer.writerow(fnames)
  for c1 in countries:
    for c2 in countries:
      row=[c1+"_"+c2, ]
      for f in features:
        row.append(edgefeatures[f][(c1, c2)])
      try:
        t=gs[years[-1]][c1][c2]["weight"]
      except KeyError:  # no trade edge for this pair
        t=0
      row.append(t)
      writer.writerow(row)
Example #41
def dist_cor(years, resource):
  cors=[]
  for year in years:
    G = get_graph(year,resource)
    distDict=f_distance_pairs(G, G.nodes(), year)
    E=G.edges(data=True)
    ws=[]
    ds=[]
    for e in E:
      ws.append(e[2]["weight"])
      ds.append(distDict[(e[0], e[1])])
    cors.append(stats.pearsonr(ws, ds)[0])
  plt.clf()
  print len(years)
  print len(cors)
  print years
  print cors
  plt.plot(years, cors)
  plt.title("Correlation of Distance and Trade Volume")
  plt.xlabel("year")
  plt.ylabel("correlation")
  plt.show()
Example #42
    def save_to_facebook(self, target, graph=None):
        if not graph: graph = get_graph()
        
        args = {}
        cached_fields = [cached for cached in self._meta.get_all_field_names() if cached.find('_') == 0]
        for fieldname in cached_fields:
            fieldclass = self._meta.get_field(fieldname)
            field = getattr(self, fieldname)
            
            if field:
                if isinstance(fieldclass, models.DateField):
                    args[fieldname[1:]] = field.isoformat()
                elif isinstance(fieldclass, JSONField):
                    args[fieldname[1:]] = json.dumps(field)
                elif isinstance(fieldclass, models.FileField) or isinstance(fieldclass, models.ImageField):
                    raise NotImplementedError  # TODO: use code from image field here
                else:
                    args[fieldname[1:]] = field

        # graph.put_object("me", "feed", message="Hello, world")
        response = graph.put_object(parent_object=str(target), connection_name=self.Facebook.publish, **args)
        return response
Example #43
def macroEvolution(years, resource):
  densities=[]
  diameters=[]
  for y in years:
    g=get_graph(y,resource)
    numNodes=float(len(g.nodes()))
    numEdges=len(g.edges())
    densities.append(numEdges/(numNodes*(numNodes-1)))
    diameters.append(SampledDiameter(g))
  plt.clf()
  plt.plot(years, densities)
  plt.title("Density Evolution")
  plt.xlabel('Year')
  plt.ylabel('Density')
  plt.savefig(get_images_directory(resource)+'density'+'.png')

  plt.clf()
  plt.plot(years, diameters)
  plt.title("Diameter Evolution")
  plt.xlabel('Year')
  plt.ylabel('Diameter')
  plt.savefig(get_images_directory(resource)+'diameter'+'.png')
Example #44
 def get_from_facebook(self, graph=None, save=False, args=None):
     """ Updates the local fields with data from facebook. Use this function."""
     if not graph:
         graph = get_graph()
     target = str(self._id)
     if args:
         target = '%s?%s' % (target, args)
     try:
         response = graph.request(target)
         if response and save:
             self.save_from_facebook(response)
         elif save:
             self._graph = {'django-facebook-error' : 'The query returned nothing. Maybe the object is not published, accessible?',
                            'response': response,
                            'access_token': graph.access_token }
             self.save()
         else:
             return response
     except GraphAPIError:
         logger.warning('Error in GraphAPI')
         if save:
             self.save()
         return None
Example #45
 def get_from_facebook(self, graph=None, save=False, args=None):
     if not graph:
         graph = get_graph()
     target = str(self._id)
     if args:
         target = "%s?%s" % (target, args)
     try:
         response = graph.request(target)
         if response and save:
             self.save_from_facebook(response)
         elif save:
             self._graph = {
                 "django-facebook-error": "The query returned nothing. Maybe the object is not published, accessible?",
                 "response": response,
                 "access_token": graph.access_token,
             }
             self.save()
         else:
             return response
     except GraphAPIError:
         logger.warning("Error in GraphAPI")
         if save:
             self.save()
         return None
Example #46
 def save_model(self, request, obj, form, change):
     graph = get_graph(request, force_refresh=True, prefer_cookie=True)
     obj.get_from_facebook(save=True, graph=graph)
Example #47
from search import NetworkMiner
from utils import get_graph
from plot import plot


if __name__ == '__main__':

    miner = NetworkMiner(
        breadth_limit=5,
        min_popularity=65,
        include_collaborators=False,
        max_pop_size=None,
        verbose=True,
    )

    uri = 'spotify:artist:329e4yvIujISKGKz1BZZbO'
    fname = 'farruko_no_collab'

    miner.write_edgelist(artist=uri, fname=fname)

    G = get_graph('../derivatives/{}_attributes.pkl'.format(fname),
                  '../derivatives/{}.edgelist'.format(fname))

    plot(G, fname='farruko')
Example #48
    try:
        draw_it = sys.argv[3] == 'draw'
    except IndexError:
        draw_it = False


    try:
        draw_rank = int(sys.argv[4])
    except IndexError:
        draw_rank = size/2



    epsilon = 0.001

    g, n, m, s = get_graph(name, size=size)


    ts = get_targets(g, s, [ 2**i for i in xrange(4, int(math.log(n, 2))) if 2**i < n ])
    if draw_it:
        ts = get_targets(g, s, [draw_rank])
    algorithms = [ a_star_bidirectional ]      
#    algorithms = [ dijkstra_cancel, dijkstra_bidirectional, a_star, a_star_bidirectional  ]
#    algorithms = [ dijkstra_cancel, dijkstra_bidirectional, dijkstra_bidirectional_mue, a_star,
#                   a_star_bidirectional, a_star_bidirectional_onesided, a_star_bidirectional_betterpi, cheater ]
    results = defaultdict(list)
    base_results = []
    sys.stdout.write('running ')
    count = 0
    for t, rank in ts:
        base_results.append(dijkstra_cancel(g, s, t))
"""

import utils
import netshield
import ctmc
import numpy as np
import matplotlib.pyplot as plt

name = "graphs/day3.gml"
k = 20
b = 1
lambd = 0.25
mu = 0.1
max_steps = 1000

graph = utils.get_graph(name)
adj = utils.get_adj_np(graph)

res_ns = netshield.netshield(adj, k)

print("Result for NetShield algorithm: Indices: {}, eigendrop: {}".format(
    res_ns[0], res_ns[1]))

res_ns_plus = netshield.netshield_plus(adj, k, b)

print(
    "Result for NetShield+ algorithm with batch size 1: Indices: {}, eigendrop: {}"
    .format(res_ns_plus[0], res_ns_plus[1]))

infected, time_passed = ctmc.ctmc_sis(graph, lambd, mu, max_steps)
Example #50
 def send_to_facebook(self, app_name=None, graph=None):
     if not graph:
         graph = get_graph(request=None, app_name=app_name)
     if self.score < 0:
         raise AttributeError('The score must be an integer >= 0.')
     return graph.request('%s/scores' % self.user.id, '', {'score': str(self.score)})
Example #51
                        type=int,
                        default=20,
                        help="Maximum number of nodes in network")
    parser.add_argument('-v',
                        '--verbose',
                        type=bool,
                        default=True,
                        help="Display output messages")

    args = parser.parse_args()
    print(args)
    miner = NetworkMiner(
        include_collaborators=args.include_collaborators,
        breadth_limit=args.breadth_limit,
        max_pop_size=args.max_pop_size,
        min_popularity=args.min_popularity,
        verbose=args.verbose,
    )
    miner.write_edgelist(artist=args.uri, fname=args.fname)

    G = get_graph('derivatives/{}_attributes.pkl'.format(args.fname),
                  'derivatives/{}.edgelist'.format(args.fname))

    params = [
        args.fname, args.include_collaborators, args.breadth_limit,
        args.max_pop_size, args.min_popularity, args.verbose
    ]
    params = [str(param) for param in params]
    outname = ("_").join(params)
    plot(G, fname=outname)
Example #52
def delete_object(modeladmin, request, queryset):
    graph = get_graph(request)
    for obj in queryset:
        obj.delete(graph=graph, facebook=True)
Example #53
 def save_model(self, request, obj, form, change):
     graph = get_graph(request, force_refresh=True, prefer_cookie=True)
     obj.get_from_facebook(save=True, graph=graph)
"""
Approximates Pareto front for multiobjective immunisation
with the NetShield and NetShield+ methods with epsilon constraint
Then plots the results
"""

import utils
import netshield_qp
import matplotlib.pyplot as plt

graph = utils.get_graph("graphs/karate.gml")
adj = utils.get_adj_np(graph)

res_netshield = netshield_qp.netshield_mo(adj, 1)
res_netshield_plus = netshield_qp.netshield_plus_mo(adj, 1, 1)

# Plotting the Pareto front

coordinates_netshield = (s['evaluation'] for s in res_netshield)
xy = list(zip(*coordinates_netshield))
plt.plot(xy[0], xy[1], 'ro', label="NetShield")

coordinates_netshield_plus = (s['evaluation'] for s in res_netshield_plus)
xy = list(zip(*coordinates_netshield_plus))
plt.plot(xy[0], xy[1], 'bo', label="NetShield+")

plt.title("Pareto front")
plt.xlabel("eigendrop")
plt.ylabel("cost")
plt.legend()
plt.show()
Example #56
 async def resolve_graph(self, info, after):
     return get_graph(after)
Example #57
def get_embeddings(input_file,
                   output_folder,
                   directed=False,
                   walks_per_node=10,
                   steps=80,
                   size=300,
                   window=10,
                   workers=1,
                   verbose=True):
    """
    Performs uniform random walks on given graph and generates its embeddings.

    :param input_file: Path to a file containing an edge list of a graph (str). 
    :param output_folder: Directory where the embeddings will be stored (str).
    :param directed: True if the graph is directed (bool).
    :param walks_per_node: How many random walks will be performed from each node (int).
    :param steps: How many node traversals will be performed for each random walk (int).
    :param size: Base dimensionality of the embedding vector. Should be divisible by 6 (int).
    :param window: The window parameter for the word2vec model (i.e. maximum distance in a random walk where one node can be considered another node's context) (int).
    :param workers: Number of threads to use when training the word2vec model (int).
    :param verbose: Whether to print progress messages to stdout (bool).
    """

    if verbose:
        print("Getting the graph")
    graph = get_graph(input_file, directed)

    if verbose:
        print("Getting the neighbours' dictionary")
    neighbours = get_neighbours(graph)

    if verbose:
        print("Getting the random walks")
    random_walks = get_random_walks(neighbours, walks_per_node, steps)

    if verbose:
        print("Getting the embeddings")
        print(size)
    model = gensim.models.Word2Vec(random_walks,
                                   min_count=0,
                                   size=size,
                                   window=window,
                                   iter=1,
                                   sg=1,
                                   workers=workers)
    model.wv.save_word2vec_format(
        os.path.join(output_folder, 'embeddings_' + str(size) + '.csv'))

    if verbose:
        print(int(size / 2))
    model = gensim.models.Word2Vec(random_walks,
                                   min_count=0,
                                   size=int(size / 2),
                                   window=window,
                                   iter=1,
                                   sg=1,
                                   workers=workers)
    model.wv.save_word2vec_format(
        os.path.join(output_folder,
                     'embeddings_' + str(int(size / 2)) + '.csv'))

    if verbose:
        print(int(size / 3))
    model = gensim.models.Word2Vec(random_walks,
                                   min_count=0,
                                   size=int(size / 3),
                                   window=window,
                                   iter=1,
                                   sg=1,
                                   workers=workers)
    model.wv.save_word2vec_format(
        os.path.join(output_folder,
                     'embeddings_' + str(int(size / 3)) + '.csv'))
Example #58
def get_embeddings(input_file, output_folder, directed=False, walks_per_node=10, steps=80,
                   size=300, window=10, workers=1, metric='jaccard', verbose=True):  
    """
    Performs non-uniform random walks (on neighboring nodes) on given graph and generates its embeddings.

    :param input_file: Path to a file containing an edge list of a graph (str). 
    :param output_folder: Directory where the embeddings will be stored (str).
    :param directed: True if the graph is directed (bool).
    :param walks_per_node: How many random walks will be performed from each node (int).
    :param steps: How many node traversals will be performed for each random walk (int).
    :param size: Dimensionality of the embedding vector. Should be divisible by 6 (int).
    :param window: The window parameter for the word2vec model (i.e. maximum distance in a random walk where one node can be considered another node's context) (int).
    :param workers: Number of threads to use when training the word2vec model (int).
    :param metric: The metric which will be used to generate similarities (str).
    :param verbose: Whether to print progress messages to stdout (bool).
    """

    if verbose:
        print("Getting the graph")
    graph = get_graph(input_file, directed)
        
    if verbose:
        print("Getting the neighbours' dictionary")
    neighbours = get_neighbours(graph)
    
    if verbose:
        print("Getting the similarities")

    if metric == "common_neighbours":
        similarities = get_similarities_common_neighbours(neighbours)
    elif metric == 'jaccard':
        similarities = get_similarities_jaccard(neighbours)
    elif metric == 'euclidean':
        adjacency_dictionary = get_adjacency(graph)
        similarities = get_similarities_euclidean(neighbours, adjacency_dictionary)
    elif metric == 'cosine':
        adjacency_dictionary = get_adjacency(graph)
        similarities = get_similarities_cosine(neighbours, adjacency_dictionary)
    elif metric == 'pearson':
        adjacency_dictionary = get_adjacency(graph)
        similarities = get_similarities_pearson(neighbours, adjacency_dictionary)
    else:
        raise ValueError("Invalid value for parameter 'metric'.\n" + \
                         "Should be one of: 'common_neighbours', 'jaccard', 'euclidean', 'cosine', 'pearson'")
    if verbose:
        print("Getting the random walks")
    random_walks = get_random_walks(neighbours, similarities, walks_per_node, steps)

    if verbose:
        print("Getting the embeddings")
        print(size)
    model = gensim.models.Word2Vec(random_walks, min_count=0, size=size, window=window, iter=1, sg=1, workers=workers)
    model.wv.save_word2vec_format(os.path.join(output_folder, 'embeddings_' + str(size) + '.csv'))
    
    if verbose:
        print(int(size/2))
    model = gensim.models.Word2Vec(random_walks, min_count=0, size=int(size/2), window=window, iter=1, sg=1, workers=workers)
    model.wv.save_word2vec_format(os.path.join(output_folder, 'embeddings_' + str(int(size/2)) + '.csv'))
    
    if verbose:
        print(int(size/3))
    model = gensim.models.Word2Vec(random_walks, min_count=0, size=int(size/3), window=window, iter=1, sg=1, workers=workers)
    model.wv.save_word2vec_format(os.path.join(output_folder, 'embeddings_' + str(int(size/3)) + '.csv'))
"""A Moore Machine modeled on Dublin's City parking meters."""
from fsm import *
from utils import get_graph

parking_meter = MooreMachine('Parking Meter')

ready = State('Ready', initial=True)
verify = State('Verify')
await_action = State(r'Await\naction')
print_tkt = State('Print ticket')
return_money = State(r'Return\nmoney')
reject = State('Reject coin')
ready[r'coin inserted'] = verify

verify.update({'valid': State(r'add value\rto ticket'), 
               'invalid': reject})

for coin_value in verify:
    verify[coin_value][''] = await_action

await_action.update({'print': print_tkt,
                     'coin': verify,
                     'abort': return_money,
                     'timeout': return_money})
return_money[''] = print_tkt[''] = ready
get_graph(parking_meter).draw('.tmp/parking.png', prog='dot')
Example #60
            avg_time = accum_time / (n_times - n_cold_start)
            print('hidden size: {}, avg time: {}'.format(n_hid, avg_time))
        except:
            print('hidden size: {}, OOM'.format(n_hid))


if __name__ == '__main__':
    parser = argparse.ArgumentParser("Benchmark DGL kernels")
    parser.add_argument('--spmm-binary', type=str, default='copy_lhs')
    parser.add_argument('--spmm-reduce', type=str, default='sum')
    parser.add_argument('--sddmm-binary', type=str, default='add')
    parser.add_argument('--gpu', '-g', type=str, default='-1')
    args = parser.parse_args()
    if args.gpu == '-1':
        ctx = F.cpu()
    else:
        ctx = F.gpu()
    ctx_str = 'cpu' if args.gpu == '-1' else 'gpu'

    for dataset in ['reddit', 'arxiv', 'proteins']:
        g = get_graph(dataset)
        g = g.int().to(ctx)
        print(g)
        # SPMM
        bench_spmm(g, ctx, args.spmm_binary, args.spmm_reduce)
        # SDDMM
        if ctx_str == 'cpu':
            continue  # sddmm out of mem on cpu will result in termination of the program.
        bench_sddmm(g, ctx, args.sddmm_binary)
        del g