def getGraphs(n, graphType):
    """Load the stored random graphs with *n* nodes for the given graph type.

    Scans the JSON file for *graphType* and, for the record whose 'n' field
    matches the requested size, rebuilds each stored graph and assigns a
    unit weight 'w' to every node and edge.

    Returns a list of (k, graph) pairs for the first matching record, or
    None when no record matches.
    """
    for data in FileIO.iterateJsonFromFile(randomGraphsFolder%graphType):
        if n==data['n']:
            graphs = []
            for k,g in data['graphs']:
                graph = my_nx.getGraphFromDict(g)
                # Fix: the original reused 'n' as the loop variable here,
                # shadowing the requested graph size parameter.
                for node in graph.nodes()[:]: graph.node[node]['w']=1
                for u,v in graph.edges()[:]: graph.edge[u][v]['w']=1
                graphs.append((k, graph))
            return graphs
def groupOccurrencesByEpochReduceFinal(self, ep, epochObjects):
    # Reduce step for one epoch: merge the per-mapper hashtag occurrence
    # lists, then emit a weighted graph connecting locations that share
    # enough occurrences of the same hashtag.
    graph, occurrences = nx.Graph(), defaultdict(list)
    # Merge the partial dictionaries (hashtag -> [(locationId, count), ...]).
    for occDict in epochObjects:
        for h, occs in occDict.iteritems(): occurrences[h]+=occs
    for h in occurrences.keys()[:]:
        # Total the occurrences of hashtag h per location, keeping only the
        # locations that meet the minimum threshold.
        # NOTE(review): itertools.groupby only groups adjacent items — this
        # assumes occurrences[h] is already ordered by location id; confirm
        # the upstream mapper guarantees that ordering.
        hashtagsMap = dict(filter(lambda l: l[1]>=MIN_OCCURANCES_TO_ASSIGN_HASHTAG_TO_A_LOCATION, [(lid, sum(map(itemgetter(1), l)))for lid, l in groupby(occurrences[h], key=itemgetter(0))]))
        if hashtagsMap and len(hashtagsMap)>1:
            # Connect every pair of qualifying locations; each node is
            # updated at most once per hashtag, and the edge weight is the
            # smaller of the two location totals.
            nodesUpdated = set()
            for u, v in combinations(hashtagsMap,2):
                if u not in nodesUpdated: updateNode(graph, u, hashtagsMap[u]), nodesUpdated.add(u)
                if v not in nodesUpdated: updateNode(graph, v, hashtagsMap[v]), nodesUpdated.add(v)
                updateEdge(graph, u, v, min([hashtagsMap[u], hashtagsMap[v]]))
    # Emit only non-empty graphs, serialized to a plain dict.
    if graph.edges():
        # totalEdgeWeight = sum([d['w'] for _,_,d in graph.edges(data=True)])+0.0
        # for u,v in graph.edges()[:]: graph[u][v]['w']/=totalEdgeWeight
        yield ep, {'ep': ep, 'graph': my_nx.getDictForGraph(graph)}
def writeNeighborClusters(locationObject, neighborLocationsSelectionMethod, **kwargs):
    # Build and cluster the neighboring-location graph for one location.
    # NOTE(review): this function ends with exit() and carries commented-out
    # plotting/printing code — it reads as exploratory/debug code rather than
    # a finished pipeline step; confirm before relying on it.
    neighborLocations = [neighborLocationsSelectionMethod(checkin, locationObject['users'], **kwargs) for checkin in locationObject['checkins']]
    neighborLocationCheckins = NeighboringLocationsAnalysis._filterCheckins(neighborLocations, locationObject['lid'])
    graph = NeighboringLocationsAnalysis.getNeigboringLocationGraph(neighborLocationCheckins, **kwargs)
    graphWithClusters = NeighboringLocationsAnalysis.getGraphWithClusters(graph, **kwargs)
    # Attach a human-readable location name to every node before export.
    for n in graphWithClusters.nodes(): graphWithClusters.node[n]['label'] = NeighboringLocationsAnalysis.getLocationName(n)
    gd = Networkx.getDictForGraph(graphWithClusters)
    # outputFileName =
    # newGraph.add_nodes_from(data['edges'])
    # plot(newGraph, draw_edge_labels=True, node_color='#A0CBE2',width=4,edge_cmap=plt.cm.Blues,with_labels=False)
    # for cluster, score in clusters:
    #     newCluster = []
    #     for lid in cluster: newCluster.append((lid, NeighboringLocationsAnalysis.getLocationName(lid)))
    #     print cluster,score
    #     print newCluster, score
    exit()
def nWS(n,k=3,p=0.3): graphsToReturn = [] for i in range(100): print RandomGraphGenerator.newman_watts_strogatz_graph, n, i graphsToReturn.append([i*TIME_UNIT_IN_SECONDS, my_nx.getDictForGraph(newman_watts_strogatz_graph(n,k,p))]) return graphsToReturn
def erdosRenyi(n,p=0.3): graphsToReturn = [] for i in range(100): print RandomGraphGenerator.erdos_renyi_graph, n, i graphsToReturn.append([i*TIME_UNIT_IN_SECONDS, my_nx.getDictForGraph(erdos_renyi_graph(n,p))]) return graphsToReturn
def fastGnp(n,p=0.3): graphsToReturn = [] for i in range(100): print RandomGraphGenerator.fast_gnp_random_graph, n, i graphsToReturn.append([i*TIME_UNIT_IN_SECONDS, my_nx.getDictForGraph(fast_gnp_random_graph(n,p))]) return graphsToReturn
def tempGetGraphs(area, timeRange):
    # Load the cached epoch graphs for (area, timeRange) and return them as
    # a list of (epoch, graph) pairs, sorted by epoch.
    return sorted([(d['ep'], my_nx.getGraphFromDict(d['graph']))for d in FileIO.iterateJsonFromFile(tempEpochGraphsFile%(area, '%s_%s'%timeRange))])
def writeTempGraphs(area, timeRange):
def powerlawClusterGraph(n,m=3,p=0.3): graphsToReturn = [] for i in range(100): print RandomGraphGenerator.powerlaw_cluster_graph, n, i graphsToReturn.append([i*TIME_UNIT_IN_SECONDS, my_nx.getDictForGraph(powerlaw_cluster_graph(n,m,p))]) return graphsToReturn