def addNeighbour(self, strA, strB):
    """Link the two named nodes with a bidirectional edge.

    Queries the Google Distance Matrix API for the driving distance and
    duration between the nodes, classifies the compass direction of the
    link into one of four quadrants, and registers each node as a
    neighbour of the other (with opposite directions).

    :param strA: key of the first node in ``self.vertices``
    :param strB: key of the second node in ``self.vertices``
    :return: True if both nodes exist and were linked, False otherwise
    """
    # Validate the keys before doing any work: avoids a pointless API call
    # and the node lookups for unknown keys.
    # NOTE(review): assumes findNode is a pure lookup with no side effects
    # worth running for missing keys — confirm against its definition.
    if strA not in self.vertices or strB not in self.vertices:
        return False
    objA = self.findNode(strA)
    objB = self.findNode(strB)
    # Driving distance between the nodes (coordinates are stored as x=lng, y=lat).
    dist = distance_matrix.distance_matrix(client, (objA.y, objA.x), (objB.y, objB.x), mode="driving")
    # The response nests the values under rows[0].elements[0]; extract the
    # element once instead of walking the structure twice.
    element = dist.get('rows')[0].get('elements')[0]
    distance = element.get('distance').get('value')
    duration = element.get('duration').get('value')
    # Classify the bearing into quadrants 0..3.
    # Renamed from 'dir'/'time', which shadow a builtin and a stdlib module.
    angle = math.atan2(objA.y - objB.y, objA.x - objB.x)
    if math.pi / 4 > abs(angle):
        direction = 1
    elif 3 * math.pi / 4 < abs(angle):
        direction = 3
    elif angle > 0:
        direction = 0
    else:
        direction = 2
    self.edges += 1
    # Link both ways: strB sees strA in `direction`, strA sees strB in the
    # opposite quadrant ((direction + 2) % 4).
    print("linking:", strA+',', strB)
    self.vertices[strA].addNeighbour(objB, (direction + 2) % 4, distance, duration)
    self.vertices[strB].addNeighbour(objA, direction, distance, duration)
    return True
def get(self):
    """Demo request handler: run a sample Distance Matrix query and write
    the raw response (stringified) as plain text.

    On any failure the handler swallows the exception and writes a fixed
    error string instead.
    """
    self.response.headers['Content-Type'] = 'text/plain'
    try:
        import googlemaps
        # NOTE(review): API key hardcoded in source — should be moved to
        # configuration / secret storage and rotated.
        key = 'AIzaSyCXpUMDiQ4_7NeQdUR-bL9ToVvYH2f64vU'
        client = googlemaps.Client(key)
        # Mixed origin/destination formats exercised on purpose:
        # address string, [lat, lng] list, (lat, lng) tuple, {lat, lng} dict.
        origins = ["Bobcaygeon ON", [41.43206, -81.38992]]
        destinations = [(43.012486, -83.6964149), {"lat": 42.8863855, "lng": -78.8781627}]
        # 'd' is presumably the googlemaps distance_matrix module, imported
        # elsewhere in this file — TODO confirm.
        d1 = d.distance_matrix(client, origins, destinations, mode=None, language=None, avoid=None, units=None, departure_time=None, arrival_time=None, transit_mode=None, transit_routing_preference=None, traffic_model=None)
        self.response.out.write(str(d1))
    except Exception:
        self.response.out.write("caught exception")
# NOTE(review): this triple-quote opens a commented-out region that
# continues beyond this chunk.
'''
def distances(orig_list, dest_list, org_names, dest_names, departure_time=0):
    """Return driving and transit timings between an origin and destination.

    :param orig_list: origin, a list with 2 parameters in (lat, lng) format
    :param dest_list: same, for the destination
    :param org_names: unused; kept for interface compatibility
    :param dest_names: unused; kept for interface compatibility
    :param departure_time: unused; kept for interface compatibility
    :return: 3-tuple — [0] distance/time for solo driving, [1] the time
        with the detour, [2] the transit time for the hitchhiker
    """
    matrix_driving = distance_matrix.distance_matrix(gmaps, origins=orig_list, destinations=dest_list)
    matrix_transit = distance_matrix.distance_matrix(gmaps, origins=orig_list, destinations=dest_list, mode='transit')
    # Parse the driving matrix once instead of twice (the original called
    # from_matrix_get_timing(matrix_driving) redundantly for each element).
    driving_timing = from_matrix_get_timing(matrix_driving)
    transit_timing = from_matrix_get_timing(matrix_transit)
    return driving_timing[0], driving_timing[1], transit_timing[0]
def closeness_centrality(g):
    """Compute the closeness centrality of every node of the graph with
    adjacency matrix ``g``.

    Closeness of node i is (n - 1) / sum_j l(i, j), where l(i, j) comes
    from the shortest-path distance matrix of g (first output of
    ``distance_matrix``).

    :param g: adjacency matrix (square, len(g) == number of nodes)
    :return: dict mapping node number -> closeness centrality (float)
    """
    node_count = len(g)
    # distance_matrix returns the pairwise shortest-path matrix first.
    shortest_paths = distance_matrix(g)[0]
    # Row sums give each node's total distance to every other node.
    total_distances = numpy.sum(shortest_paths, axis=1)
    centralities = float(node_count - 1) / total_distances
    return {node: cc for node, cc in enumerate(centralities)}
def is_isomorphic(graphA, graphB):
    """Decide whether graphA and graphB are isomorphic.

    First compares cheap graph invariants; if those agree, runs a
    backtracking search that tries to map graphB's vertices onto a fixed
    traversal order of graphA while keeping the pairwise distance
    matrices consistent.
    """
    # local function for recursion
    def submatch(orderB):
        # number of nodes matched so far
        size = len(orderB)
        for i in range(size):
            # check whether the size-th row of the distance matrix matches completely
            if dmA[orderA[size-1]][orderA[i]] != dmB[orderB[size-1]][orderB[i]]:
                # on any mismatch, abandon this branch
                return False
        # the whole row matched
        # if the number of matched nodes has reached the template's node count
        if size == len(dmB):
            # complete match!
            return True
        else:
            # grow the frontier: build the set of vertices adjacent to
            # the vertices already in orderB.
            neighbors = set()
            for v in orderB:
                # set operations make this easy
                neighbors |= set(graphB[v])
            # exclude vertices already in orderB
            neighbors -= set(orderB)
            # pick each neighbour in turn, extend orderB, and recurse
            # NOTE(review): subresult is assigned but never used.
            subresult = []
            for v in neighbors:
                if submatch(orderB + [v,]):
                    return True
            return False

    # Fix a traversal order: the n-th vertex is always connected to the
    # subgraph formed by the first n-1 vertices.
    def progressive_order(graph):
        orderB = [0]
        for loop in range(len(graph)-1):
            # set of vertices adjacent to those already ordered
            neighbors = set()
            for v in orderB:
                # set operations make this easy
                neighbors |= set(graph[v])
            # exclude vertices already ordered
            neighbors -= set(orderB)
            v = list(neighbors)[0]
            orderB.append(v)
        return orderB

    # Preliminary check: if any invariant differs, the graphs cannot be
    # isomorphic — bail out before the expensive search.
    different = False
    for func in (inv.number_of_vertices, inv.number_of_edges, inv.order_histogram, inv.cycle_histogram):
        if func(graphA) != func(graphB):
            different = True
            break
    if different:
        return False
    # Keep graphA's traversal order fixed.
    orderA = progressive_order(graphA)
    # Precompute the distance matrices (adjacency matrices would also work).
    dmA = dm.distance_matrix(graphA)
    dmB = dm.distance_matrix(graphB)
    # Try each vertex of graphB as the starting point; stop as soon as the
    # answer is known (unlike subgraph_isomorphism, which enumerates all).
    for v in range(len(graphB)):
        orderB = [v,]
        if submatch(orderB):
            return True
    return False
# Script fragment (Python 2): compute and print summary statistics for an
# Erdos-Renyi network. NOTE(review): g is set to None here and then used
# below — presumably it is (re)assigned between these lines in the full
# file, or the erdos_renyi branch builds it; confirm against the original.
g = None
route_options['Nbar'] = Nbar
route_options['g'] = g
if route_options['erdos_renyi']:
    number_nodes = len(g)
    # adjacency matrix is symmetric, so each edge is counted twice
    number_edges = sum(sum(g)) / 2
    diameter_g = connected.connected(g)
    density, Pd = density_degree_distribution.density_degree_distribution((Nbar, g))
    DC = degree_centrality.degree_centrality((Nbar, g))
    CC = closeness_centrality.closeness_centrality(g)
    eigenvector_map = centrality_eigenvector.centrality_eigenvector(g)
    D, average_path_length = distance_matrix.distance_matrix(g)
    # betweenness only makes sense on connected graphs with > 2 nodes
    if len(Nbar) > 2 and not numpy.isinf(average_path_length):
        BC = centrality_betweenness.all_centrality_betweenness(D)
    print 'Erdos-Renyi'
    print '# nodes', number_nodes
    print '# edges', number_edges
    print 'diameter', diameter_g
    print 'density', density
    print '\nBC'
    # NOTE(review): this fragment is cut off here — the body of the
    # following condition continues beyond this chunk.
    if len(Nbar) > 2 and not numpy.isinf(average_path_length):
def get_distance(ad_one, ad_two):
    """Query the Google Distance Matrix API for the distance between two
    addresses, using imperial units.

    :param ad_one: origin address
    :param ad_two: destination address
    :return: the raw distance-matrix API response
    """
    client = googlemaps.Client(key=api_key)
    response = distance_matrix.distance_matrix(client, ad_one, ad_two, units='imperial')
    return response
def add_network(year, quarter):
    """Add per-carrier network centrality measures to the quarterly data
    file (../input/data_<year>_<quarter>.bin) and save the augmented dict
    to ../temp under the same filename.

    For each airline: builds the carrier's airport adjacency matrix,
    derives the airport-level ("bar") and route-level ("star") networks,
    computes degree / closeness / betweenness / eigenvector centralities
    and density, then stamps each record in ``data`` (keyed as
    origin_dest_carrier) with the derived measures. Python 2 code.
    """
    src = '../input/data_' + str(year) + '_' + str(quarter) + '.bin'
    f = open(src, 'rb')
    data = cPickle.load(f)
    f.close()
    all_airlines = list_of_airlines(data)
    all_airports = list_of_airports(data)
    # N maps airport codes to node indices
    N = map_airports_code(all_airports)
    # per-carrier maps: airport (or route) -> centrality value
    DC_dict = {}
    CC_dict = {}
    BC_dict = {}
    EC_dict = {}
    density_dict = {}
    DCroute_dict = {}
    CCroute_dict = {}
    BCroute_dict = {}
    ECroute_dict = {}
    count = 0
    for carrier in all_airlines:
        print '\t' + carrier + ' (' + str(count + 1) + ' of ' + str(len(all_airlines)) + ')'
        DC_dict[carrier] = {}
        CC_dict[carrier] = {}
        BC_dict[carrier] = {}
        EC_dict[carrier] = {}
        DCroute_dict[carrier] = {}
        CCroute_dict[carrier] = {}
        BCroute_dict[carrier] = {}
        ECroute_dict[carrier] = {}
        g = adjacency_matrix(data, N, carrier)
        # drop airports this carrier does not serve
        Nbar, gbar = remove_zeros(N, g)
        network = (N, g)
        network_bar = (Nbar, gbar)
        # inv_d maps node index back to airport code
        inv_d = invert_dict(Nbar)
        # route-level ("star") network: nodes are routes, not airports
        network_star = route_level_g(network_bar)
        Nstar = network_star[0]
        gstar = network_star[1]
        inv_d_star = invert_dict(Nstar)
        # diameter_g = connected(gbar)
        # diameter_gstar = connected(gstar)
        # # print 'diameter g = ', diameter_g
        # print 'diameter gstar = ', diameter_gstar
        D, average_path_length = distance_matrix(gbar)
        if len(Nstar) > 1:
            Dstar, average_path_length_star = distance_matrix(gstar)
        density, Pd = density_degree_distribution(network_bar)
        # try:
        # # density_star, Pd_star = density_degree_distribution(network_star)
        # print density, density_star
        # # except ZeroDivisionError:
        # # pass
        density_dict[carrier] = density
        DC = degree_centrality(network_bar)
        DCroute = degree_centrality(network_star)
        CC = closeness_centrality(gbar)
        if len(Nstar) > 1:
            CCroute = closeness_centrality(gstar)
        eigenvector_map = centrality_eigenvector(gbar)
        eigenvector_map_route = centrality_eigenvector(gstar)
        # betweenness only computed for connected networks with > 2 nodes
        if len(Nbar) > 2 and not numpy.isinf(average_path_length):
            BC = all_centrality_betweenness(D)
        # if len(Nstar) > 1 and not numpy.isinf(average_path_length_star):
        # BCroute = all_centrality_betweenness(Dstar)
        # re-key centralities from node indices to airport / route codes
        for key in DC:
            DC_dict[carrier][inv_d[key]] = DC[key]
        for key in DCroute:
            DCroute_dict[carrier][inv_d_star[key]] = DCroute[key]
        for key in CC:
            CC_dict[carrier][inv_d[key]] = CC[key]
        if len(Nstar) > 1:
            for key in CCroute:
                CCroute_dict[carrier][inv_d_star[key]] = CCroute[key]
        if len(Nbar) > 2 and not numpy.isinf(average_path_length):
            for key in BC:
                BC_dict[carrier][inv_d[key]] = BC[key]
        for key in eigenvector_map:
            EC_dict[carrier][inv_d[key]] = eigenvector_map[key]
        for key in eigenvector_map_route:
            ECroute_dict[carrier][inv_d_star[key]] = eigenvector_map_route[key]
        count += 1
    # stamp each record (keyed origin_dest_carrier) with the measures
    for i in data:
        origin = i.split('_')[0]
        dest = i.split('_')[1]
        route = origin + '_' + dest
        carrier = i.split('_')[2]
        # add minimum, maximum degree centrality variable
        data[i]['mindegree'] = min(DC_dict[carrier][origin], DC_dict[carrier][dest])
        data[i]['maxdegree'] = max(DC_dict[carrier][origin], DC_dict[carrier][dest])
        # add route-level degree centrality variable
        data[i]['routedegree'] = DCroute_dict[carrier][route]
        # add minimum, maximum closeness centrality variable
        data[i]['mincloseness'] = min(CC_dict[carrier][origin], CC_dict[carrier][dest])
        data[i]['maxcloseness'] = max(CC_dict[carrier][origin], CC_dict[carrier][dest])
        # add route-level closeness centrality variable ('NA' when the
        # route network was too small to compute it)
        try:
            data[i]['routecloseness'] = CCroute_dict[carrier][route]
        except KeyError:
            data[i]['routecloseness'] = 'NA'
        # add minimum, maximum betweenness centrality variable
        try:
            data[i]['minbetweenness'] = min(BC_dict[carrier][origin], BC_dict[carrier][dest])
            data[i]['maxbetweenness'] = max(BC_dict[carrier][origin], BC_dict[carrier][dest])
        except KeyError:
            data[i]['minbetweenness'] = 'NA'
            data[i]['maxbetweenness'] = 'NA'
        # add minimum, maximum eigenvector centrality variable
        data[i]['mineigenvector'] = min(EC_dict[carrier][origin], EC_dict[carrier][dest])
        data[i]['maxeigenvector'] = max(EC_dict[carrier][origin], EC_dict[carrier][dest])
        # add route-level eigenvector centrality variable
        data[i]['routeeigenvector'] = ECroute_dict[carrier][route]
        # add density
        data[i]['density'] = density_dict[carrier]
    # save bin datafile to \temp (same filename as \input datafile)
    filename = '../temp/data_' + str(year) + '_' + str(quarter) + '.bin'
    f = open(filename, 'wb')
    cPickle.dump(data, f)
    f.close()
    return None
# Interactive trip-planner loop (Python 3): reads a location, filters
# candidate destinations by straight-line distance, then by actual drive
# time from the Google Distance Matrix API.
while True:
    address = input('Enter your location: ')
    # empty input ends the session
    if len(address) < 1: break
    # Use google api to get lat long for origin address
    (lat,lng)=geocode(address)
    travel = float(input('How long are you willing to travel in hours? '))
    est_miles = travel*40 # super rough guess of how far you could go in an hour
    # Get potential destinations filtered by "as the crow flies" distances calculated by sql database
    distance_filtered_locs = calc_distance(lat,lng,est_miles)
    print(distance_filtered_locs)
    # use google distance matrix to get actual drive time distances
    js = distance_matrix(address, distance_filtered_locs)
    # one row per origin; elements line up with distance_filtered_locs
    elements = js["rows"][0]["elements"]
    travel_seconds = travel * 3600
    acceptable_indices = []
    # NOTE(review): temperatures collected here appear to be used further
    # down, beyond this chunk — confirm.
    user_min = int(input("What's your minimum acceptable temperature? "))
    user_max = int(input("What's your maximum acceptable temperature? "))
    # get list of indexes that pass travel duration test
    # (duration "value" is in seconds per the Distance Matrix API)
    for i in range(len(elements)):
        if elements[i]["status"] == "OK" and elements[i]["duration"]["value"] < travel_seconds:
            acceptable_indices.append(i)
    # print names of acceptable cities ( index in the list of destinations... should convert to IDs when using database)
    print("Destinations within acceptable drive time: ")
def add_network(year, quarter):
    """Add per-carrier network measures to data_<year>_<quarter>.bin and
    save the augmented dict to ..\\temp under the same filename.

    Extended version: besides degree / closeness / betweenness /
    eigenvector centralities and density, also records diameter, node and
    edge counts per carrier, computes cross-carrier centralities, and can
    print a verbose per-carrier debug block (test_output). Python 2 code.
    """
    # when True, print the TEST OUTPUT diagnostics block per carrier
    test_output = True
    print '\nadd network measures to data_year_quarter.bin, save to \\temp'
    src = '..\\input\\data_' + str(year) + '_' + str(quarter) + '.bin'
    print '\nloading', src, '\n'
    f = open(src, 'rb')
    data = cPickle.load(f)
    f.close()
    all_airlines = list_of_airlines.list_of_airlines(data)
    all_airports = list_of_airports.list_of_airports(data)
    # N maps airport codes to node indices
    N = map_airports_code.map_airports_code(all_airports)
    # per-carrier maps: airport (or route) -> measure
    DC_dict = {}
    CC_dict = {}
    BC_dict = {}
    EC_dict = {}
    density_dict = {}
    diameter_dict = {}
    nodes_dict = {}
    edges_dict = {}
    DCroute_dict = {}
    CCroute_dict = {}
    BCroute_dict = {}
    ECroute_dict = {}
    count = 0
    for carrier in all_airlines:
        # test_condition = (carrier == 'AA' and year == 2013 and quarter == 3)
        test_condition = True
        print '\t' + carrier + ' (' + str(count + 1) + ' of ' + str(len(all_airlines)) + ')'
        DC_dict[carrier] = {}
        CC_dict[carrier] = {}
        BC_dict[carrier] = {}
        EC_dict[carrier] = {}
        DCroute_dict[carrier] = {}
        CCroute_dict[carrier] = {}
        BCroute_dict[carrier] = {}
        ECroute_dict[carrier] = {}
        g = adjacency_matrix.adjacency_matrix(data, N, carrier)
        # drop airports this carrier does not serve
        Nbar, gbar = remove_zeros.remove_zeros(N, g)
        number_nodes = len(gbar)
        # adjacency matrix is symmetric, so each edge is counted twice
        number_edges = sum(sum(gbar)) / 2
        nodes_dict[carrier] = number_nodes
        edges_dict[carrier] = number_edges
        network = (N, g)
        network_bar = (Nbar, gbar)
        # inv_d maps node index back to airport code
        inv_d = invert_dict.invert_dict(Nbar)
        # route-level ("star") network: nodes are routes, not airports
        network_star = route_level_g.route_level_g(network_bar)
        Nstar = network_star[0]
        gstar = network_star[1]
        inv_d_star = invert_dict.invert_dict(Nstar)
        # NOTE(review): bare except — hides everything, not just the
        # disconnected-graph failure it presumably targets.
        try:
            diameter_g = connected.connected(gbar)
        except:
            diameter_g = 'NA'
        # diameter_gstar = connected.connected(gstar)
        # # print 'diameter g = ', diameter_g
        # print 'diameter gstar = ', diameter_gstar
        D, average_path_length = distance_matrix.distance_matrix(gbar)
        if len(Nstar) > 1:
            Dstar, average_path_length_star = distance_matrix.distance_matrix(gstar)
        density, Pd = density_degree_distribution.density_degree_distribution(network_bar)
        # try:
        # # density_star, Pd_star = density_degree_distribution.density_degree_distribution(network_star)
        # print density, density_star
        # # except ZeroDivisionError:
        # # pass
        diameter_dict[carrier] = diameter_g
        density_dict[carrier] = density
        DC = degree_centrality.degree_centrality(network_bar)
        DCroute = degree_centrality.degree_centrality(network_star)
        CC = closeness_centrality.closeness_centrality(gbar)
        if len(Nstar) > 1:
            CCroute = closeness_centrality.closeness_centrality(gstar)
        eigenvector_map = centrality_eigenvector.centrality_eigenvector(gbar)
        eigenvector_map_route = centrality_eigenvector.centrality_eigenvector(gstar)
        # betweenness only computed for connected networks with > 2 nodes
        if len(Nbar) > 2 and not numpy.isinf(average_path_length):
            BC = centrality_betweenness.all_centrality_betweenness(D)
        # if len(Nstar) > 1 and not numpy.isinf(average_path_length_star):
        # # BCroute = centrality_betweenness.all_centrality_betweenness(Dstar)
        # verbose per-carrier diagnostics
        if test_output and test_condition:
            print '\nTEST OUTPUT'
            print 'carrier', carrier, 'year', year, 'quarter', quarter
            print
            # print 'Nbar', Nbar
            # print 'gbar[2]', gbar[2] # Austin-Bergstrom International Airport
            print 'number_nodes', number_nodes
            print 'number_edges', number_edges
            # print 'inv_d_star', inv_d_star
            number_nodes_star = len(gstar)
            number_edges_star = sum(sum(gstar)) / 2
            # print 'number_nodes_star', number_nodes_star
            # print 'number_edges_star', number_edges_star
            print 'diameter_g', diameter_g
            # print 'distance matrix D', D
            print 'average_path_length', average_path_length
            print 'density', density
            # print 'degree distribution Pd', Pd
            # print 'degree centrality DC', DC
            print 'overall clustering', clustering_A.cl(gbar)
            print 'average clustering', clustering_average.cl_avg(gbar)
            ## http://stackoverflow.com/questions/5927180/removing-data-from-a-numpy-array
            # iu = numpy.triu_indices(len(gbar), 1)
            # gbar_upper_triangle = gbar[iu]
            # X = numpy.ma.masked_equal(gbar_upper_triangle, 0)
            # gbar_upper_triangle_no_zeros = X.compressed()
            degree_by_node = numpy.sum(gbar, axis=1)
            print 'mean degree', numpy.mean(degree_by_node)
            print 'median degree', numpy.median(degree_by_node)
            print 'degree correlation', degree_correlation.calculate(gbar)
            print
            # locate the node with the maximum of each centrality
            max_DC = 0
            max_DC_i = None
            for i in DC:
                if DC[i] > max_DC:
                    max_DC = DC[i]
                    max_DC_i = i
            # print 'maximum degree centrality', max_DC, 'index', max_DC_i, 'node', inv_d[max_DC_i]
            # print 'closeness centrality CC', CC
            max_CC = 0
            max_CC_i = None
            for i in CC:
                if CC[i] > max_CC:
                    max_CC = CC[i]
                    max_CC_i = i
            # print 'maximum closeness centrality', max_CC, 'index', max_CC_i, 'node', inv_d[max_CC_i]
            # print 'eigenvector_map', eigenvector_map
            max_EC = 0
            max_EC_i = None
            for i in eigenvector_map:
                if eigenvector_map[i] > max_EC:
                    max_EC = eigenvector_map[i]
                    max_EC_i = i
            # print 'maximum eigenvector centrality', max_EC, 'index', max_EC_i, 'node', inv_d[max_EC_i]
            max_BC = 0
            max_BC_i = None
            for i in BC:
                if BC[i] > max_BC:
                    max_BC = BC[i]
                    max_BC_i = i
            # print 'maximum betweenness centrality', max_BC, 'index', max_BC_i, 'node', inv_d[max_BC_i]
            # raw_input()
        # re-key centralities from node indices to airport / route codes
        for key in DC:
            DC_dict[carrier][inv_d[key]] = DC[key]
        for key in DCroute:
            DCroute_dict[carrier][inv_d_star[key]] = DCroute[key]
        for key in CC:
            CC_dict[carrier][inv_d[key]] = CC[key]
        if len(Nstar) > 1:
            for key in CCroute:
                CCroute_dict[carrier][inv_d_star[key]] = CCroute[key]
        if len(Nbar) > 2 and not numpy.isinf(average_path_length):
            for key in BC:
                BC_dict[carrier][inv_d[key]] = BC[key]
        for key in eigenvector_map:
            EC_dict[carrier][inv_d[key]] = eigenvector_map[key]
        for key in eigenvector_map_route:
            ECroute_dict[carrier][inv_d_star[key]] = eigenvector_map_route[key]
        count += 1
    # centralities of the other carriers serving the same airports
    centrality_dicts = ({'betweenness': BC_dict, 'closeness': CC_dict, 'degree': DC_dict, 'eigenvector': EC_dict})
    other_centrality = other_carrier_centrality.centrality(centrality_dicts)
    # print
    # print 'betweenness', BC_dict['AA']['DFW'], other_centrality['betweenness']['AA']['DFW']
    # print 'closeness', CC_dict['AA']['DFW'], other_centrality['closeness']['AA']['DFW']
    # print 'degree', DC_dict['AA']['DFW'], other_centrality['degree']['AA']['DFW']
    # print 'eigenvector', EC_dict['AA']['DFW'], other_centrality['eigenvector']['AA']['DFW']
    # stamp each record (keyed origin_dest_carrier) with the measures
    for i in data:
        origin = i.split('_')[0]
        dest = i.split('_')[1]
        route = origin + '_' + dest
        carrier = i.split('_')[2]
        # add minimum, maximum degree centrality variable
        data[i]['mindegree'] = min(DC_dict[carrier][origin], DC_dict[carrier][dest])
        data[i]['maxdegree'] = max(DC_dict[carrier][origin], DC_dict[carrier][dest])
        # add origin, destination degree centrality variable
        data[i]['origindegree'] = DC_dict[carrier][origin]
        data[i]['destinationdegree'] = DC_dict[carrier][dest]
        # add route-level degree centrality variable
        data[i]['routedegree'] = DCroute_dict[carrier][route]
        # add minimum, maximum closeness centrality variable
        data[i]['mincloseness'] = min(CC_dict[carrier][origin], CC_dict[carrier][dest])
        data[i]['maxcloseness'] = max(CC_dict[carrier][origin], CC_dict[carrier][dest])
        # add origin, destination closeness centrality variable
        data[i]['origincloseness'] = CC_dict[carrier][origin]
        data[i]['destinationcloseness'] = CC_dict[carrier][dest]
        # add route-level closeness centrality variable ('NA' when the
        # route network was too small to compute it)
        try:
            data[i]['routecloseness'] = CCroute_dict[carrier][route]
        except KeyError:
            data[i]['routecloseness'] = 'NA'
        # add minimum, maximum betweenness centrality variable
        try:
            data[i]['minbetweenness'] = min(BC_dict[carrier][origin], BC_dict[carrier][dest])
            data[i]['maxbetweenness'] = max(BC_dict[carrier][origin], BC_dict[carrier][dest])
        except KeyError:
            data[i]['minbetweenness'] = 'NA'
            data[i]['maxbetweenness'] = 'NA'
        # add origin, destination betweenness centrality variable
        try:
            data[i]['originbetweenness'] = BC_dict[carrier][origin]
            data[i]['destinationbetweenness'] = BC_dict[carrier][dest]
        except KeyError:
            data[i]['originbetweenness'] = 'NA'
            data[i]['destinationbetweenness'] = 'NA'
        # add minimum, maximum eigenvector centrality variable
        data[i]['mineigenvector'] = min(EC_dict[carrier][origin], EC_dict[carrier][dest])
        data[i]['maxeigenvector'] = max(EC_dict[carrier][origin], EC_dict[carrier][dest])
        # add origin, destination eigenvector centrality variable
        data[i]['origineigenvector'] = EC_dict[carrier][origin]
        data[i]['destinationeigenvector'] = EC_dict[carrier][dest]
        # add route-level eigenvector centrality variable
        data[i]['routeeigenvector'] = ECroute_dict[carrier][route]
        # add density
        data[i]['density'] = density_dict[carrier]
        # add diameter
        data[i]['diameter'] = diameter_dict[carrier]
        # add number of nodes
        data[i]['nodes'] = nodes_dict[carrier]
        # add number of edges
        data[i]['edges'] = edges_dict[carrier]
    # save bin datafile to \temp (same filename as \input datafile)
    filename = '..\\temp\\data_' + str(year) + '_' + str(quarter) + '.bin'
    f = open(filename, 'wb')
    cPickle.dump(data, f)
    f.close()
    return None