def test_stop_finder(self):
    """
    Check that the stop finder returns clusters of nearest stops.
    Uses the coordinates of 100 random stops.
    """
    import os
    import pickle
    import random
    from dbanalysis.stop_tools import stop_finder
    finder = stop_finder()
    BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    with open(BASE_DIR + '/resources/new_stops_dict.bin', 'rb') as handle:
        stops = pickle.load(handle)
    for _ in range(100):
        stop_id = random.choice(list(stops.keys()))
        s = stops[stop_id]
        response = finder.find_closest_stops(s['lon'], s['lat'])
        self.assertIsInstance(response, list)
        for entry in response:
            self.assertIn('stop_id', entry)
            self.assertIn('info', entry)
            self.assertIn('lat', entry['info'])
            self.assertIn('lon', entry['info'])
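# Expected response shape, reconstructed from the assertions above as a hedged
# sketch -- the field values are illustrative placeholders, not real data:
#
# [
#     {
#         'stop_id': '2',                        # hypothetical stop id
#         'info': {'lat': 53.35, 'lon': -6.26},  # illustrative coordinates
#     },
#     ...
# ]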
def __init__(self, build=True):
    """
    Gather all resources and set up the graph.
    """
    import json
    import os
    import pickle
    from dbanalysis.classes import neural_stop
    from dbanalysis.classes import route_selector
    from dbanalysis.classes import time_tabler_refac2 as time_tabler
    from dbanalysis.stop_tools import stop_getter, stop_finder

    with open('/home/student/dbanalysis/dbanalysis/resources/trimmed_routes.json', 'r') as handle:
        self.routes = json.loads(handle.read())
    with open('/home/student/dbanalysis/dbanalysis/resources/new_stops_dict.bin', 'rb') as handle:
        self.stops_dict = pickle.load(handle)
    self.stop_getter = stop_getter()
    self.selector = route_selector.selector()
    self.stop_finder = stop_finder()
    self.time_tabler = time_tabler.time_tabler()
    # put unavailable routes here
    self.unavailable_routes = []
    self.nodes = {}
    print(self.selector.unavailable_routes)
    if not build:
        return

    count = 0
    all_models = set(os.listdir('/data/neural_models'))
    for route in self.routes:
        for v_num, variation in enumerate(self.routes[route]):
            # availability check currently disabled:
            # if self.selector.get_unavailable(route, v_num):
            #     print('unavailable', route, v_num)
            for i in range(1, len(variation) - 1):
                stopA = str(variation[i])
                stopB = str(variation[i + 1])
                distance = self.stop_getter.get_stop_distance(stopA, stopB)
                if stopA not in self.nodes:
                    # first time this stop is seen: build its node, link it
                    # onward, and attach its walking links
                    stop_object = neural_stop.stop(stopA, self.stops_dict[stopA])
                    can = stop_object.add_link(stopB, distance)
                    if not can:
                        input('error')
                    stop_object.get_foot_links()
                    self.nodes[stopA] = stop_object
                else:
                    # stop already in the graph: just add another outgoing link
                    self.nodes[stopA].add_link(stopB, distance)
    print(count)
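# Usage sketch for the constructor above. The enclosing class name is not
# shown in this snippet, so 'BusGraph' below is a hypothetical placeholder:
#
# graph = BusGraph(build=False)   # gather resources only, skip the graph build
# graph = BusGraph()              # also build one node per stop with its links
# print(len(graph.nodes))         # number of stops loaded into the graph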
def __init__(self):
    import json
    from math import inf
    from dbanalysis.classes import simple_stop as stop
    from dbanalysis.classes import router
    from dbanalysis.classes.time_tabler_refac import time_tabler
    from dbanalysis.stop_tools import stop_finder, stop_getter

    self.stop_finder = stop_finder()
    self.s_getter = stop_getter()
    with open('/home/student/dbanalysis/dbanalysis/resources/trimmed_routes.json', 'r') as handle:
        self.route_info = json.loads(handle.read())
    self.nodes_map = {}
    self.nodes = {}
    self.routes = {}
    self.time_tabler = time_tabler()
    # build routes first
    for route in self.route_info:
        for v_num, arr in enumerate(self.route_info[route]):
            r = router.router(route, v_num, arr)
            if r.has_model():
                if route not in self.routes:
                    self.routes[route] = {}
                self.routes[route][v_num] = r
            else:
                del r
            # add stops to the graph
            for i in range(1, len(arr) - 1):
                stopA = str(arr[i])
                stopB = str(arr[i + 1])
                if stopA not in self.nodes_map:
                    self.nodes_map[stopA] = {'backlinks': [],
                                             'links': {stopB},
                                             'weight': inf,
                                             't': 'm'}
                    self.nodes[stopA] = stop.stop(stopA, self.s_getter.get_stop_info(stopA))
                else:
                    self.nodes_map[stopA]['links'].add(stopB)
                    self.nodes[stopA].add_link(stopB)
                # create an entry for the next stop if it is not yet in the graph
                if stopB not in self.nodes_map:
                    self.nodes_map[stopB] = {'backlinks': [],
                                             'links': set(),
                                             'weight': inf,
                                             't': 'm'}
                    self.nodes[stopB] = stop.stop(stopB, self.s_getter.get_stop_info(stopB))
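# Sketch of inspecting the graph built above, assuming 'g' is an instance of
# the enclosing class (whose name is not shown in this snippet):
#
# stop_id = next(iter(g.nodes_map))
# entry = g.nodes_map[stop_id]
# print(entry['links'])      # set of stop ids reachable directly from this stop
# print(entry['weight'])     # starts at inf, presumably for shortest-path search
# print(entry['backlinks'])  # empty list placeholder for reverse links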
def __init__(self, build=True):
    """
    Gather all of the resources needed. If build is True,
    load all of the stops and stop link models.
    """
    import json
    import os
    import pickle
    from dbanalysis.classes import neural_stop
    from dbanalysis.classes import route_selector
    from dbanalysis.classes import time_tabler_refac2 as time_tabler
    from dbanalysis.stop_tools import stop_getter, stop_finder

    self.BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    with open(self.BASE_DIR + '/resources/trimmed_routes.json', 'r') as handle:
        self.routes = json.loads(handle.read())
    with open(self.BASE_DIR + '/resources/new_stops_dict.bin', 'rb') as handle:
        self.stops_dict = pickle.load(handle)
    self.stop_getter = stop_getter()
    self.selector = route_selector.selector()
    self.stop_finder = stop_finder()
    self.time_tabler = time_tabler.time_tabler()
    # put unavailable routes here
    self.unavailable_routes = []
    self.nodes = {}
    print(self.selector.unavailable_routes)
    if not build:
        return

    count = 0
    all_models = set(os.listdir(self.BASE_DIR + '/resources/models/neural_models3'))
    # For every route and variation, load the stop link models in that list,
    # provided they haven't already been loaded.
    for route in self.routes:
        for v_num, variation in enumerate(self.routes[route]):
            for i in range(1, len(variation) - 1):
                stopA = str(variation[i])
                stopB = str(variation[i + 1])
                distance = self.stop_getter.get_stop_distance(stopA, stopB)
                if stopA not in self.nodes:
                    # load a stop object and add this link to it
                    stop_object = neural_stop.stop(stopA, self.stops_dict[stopA])
                    stop_object.add_link(stopB, distance)
                    # add this stop's walking links
                    stop_object.get_foot_links()
                    self.nodes[stopA] = stop_object
                else:
                    # if the stop already exists, just add another link
                    self.nodes[stopA].add_link(stopB, distance)
            # The stops at the end of the routes turn out to be both arrival and
            # departure stops, so the last-stop handling below is left disabled:
            # if str(variation[-1]) not in self.nodes:
            #     print('adding last stop', str(variation[-1]))
            #     stop_object = neural_stop.stop(str(variation[-1]), self.stops_dict[str(variation[-1])])
            #     stop_object.get_foot_links()
            #     self.nodes[stopA] = stop_object
    print(count)
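# Directory layout implied by the paths in the constructor above (a sketch;
# the exact placement of this file within the package is an assumption --
# BASE_DIR resolves to two directory levels above it):
#
# <BASE_DIR>/
#     resources/
#         trimmed_routes.json
#         new_stops_dict.bin
#         models/
#             neural_models3/   # stop link models listed into all_models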