Example #1
    def phi_generation_sql(self):
        origins = defaultdict(dict)
        with server_side_cursors(connection):
            cursor = connection.cursor()
            count = PhiGenerator.route_count()

            gen_tt = ConsoleProgress(count, message="Computing Phi")
            sql_query = """
            SELECT r.orig_taz, r.dest_taz, r.od_route_index,
            array(
              SELECT (SELECT vector_index FROM orm_experimentsensor es WHERE es.sensor_id = s.id LIMIT 1)
              FROM orm_sensor s
            WHERE ST_Distance(r.geom_dist, s.location_dist) < 10 AND s.road_type = 'Freeway'
            ) AS sensors
            FROM filtered_routes r
            WHERE r.od_route_index < %(num_routes)s
            """
            cursor.execute(sql_query, {'num_routes': self.num_routes})
            for row in cursor:
                gen_tt.increment_progress()
                o, d, rt, rs = row
                origins[(o, d)][rt] = rs
            gen_tt.finish()

        return origins
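
A note on the structure built here: phi_generation_sql returns a mapping from each (origin_taz, destination_taz) pair to a dict of route index -> sensor vector indices. A minimal consumption sketch, assuming a hypothetical PhiGenerator() instantiation (the constructor is not shown in these examples):

phi = PhiGenerator().phi_generation_sql()
for (o, d), routes in phi.items():
    for route_index, sensor_indices in routes.items():
        # sensor_indices lists the vector_index of every freeway sensor
        # within 10 units of the route geometry.
        print('%s -> %s, route %s: %d sensors' % (o, d, route_index, len(sensor_indices)))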
Example #2
    def data(self):
        if self.info:
            return self.info
        else:
            load_phi_progress = ConsoleProgress(1, message="Loading phi")
            if generate_phi:
                # TODO: replace with experiment id
                self.info = generate_phi.phi_generation_sql(1)
            else:
                self.info = pickle.load(open(self.__class__.data_prefix+'/phi.pickle'))
            load_phi_progress.finish()
            return self.info
Example #3
 def data(self):
     if self.info:
         return self.info
     else:
         load_phi_progress = ConsoleProgress(1, message="Loading phi")
         if generate_phi:
             # TODO: replace with experiment id
             self.info = generate_phi.phi_generation_sql(1)
         else:
             self.info = pickle.load(
                 open(self.__class__.data_prefix + '/phi.pickle'))
         load_phi_progress.finish()
         return self.info
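
Both versions of data() above implement the same memoization pattern: the first call populates self.info (either by generating phi over SQL or by unpickling phi.pickle), and every later call returns the cached object. A hypothetical usage sketch, assuming an instance `loader` of the surrounding class:

first = loader.data()   # loads or generates phi, shows the progress bar once
second = loader.data()  # served from the self.info cache, no reload
assert first is second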
Example #4
 def __init__(self, compute_trip_counts=False):
     self.flow = None
     if compute_trip_counts:
         raise NotImplementedError
     else:
         self.rad, TAZ = np.zeros((self.N_TAZ, self.N_TAZ)), np.zeros(self.N_TAZ)
         load_radiation_progress = ConsoleProgress(self.N_TAZ*self.N_TAZ, message="Loading radiation model heuristic")
         with open(self.data_prefix+'/trips.csv') as file:
             reader = csv.reader(file, delimiter=',')
             firstline = file.readline()   # skip the header line
             for prog, row in enumerate(reader):
                 self.rad[int(row[2]), int(row[3])] = int(float(row[6]))
                 load_radiation_progress.update_progress(prog)
         load_radiation_progress.finish()
Example #5
def import_routes():
    taz_lookup = pickle.load(open(data_prefix+'/lookup.pickle'))

    def compute_route_time(route):
        travel_time = 0
        for leg in route['legs']:
            travel_time += leg['duration']['value']
        return travel_time

    def getRoutesAndSave(o, d):
        data = json.load(open(data_prefix+'/data/%s_%s.json' % (o, d)))
        for route_index, route in enumerate(data['routes']):
            gpolyline = route['overview_polyline']['points']
            linestring = google_lines.decode_line(gpolyline)
            linestring.set_srid(canonical_projection)
            linestring_dist = linestring.clone()
            linestring_dist.transform(google_projection)
            route_object = Route(geom=linestring, geom_dist=linestring_dist,
                    summary=route['summary'], origin_taz=taz_lookup[o],
                    destination_taz=taz_lookup[d],
                    travel_time=compute_route_time(route),
                    od_route_index=route_index,
                    json_contents=json.dumps(route))
            route_object.save()

    # Get list of origins
    files = os.listdir(data_prefix+'/data')
    origins = {}
    for file in files:
        file = file.replace('.json', '')
        o, d = map(int, file.split('_'))
        if o not in origins:
            origins[o] = {}
        if d not in origins[o]:
            origins[o][d] = []

    # Load all routes from origins
    gen_tt = ConsoleProgress(N_TAZ*(N_TAZ-1), message="Saving to database.")
    for index_o, o in enumerate(origins):
        for index_d, d in enumerate(origins[o]):
            getRoutesAndSave(o, d)
            gen_tt.increment_progress()
    gen_tt.finish()
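
The directory scan above relies on a filename convention of '<origin>_<dest>.json' under data/; stripping the extension and splitting on the underscore recovers the OD pair. For instance:

o, d = map(int, '12_34.json'.replace('.json', '').split('_'))
assert (o, d) == (12, 34)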
Example #6
    def phi_generation_sql(self):
        origins = defaultdict(dict)
        with server_side_cursors(connection):
            cursor = connection.cursor()
            count = PhiGenerator.route_count()

            gen_tt = ConsoleProgress(count, message="Computing Phi")
            sql_query = """
            SELECT r.orig_taz, r.dest_taz, r.od_route_index,
            array(
              SELECT (SELECT vector_index FROM orm_experimentsensor es WHERE es.sensor_id = s.id LIMIT 1)
              FROM orm_sensor s
            WHERE ST_Distance(r.geom_dist, s.location_dist) < 10 AND s.road_type = 'Freeway'
            ) AS sensors
            FROM filtered_routes r
            WHERE r.od_route_index < %(num_routes)s
            """
            cursor.execute(sql_query, {'num_routes': self.num_routes})
            for row in cursor:
                gen_tt.increment_progress()
                o, d, rt, rs = row
                origins[(o, d)][rt] = rs
            gen_tt.finish()

        return origins
Example #7
def main():
    parser = argparse.ArgumentParser(description='Solve Tomography problem with radiation model.')
    parser.add_argument('--verbose', dest='verbose',
                       const=True, default=False, action='store_const',
                       help='Show verbose output (default: silent)')
    parser.add_argument('--data-prefix', dest='prefix', nargs='?', const='data',
                       default='.', help='Set prefix for data files (default: .)')
    parser.add_argument('--log', dest='log', nargs='?', const='INFO',
                       default='WARN', help='Set log level (default: WARN)')
    args = parser.parse_args()
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    if args.log in ACCEPTED_LOG_LEVELS:
        logging.basicConfig(level=getattr(logging, args.log))
    data_prefix = args.prefix.rstrip('/')  # drop any trailing slash
    script_progress = ConsoleProgress(N_TAZ, args.verbose, message='Processing travel times for routes')
    with open(data_prefix+'/travel_times.csv', 'wb') as csvfile:
        ttwriter = csv.writer(csvfile, delimiter=',')
        ttwriter.writerow(['origin_index','destination_index','route_index','travel_time'])
        for o in xrange(N_TAZ):
            for d in xrange(N_TAZ):
                if o == d:
                    continue
                routes_for_od_pair = od_travel_time(data_prefix, o, d)
                for route_index, travel_time in enumerate(routes_for_od_pair):
                    ttwriter.writerow([o, d, route_index, travel_time])
            script_progress.update_progress(o+1)
    script_progress.finish()
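
For reference, a self-contained sketch of how the nargs='?' flags declared in main() resolve; parse_args is fed an explicit argv list here so the example runs standalone:

import argparse

parser = argparse.ArgumentParser(description='Solve Tomography problem with radiation model.')
parser.add_argument('--verbose', dest='verbose', const=True, default=False, action='store_const')
parser.add_argument('--data-prefix', dest='prefix', nargs='?', const='data', default='.')
parser.add_argument('--log', dest='log', nargs='?', const='INFO', default='WARN')

# A bare '--data-prefix' falls back to its const ('data'); omitting the flag
# entirely yields the default ('.'), and '--log' behaves the same way.
args = parser.parse_args(['--verbose', '--data-prefix', '--log', 'DEBUG'])
assert args.verbose and args.prefix == 'data' and args.log == 'DEBUG'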
Example #8
def phi_generation_sql(experiment_id):
    origins = defaultdict(dict)
    with server_side_cursors(connection):
        cursor = connection.cursor()

        gen_tt = ConsoleProgress(N_ROUTES, message="Computing Phi")
        sql_query = """
        SELECT om.matrix_id, dm.matrix_id, r.od_route_index,
        array(
          SELECT (SELECT vector_index FROM orm_experimentsensor es WHERE es.sensor_id = s.id AND es.experiment_id = %s LIMIT 1)
          FROM orm_sensor s
          WHERE ST_Distance(r.geom_dist, s.location_dist) < 10
        ) AS sensors
        FROM orm_route r, orm_matrixtaz om, orm_matrixtaz dm
        WHERE r.origin_taz = om.taz_id AND r.destination_taz = dm.taz_id
        """
        cursor.execute(sql_query, (experiment_id,))
        for row in cursor:
            gen_tt.increment_progress()
            o, d, rt, rs = row
            origins[(o, d)][rt] = rs
        gen_tt.finish()

    return origins
Example #9
 def generate_od_travel_time_pairs(self):
     gen_tt = ConsoleProgress(self.N_TAZ, message="Loading travel times")
     od_pair_matrix = [[{} for x in range(self.N_TAZ)] for y in range(self.N_TAZ)]
     with open(self.__class__.data_prefix+'/travel_times.csv') as fopen:
         reader = csv.reader(fopen, delimiter=',')
         # skip the header line
         firstline = fopen.readline()
         for row in reader:
             od_pair_matrix[int(row[0])][int(row[1])][int(row[2])] = float(row[3])
             gen_tt.update_progress(int(row[0]))
     gen_tt.finish()
     return od_pair_matrix
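
A hypothetical lookup against the matrix built above, assuming an instance `model` of the surrounding class (not shown in the snippet); each cell od_pair_matrix[o][d] is a dict mapping route index to travel time, and 12/34 are arbitrary sample indices:

od_pair_matrix = model.generate_od_travel_time_pairs()
times_for_pair = od_pair_matrix[12][34]  # {route_index: travel_time}
fastest = min(times_for_pair, key=times_for_pair.get)
print('fastest route index: %d (%.0f s)' % (fastest, times_for_pair[fastest]))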
Example #10
    def __init__(self, compute=True, phi=None, condensed_map=None, generate_phi=None, use_travel_times=False):
        if compute:
            # Load the data.
            data_progress = ConsoleProgress(1, message="Loading phi")
            self.condensed_map = condensed_map
            # TODO: replace with experiment id
            data = generate_phi.phi_generation_sql(1)
            phi.set_data(data)
            data_progress.finish()
            self.generate_routing_matrix(data, use_travel_times)
            sio.savemat(self.__class__.data_prefix+'/X_matrix.mat', {
                'X': self.X,
                'x': self.x,
                'U': self.U
            })
        else:
            x_load_progress = ConsoleProgress(1, message="Loading X matrix from file")
            loaded_data = sio.loadmat(self.__class__.data_prefix+'/X_matrix.mat')
            self.X = loaded_data['X']
            self.U = loaded_data['U']
            self.x = loaded_data['x']
            x_load_progress.finish()
        self.phi = phi
Example #11
 def __init__(self, compute_trip_counts=False):
     self.flow = None
     if compute_trip_counts:
         raise NotImplementedError
     else:
         self.rad, TAZ = np.zeros(
             (self.N_TAZ, self.N_TAZ)), np.zeros(self.N_TAZ)
         load_radiation_progress = ConsoleProgress(
             self.N_TAZ * self.N_TAZ,
             message="Loading radiation model heuristic")
         with open(self.data_prefix + '/trips.csv') as file:
             reader = csv.reader(file, delimiter=',')
             firstline = file.readline()  # skip the first line
             for prog, row in enumerate(reader):
                 self.rad[int(row[2]), int(row[3])] = int(float(row[6]))
                 load_radiation_progress.update_progress(prog)
         load_radiation_progress.finish()
Example #12
def import_routes():
    taz_lookup = pickle.load(open(data_prefix+'/lookup.pickle'))

    def compute_route_time(route):
        travel_time = 0
        for leg in route['legs']:
            travel_time += leg['duration']['value']
        return travel_time

    def getRoutesAndSave(o, d):
        data = json.load(open(data_prefix+'/data/%s_%s.json' % (o, d)))
        for route_index, route in enumerate(data['routes']):
            gpolyline = route['overview_polyline']['points']
            linestring = google_lines.decode_line(gpolyline)
            linestring.set_srid(canonical_projection)
            linestring_dist = linestring.clone()
            linestring_dist.transform(google_projection)
            route_object = Route(geom=linestring, geom_dist=linestring_dist,
                    summary=route['summary'], origin_taz=taz_lookup[o],
                    destination_taz=taz_lookup[d],
                    travel_time=compute_route_time(route),
                    od_route_index=route_index,
                    json_contents=json.dumps(route))
            route_object.save()

    # Get list of origins
    files = os.listdir(data_prefix+'/data')
    origins = {}
    for file in files:
        file = file.replace('.json', '')
        o, d = map(int, file.split('_'))
        if o not in origins:
            origins[o] = {}
        if d not in origins[o]:
            origins[o][d] = []

    # Load all routes from origins
    gen_tt = ConsoleProgress(N_TAZ*(N_TAZ-1), message="Saving to database.")
    for index_o, o in enumerate(origins):
        for index_d, d in enumerate(origins[o]):
            getRoutesAndSave(o, d)
            gen_tt.increment_progress()
    gen_tt.finish()
Example #13
def phi_generation_sql(experiment_id):
    origins = defaultdict(dict)
    with server_side_cursors(connection):
        cursor = connection.cursor()

        gen_tt = ConsoleProgress(N_ROUTES, message="Computing Phi")
        sql_query = """
        SELECT om.matrix_id, dm.matrix_id, r.od_route_index,
        array(
          SELECT (SELECT vector_index FROM orm_experimentsensor es WHERE es.sensor_id = s.id AND es.experiment_id = %s LIMIT 1)
          FROM orm_sensor s
          WHERE ST_Distance(r.geom_dist, s.location_dist) < 10
        ) AS sensors
        FROM orm_route r, orm_matrixtaz om, orm_matrixtaz dm
        WHERE r.origin_taz = om.taz_id AND r.destination_taz = dm.taz_id
        """
        cursor.execute(sql_query, (experiment_id,))
        for row in cursor:
            gen_tt.increment_progress()
            o, d, rt, rs = row
            origins[(o, d)][rt] = rs
        gen_tt.finish()

    return origins
Example #14
 def generate_routing_matrix(self, data, use_travel_times):
     """
     Given the route index associated with each OD pair, generate a routing matrix.
     """
     self.X = lil_matrix((self.N_SENSORS, self.N_TAZ*self.N_TAZ))
     self.x = np.zeros(self.N_ROUTES_CONDENSED)
     x_ind = 0
     od_back_map = {}  # maps each condensed route index back to its (origin, destination) pair
     if use_travel_times:
         od_pair_travel_times = self.generate_od_travel_time_pairs()
     x_gen_progress = ConsoleProgress(self.N_ROUTES, message="Generating X and U matrices")
     self.U = lil_matrix((self.N_TAZ_CONDENSED*(self.N_TAZ_CONDENSED-1), self.N_ROUTES_CONDENSED))
     # For efficiency, the if statement is surrounding these loops so it doesn't check every iteration
     if use_travel_times:
         for i in np.arange(self.N_TAZ):
             for j in np.arange(self.N_TAZ):
                 if data[i].get(j):
                     if data[i][j]:
                         travel_times = od_pair_travel_times[i][j]
                         mean_tt = np.mean(travel_times.values())
                         std_tt = np.std(travel_times.values())
                         if std_tt == 0:
                             std_tt = 1
                         travel_times = {rt : (float(tt-mean_tt) / std_tt) for rt, tt in travel_times.items()}
                         travel_times = {rt : sigmoid(-tt) for rt, tt in travel_times.items()}
                         normalizer = float(sum(travel_times.values()))
                         travel_times = {rt : float(tt)/normalizer for rt, tt in travel_times.items()}
                         for route, sensors in enumerate(data[i][j]):
                             tt = travel_times[route]
                             for s in sensors:
                                 self.X[s,i*self.N_TAZ+j] += tt
                             if i in self.condensed_map and j in self.condensed_map:
                                 od_back_map[x_ind] = (i, j)
                                 i_ind = self.condensed_map[i]
                                 j_ind = self.condensed_map[j]
                                 self.x[x_ind] = tt
                                 row_index = i_ind*(self.N_TAZ_CONDENSED-1)+j_ind
                                 if j_ind > i_ind:
                                     row_index -= 1
                                 self.U[row_index, x_ind] = 1
                                 x_ind = x_ind + 1
                             x_gen_progress.increment_progress()
     else:
         for i in np.arange(self.N_TAZ):
             for j in np.arange(self.N_TAZ):
                 if data[i].get(j):
                     if data[i][j]:
                         for route, sensors in enumerate(data[i][j]):
                             if route == self.FIRST_ROUTE:
                                 for s in sensors:
                                     self.X[s,i*self.N_TAZ+j] = 1
                                 self.x[x_ind] = 1
                             if i in self.condensed_map and j in self.condensed_map:
                                 od_back_map[x_ind] = (i, j)
                                 i_ind = self.condensed_map[i]
                                 j_ind = self.condensed_map[j]
                                 row_index = i_ind*(self.N_TAZ_CONDENSED-1)+j_ind
                                 if j_ind > i_ind:
                                     row_index -= 1
                                 self.U[row_index, x_ind] = 1
                                 x_ind += 1
                             x_gen_progress.increment_progress()
     pickle.dump(od_back_map, open(self.__class__.data_prefix+'/od_back_map.pickle', 'wb'))
     x_gen_progress.finish()
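
The row-index arithmetic in generate_routing_matrix is worth spelling out: i_ind*(N_TAZ_CONDENSED-1) + j_ind, decremented when j_ind > i_ind, packs the N*(N-1) off-diagonal OD pairs into consecutive rows of U. A small standalone check of that scheme, with N = 4 as a stand-in for N_TAZ_CONDENSED:

N = 4  # stand-in for N_TAZ_CONDENSED
rows = []
for i_ind in range(N):
    for j_ind in range(N):
        if i_ind == j_ind:
            continue
        row_index = i_ind * (N - 1) + j_ind
        if j_ind > i_ind:
            row_index -= 1
        rows.append(row_index)
assert sorted(rows) == list(range(N * (N - 1)))  # every row hit exactly once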
Example #15
import logging
import pickle

import numpy as np

# ConsoleProgress comes from the project's own progress utilities; its import is not shown here.

logging.basicConfig(level=logging.DEBUG)

data_prefix = ""
N_TAZ = 321
N_TAZ_CONDENSED = 150
N_ROUTES = 280691
N_ROUTES_CONDENSED = 60394
N_SENSORS = 1033
FIRST_ROUTE = 0

data = pickle.load(open('data/phi.pickle'))
phi_db = pickle.load(open('data/phi_condensed1402867630.8_db.pickle'))['phi']

phi_errors = 0
x_gen_progress = ConsoleProgress(N_TAZ * N_TAZ, message="Comparing phi")
for i in np.arange(N_TAZ):
    for j in np.arange(N_TAZ):
        if data[i].get(j):
            if data[i][j]:
                if data[i][j] != map(lambda x: sorted(x[1]), phi_db[(i, j)].iteritems()):
                    phi_errors += 1
                    print data[i][j]
                    print phi_db[(i, j)]
                    if phi_errors >= 10:
                        exit()
                x_gen_progress.increment_progress()
x_gen_progress.finish()
Example #16
}

origins = {}
for file in files:
    file = file.replace('.json', '')
    o, d = map(int, file.split('_'))
    if o not in selected_origins or d not in selected_origins:
        continue
    if o not in origins:
        origins[o] = {}
    if d not in origins[o]:
        origins[o][d] = []

num_routes = 0

gen_tt = ConsoleProgress(N_TAZ_TARGET, message="Computing Phi")
count = 0
for index_o, o in enumerate(origins):
    for index_d, d in enumerate(origins[o]):
        routes = getRoutes(o, d)
        num_routes += len(routes)
        for i, route in enumerate(routes):
            rs = route_sensors(route)
            origins[o][d].append(rs)
    gen_tt.update_progress(index_o)
out.close()
gen_tt.finish()

metadata['N_ROUTES_CONDENSED'] = num_routes
metadata['N_SENSORS_USED'] = len(first_leg_sensors)
print 'Sensors used:', metadata['N_SENSORS_USED']
Example #17
}

origins = {}
for file in files:
    file = file.replace('.json', '')
    o, d = map(int, file.split('_'))
    if o not in selected_origins or d not in selected_origins:
        continue
    if o not in origins:
        origins[o] = {}
    if d not in origins[o]:
        origins[o][d] = []

num_routes = 0

gen_tt = ConsoleProgress(N_TAZ_TARGET, message="Computing Phi")
count = 0
for index_o, o in enumerate(origins):
    for index_d, d in enumerate(origins[o]):
        routes = getRoutes(o, d)
        num_routes += len(routes)
        for i, route in enumerate(routes):
            rs = route_sensors(route)
            origins[o][d].append(rs)
    gen_tt.update_progress(index_o)
out.close()
gen_tt.finish()

metadata['N_ROUTES_CONDENSED'] = num_routes
metadata['N_SENSORS_USED'] = len(first_leg_sensors)
print 'Sensors used:', metadata['N_SENSORS_USED']
Example #18
# Read pre-computed trip counts for all OD pairs (simulated with radiation model)
rad_model = radiation_model.RadiationModel()
radflow = rad_model.as_flow()

Use_Real_Sensors = args.real_sensors

sensor_data = sensors.Sensors(Use_Real_Sensors, x_matrix, radflow)
radflow = sensor_data.get_flow()
sensors = sensor_data.sensors
yescounts = sensor_data.yescounts

#
# wlse_tomogravity solved with sparse least squares
#
lsqr_progress = ConsoleProgress(5, args.verbose, message='Solving LSQR')
bw = sensors - x_matrix.X*radflow
Xcsr = csr_matrix(x_matrix.X)
lsqr_progress.update_progress(1)
tw = lsqr(Xcsr[yescounts,:], bw[yescounts], damp=100)[0]

plt.hold(True)

# transform tw back to t
t = radflow[:,0] + tw
lsqr_progress.finish()

c_lsqr = x_matrix.X*t
c_rad = x_matrix.X*radflow[:,0]

indexes = [i*N_TAZ_CONDENSED+j for (i, j) in itertools.combinations(condensed_map.keys(), 2)]
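
The damped solve above uses scipy.sparse.linalg.lsqr, which minimizes ||Ax - b||^2 + damp^2 ||x||^2 and returns the solution vector as the first element of its result tuple. A toy system for reference:

import numpy as np
from scipy.sparse import csr_matrix
from scipy.sparse.linalg import lsqr

A = csr_matrix(np.array([[1.0, 0.0], [0.0, 2.0], [1.0, 1.0]]))
b = np.array([1.0, 4.0, 3.0])
x = lsqr(A, b, damp=0.1)[0]  # [0] picks out the solution, as in the snippet
print(x)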