def on_get(self, req, resp):
    params = req.params
    try:
        x_min = float(params['x_min'])
        x_max = float(params['x_max'])
        y_min = float(params['y_min'])
        y_max = float(params['y_max'])
    except (KeyError, ValueError):
        # Missing or non-numeric bounding-box parameters.
        resp.status = falcon.HTTP_404
        return
    # Clamp the requested bounding box to a maximum extent.
    x_gap_max = 1.5
    y_gap_max = 0.5
    x_min, x_max, y_min, y_max = limit_range(x_min, x_max, y_min, y_max,
                                             x_gap_max, y_gap_max)
    connection = psycopg2.connect(dbname=database, user=database_user,
                                  password=database_password)
    cursor = connection.cursor()
    postgis.register(cursor)
    cursor.execute(
        """
        select commune, class_0_sum, class_1_sum, class_2_sum, class_3_sum,
               total, nom, wikipedia, surf_m2, geom
        from commune_merge
        where geom && st_makeenvelope(%s, %s, %s, %s, 4326);
        """, (x_min, y_min, x_max, y_max))
    data = cursor.fetchall()
    cursor.close()
    connection.close()
    features = []
    for data_commune in data:
        features.append({
            'type': 'Feature',
            'properties': {
                'commune': data_commune[0],
                'class_0_sum': data_commune[1],
                'class_1_sum': data_commune[2],
                'class_2_sum': data_commune[3],
                'class_3_sum': data_commune[4],
                'total': data_commune[5],
                'nom': data_commune[6],
                'wikipedia': data_commune[7],
                'surf_m2': float(data_commune[8]),
            },
            'geometry': data_commune[9].geojson
        })
    resp.status = falcon.HTTP_200
    resp.set_header('Access-Control-Allow-Origin', '*')
    resp.body = json.dumps({
        'type': 'FeatureCollection',
        'features': features,
    })

def on_get(self, req, resp):
    # Build the departement-level GeoJSON once and cache it on disk.
    if not os.path.isfile(departement_stats_filename):
        connection = psycopg2.connect(dbname=database, user=database_user,
                                      password=database_password)
        cursor = connection.cursor()
        postgis.register(cursor)
        cursor.execute("""
            select departement, class_0_sum, class_1_sum, class_2_sum,
                   class_3_sum, total, nom, nuts3, wikipedia, geom
            from departement_merge;
            """)
        data = cursor.fetchall()
        cursor.close()
        connection.close()
        features = []
        for data_departement in data:
            geom = data_departement[9]
            if not geom:
                # Skip departements without a geometry.
                continue
            features.append({
                'type': 'Feature',
                'properties': {
                    'departement': data_departement[0],
                    'class_0_sum': data_departement[1],
                    'class_1_sum': data_departement[2],
                    'class_2_sum': data_departement[3],
                    'class_3_sum': data_departement[4],
                    'total': data_departement[5],
                    'nom': data_departement[6],
                    'nuts3': data_departement[7],
                    'wikipedia': data_departement[8],
                },
                'geometry': geom.geojson
            })
        content = json.dumps({
            'type': 'FeatureCollection',
            'features': features,
        })
        with open(departement_stats_filename, 'w') as f:
            f.write(content)
    with open(departement_stats_filename, 'r') as f:
        content = f.read()
    resp.status = falcon.HTTP_200
    resp.set_header('Access-Control-Allow-Origin', '*')
    resp.body = content

def __getattr__(self, attr):
    if not self.obj:
        db = PostgresqlExtDatabase(
            self.prefix + config.DB_NAME,
            user=config.get('DB_USER'),
            password=config.get('DB_PASSWORD'),
            host=config.get('DB_HOST'),
            port=config.get('DB_PORT'),
            autorollback=True,
        )
        self.initialize(db)
        postgis.register(self.obj.get_cursor())
    return getattr(self.obj, attr)

def run(self):
    print("my_thread_id:", self.threadID)
    #conn = psycopg2.connect(PSQL_URI)
    #cur = conn.cursor()
    register(self.cur)
    for time_start in self.time_start_list:
        file_name = self.file_path + 'minute_' + str(time_start) + '.geojson'
        if os.path.exists(file_name):
            with open(file_name) as f:
                data = json.load(f)
            insert_data = []
            insert_string_block = "INSERT INTO " + table_name + \
                "(gid, unix_time_start, interval_in_second, speed_median, speed_mean, car_cnt_20mins, class) " + \
                "VALUES "
            unix_time_start = zero_oclock + (time_start - 1) * 60
            for feature in data['features']:
                if len(feature['geometry']['coordinates']) > 1:
                    gid = feature['properties']['gid']
                    speed_median = feature['properties']['median_speed']
                    speed_mean = feature['properties']['mean_speed']
                    car_cnt = feature['properties']['car_cnt']
                    class_osm = int(float(feature['properties']['class']))
                    insert_data.append(
                        (gid, unix_time_start, 60 * TIME_WINDOW, speed_median,
                         speed_mean, car_cnt, class_osm))
            if len(insert_data) > 0:
                try:
                    """
                    self.cur.execute(insert_string, insert_data)
                    self.conn.commit()
                    """
                    # Batch insert: mogrify every row, then send a single
                    # multi-values INSERT statement.
                    args_str = ','.join(
                        self.cur.mogrify("(%s,%s,%s,%s,%s,%s,%s)", x).decode()
                        for x in insert_data)
                    self.cur.execute(insert_string_block + args_str)
                    self.conn.commit()
                except Exception:
                    # Rollback in case there is any error
                    exc_type, exc_value, exc_tb = sys.exc_info()
                    print("caught", exc_type, exc_value, exc_tb)
                    self.conn.rollback()
    print("Insert done! Thread id = ", self.threadID)
    self.cur.close()
    self.conn.close()

def initialize_connection(self, conn):
    if not self.postgis_registered:
        postgis.register(conn.cursor())
        self.postgis_registered = True

def run(self):
    print("my_thread_id:", self.threadID)
    #conn = psycopg2.connect(PSQL_URI)
    #cur = conn.cursor()
    register(self.cur)
    for car_id in self.car_list:
        file_name = self.file_path + 'one_car_traj_' + str(car_id) + '.geojson'
        if os.path.exists(file_name):
            if car_id % 100 == 0:
                print("car_id:", car_id, ",")
            with open(file_name) as f:
                data = json.load(f)
            insert_data = []
            insert_string_block = "INSERT INTO " + table_name + \
                "(unix_time, car_id, time_string, osm_id, gid, percent, unix_time_pre, gid_pre, pick_or_drop, speed, date, the_geom) " + \
                "VALUES "
            for feature in data['features']:
                if len(feature['geometry']['coordinates']) > 1:
                    try:
                        dist = feature['properties']['Dist']
                    except KeyError:
                        # Skip features without a Dist property.
                        continue
                    time_current = feature['properties']['ABSTIME']
                    time_current_string = feature['properties']['TIME']
                    osm_id = feature['properties']['OSM_ID']
                    gid_current = feature['properties']['GID']
                    percent_current = feature['properties']['Percent']
                    #percent_pre = feature['properties']['Percent_pre']
                    gid_pre = feature['properties']['GID_pre']
                    pick_or_drop = feature['properties']['Pick_or_drop']
                    time_pre = feature['properties']['ABS_TIME_PRE']
                    coors = feature['geometry']['coordinates']
                    line_seg = [tuple(item) for item in coors]
                    insert_string = "INSERT INTO " + table_name + \
                        "(unix_time, car_id, time_string, osm_id, gid, percent, unix_time_pre, gid_pre, pick_or_drop, speed, date, the_geom) " + \
                        "VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
                    """
                    insert_data = (time_current, car_id, time_current_string, osm_id, gid_current,
                                   percent_current, time_pre, gid_pre, pick_or_drop,
                                   dist / (time_current - time_pre), date,
                                   LineString(line_seg, srid=4326))
                    """
                    insert_data.append(
                        (time_current, car_id, time_current_string, osm_id,
                         gid_current, percent_current, time_pre, gid_pre,
                         pick_or_drop, dist / (time_current - time_pre), date,
                         LineString(line_seg, srid=4326)))
            if len(insert_data) > 0:
                try:
                    """
                    self.cur.execute(insert_string, insert_data)
                    self.conn.commit()
                    """
                    # Batch insert: mogrify every row, then send a single
                    # multi-values INSERT statement.
                    args_str = ','.join(
                        self.cur.mogrify(
                            "(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                            x).decode()
                        for x in insert_data)
                    self.cur.execute(insert_string_block + args_str)
                    self.conn.commit()
                except Exception:
                    # Rollback in case there is any error
                    exc_type, exc_value, exc_tb = sys.exc_info()
                    print("caught", exc_type, exc_value, exc_tb)
                    self.conn.rollback()
    print("Insert done! Thread id = ", self.threadID)
    self.cur.close()
    self.conn.close()

database_host = config['main']['database_host']
database_port = config['main']['database_port']
database_name = config['main']['database_name']
database_username = config['main']['database_username']
database_password = config['main']['database_password']

assert database_host == 'localhost'
assert database_port == '1234'

nb_hours = int(sys.argv[1])
nb_worker = int(sys.argv[2])
id_worker = int(sys.argv[3])

duration_in_seconds = nb_hours * 3600
start_time = time.time()
end_time = start_time + duration_in_seconds

# Open connection
connection = psycopg2.connect(dbname=database_name, user=database_username,
                              password=database_password)
cursor = connection.cursor()
postgis.register(cursor)
cursor.execute('set enable_seqscan = off;')

while time.time() < end_time:
    compute_angle.process_buildings(connection, cursor, nb_worker, id_worker)

cursor.close()
connection.close()

def on_get(self, req, resp):
    params = req.params
    try:
        x_min = float(params['x_min'])
        x_max = float(params['x_max'])
        y_min = float(params['y_min'])
        y_max = float(params['y_max'])
    except (KeyError, ValueError):
        # Missing or non-numeric bounding-box parameters.
        resp.status = falcon.HTTP_404
        return
    # Clamp the requested bounding box to a maximum extent.
    x_gap_max = 0.5
    y_gap_max = 0.1
    x_min, x_max, y_min, y_max = limit_range(x_min, x_max, y_min, y_max,
                                             x_gap_max, y_gap_max)
    connection = psycopg2.connect(dbname=database, user=database_user,
                                  password=database_password)
    cursor = connection.cursor()
    postgis.register(cursor)
    cursor.execute(
        """
        select id_osm, geom, class_0, class_1, class_2, class_3
        from buildings
        where geom && st_makeenvelope(%s, %s, %s, %s, 4326);
        """, (x_min, y_min, x_max, y_max))
    data = cursor.fetchall()
    cursor.close()
    connection.close()
    features = []
    if len(data) != 0:
        id_osm, geom, class_0, class_1, class_2, class_3 = zip(*data)
        probas = np.transpose(
            np.array([class_0, class_1, class_2, class_3], dtype=float))
        # Buildings with any missing class probability are flagged as unknown.
        unknown_mask = np.isnan(probas).sum(1).astype(bool)
        probas_num = np.nan_to_num(probas)
        class_max = np.argmax(probas_num, axis=1)
        class_max_score = np.max(probas_num, axis=1)
        # Keep the most probable class when its score exceeds 0.5; use -1 for
        # low-confidence predictions and -2 for unknown buildings.
        class_estimation = class_max * (class_max_score > 0.5) - (
            class_max_score <= 0.5)
        class_estimation[unknown_mask] = -2
        for i in range(len(data)):
            features.append({
                'type': 'Feature',
                'properties': {
                    'id_osm': id_osm[i],
                    'class_0': probas[i, 0],
                    'class_1': probas[i, 1],
                    'class_2': probas[i, 2],
                    'class_3': probas[i, 3],
                    'class_estimation': int(class_estimation[i]),
                },
                'geometry': geom[i].geojson
            })
    resp.status = falcon.HTTP_200
    resp.set_header('Access-Control-Allow-Origin', '*')
    resp.body = json.dumps({
        'type': 'FeatureCollection',
        'features': features,
    })

import psycopg2
import pytest
from postgis import (GeometryCollection, LineString, MultiLineString,
                     MultiPoint, MultiPolygon, Point, Polygon, register)

db = psycopg2.connect(dbname="test")
cur = db.cursor()
register(db)

geoms = [
    Point, LineString, Polygon, MultiPoint, MultiLineString, MultiPolygon,
    GeometryCollection
]


def pytest_configure(config):
    tpl = 'CREATE TABLE IF NOT EXISTS {} ("geom" geometry({}) NOT NULL)'
    for geom in geoms:
        name = geom.__name__
        cur.execute(tpl.format(name.lower(), name))


def pytest_unconfigure(config):
    for geom in geoms:
        cur.execute('DROP TABLE {}'.format(geom.__name__.lower()))
    db.commit()
    db.close()


@pytest.fixture