def select_non_parsed_files_satellite():
    """Fetch the paths of every satellite file that has not been parsed yet.

    Opens a connection to DBN, registers PostGIS type adapters, runs the
    module-level select query and returns the first column of each row.
    """
    with connect(DBN) as conn:
        register(connection=conn)
        with conn.cursor() as curs:
            curs.execute(query_select_path_of_non_parsed_files_satellite)
            # Only the path column (index 0) is of interest.
            return [record[0] for record in curs.fetchall()]
def get_station(id):
    """Look up one station row by its identifier.

    Binds *id* to the parameterized query and returns the first matching
    row, or None when no station matches.
    """
    with connect(DBN) as conn:
        register(connection=conn)
        with conn.cursor() as curs:
            curs.execute(query_select_station_with_id, (id,))
            return curs.fetchone()
def __enter__(self, *args, **kwargs):
    """Check out a pooled connection at the start of a ``with`` block.

    Blocks on the class-level semaphore until a slot is free, borrows a
    connection from the shared pool, registers PostGIS adapters on it,
    reports its status, and returns it to the caller.
    """
    # Wait for a free slot before touching the pool.
    Connection._sem_remaining.acquire(blocking=True)
    self.conn = Connection._pool.getconn(*args, **kwargs)
    register(self.conn)
    self.get_connection_status(self.conn)
    return self.conn
def get_db_connection(self):
    """Open a fresh psycopg2 connection from this object's POSTGRES_* settings.

    PostGIS adapters are registered before the connection is handed back;
    the caller owns the connection and is responsible for closing it.
    """
    connection = psycopg2.connect(
        host=self.POSTGRES_HOST,
        database=self.POSTGRES_DATABASE,
        user=self.POSTGRES_USER,
        password=self.POSTGRES_PASS,
    )
    register(connection)
    return connection
def __init__(self):
    """Connect to the configured database and enable PostGIS types on it."""
    dsn_parts = {
        "host": DB_SERVER,
        "database": DB_NAME,
        "user": DB_USER,
        "password": DB_PASSWORD,
    }
    self.conn = psycopg2.connect(**dsn_parts)
    register(self.conn)
def __enter__(self):
    """Open a connection plus cursor and hand the cursor to the with-block.

    Both objects are kept on ``self`` so that ``__exit__`` can release
    them; PostGIS adapters are registered on the new connection first.
    """
    self.conn = psycopg2.connect(
        f"dbname='{DB_NAME}' user='******' password='******' host='{DB_HOST}' port='{DB_PORT}'"
    )
    register(self.conn)
    self.cur = self.conn.cursor()
    return self.cur
def new(host=None, port=None, user=None, password=None, database=None):
    """Create and return a new PostGIS-aware psycopg2 connection.

    Any parameter left as None falls through to psycopg2's own defaults
    (environment variables / service files).
    """
    connection = psycopg2.connect(
        host=host,
        port=port,
        user=user,
        password=password,
        database=database,
    )
    register(connection)
    return connection
def before(self, event, context):
    """Middleware hook: attach a records Database to the handler context.

    The database URL is read (decrypted) from the SSM parameter
    /bikefinder/<STAGE>/db_url, PostGIS adapters are registered on the
    underlying DB-API connection, and the database handle is exposed as
    both ``self.db`` and ``context.db``.
    """
    parameter = boto3.client('ssm').get_parameter(
        Name=f"/bikefinder/{os.environ.get('STAGE', '')}/db_url",
        WithDecryption=True,
    )
    self.db = records.Database(parameter['Parameter']['Value'])
    # self.records_db.sqalchemy_connection.dbapi_connection
    register(self.db.db.connection)
    context.db = self.db
    return event, context
def query(query):
    """Execute *query* and return the first column of its first row.

    The connection is committed after the fetch and is now always closed,
    even when execution fails — the original leaked the connection (and
    its cursor) on any exception.

    Raises whatever the driver raises for bad SQL, and TypeError when the
    query produces no rows (fetchone() returns None).
    """
    con = create_connection()
    try:
        register(con)
        cursor = con.cursor()
        cursor.execute(query)
        result = cursor.fetchone()[0]
        con.commit()
        return result
    finally:
        # BUG FIX: ensure the connection is released on every code path.
        con.close()
def setup_pg_connection(self):
    """Open a Postgres connection using the module-level ``set`` settings.

    Registers PostGIS adapters, stores a cursor on ``self.cursor`` and
    returns that cursor. NOTE(review): the connection object itself is
    not stored; it stays reachable via ``self.cursor.connection``.
    """
    credentials = {
        "host": set.host,
        "database": set.database,
        "user": set.user,
        "password": set.password,
    }
    conn = psycopg2.connect(**credentials)
    register(conn)
    self.cursor = conn.cursor()
    return self.cursor
def __call__(self, *args, **kwargs):
    """Return a newly created connection, which is not maintained by the _pool.

    BUG FIX: the original signature omitted ``self``, so when an instance
    was called the instance object was swallowed into ``*args`` and
    forwarded to psycopg2.connect() as its first positional argument (the
    DSN), making every call fail.
    """
    connection = psycopg2.connect(
        *args,
        **parse(DATABASE_CONFIG_PATH, 'postgresql',
                unwanted_fields=["minconn", "maxconn"]),
        **kwargs)
    register(connection)
    Connection.get_connection_status(connection)
    return connection
def insert_items(self, items, position=None):
    """Bulk-insert *items* using this instance's insert query.

    Every item is serialised with ``self.to_tuple(item, position)`` and
    the batch is written via execute_values in pages of 100 rows.
    """
    with connect(self.dbc) as conn:
        register(connection=conn)
        with conn.cursor() as curs:
            payload = [self.to_tuple(entry, position) for entry in items]
            extras.execute_values(curs, self.insert_query, payload,
                                  template=None, page_size=100)
# NOTE(review): this function's source was corrupted by credential
# redaction ("******") — the psycopg2.connect() DSN and the statements
# between it and the SQL string (cursor creation, the timestamp loop
# header, and the initialisation of taxis_ids / ts_i / temp / offset)
# were spliced out, so the text below is not runnable as written.
# Left byte-identical; reconstruct from version control before editing.
def generate_tracks(user): conn = psycopg2.connect("dbname=postgres user="******"select proj_track from tracks where taxi in " + taxis_ids + " and ts <= " + str( i) + " and ts > " + str(i - 10) # print(sql) cursor_psql.execute(sql) results = cursor_psql.fetchall() print(int((i - ts_i) / 10)) # print(len(results)) for row in results: # print(row) temp_row = [] if type(row[0]) is LineString: xy = row[0].coords first = 1 for (x, y) in xy: if first == 1: temp_row.append([x, y]) previousx = x previousy = y first = 0 elif math.sqrt( abs(x - previousx)**2 + abs(y - previousy)**2) < 50: temp_row.append([x, y]) previousx = x previousy = y temp.append(temp_row) offset.append(temp) print("Writting virus state") with open("files/tracks_inf.csv", "w", newline="") as f: writer = csv.writer(f) writer.writerows(offset) conn.close()
def init_connection():
    """Initialise the module-global ``connection`` from the config module.

    On a driver error the failure is reported and close_connection() is
    invoked to tear down any partial state.
    """
    global connection
    settings = {
        "dbname": config.DATABASE,
        "user": config.USER,
        "password": config.PASSWORD,
        "host": config.HOSTNAME,
    }
    try:
        connection = psycopg2.connect(**settings)
        register(connection)
    except psycopg2.Error as e:
        print("Database connexion error - %s:" % e.args[0])
        close_connection()
def select_non_downloaded_files_satellite():
    """Return a File object for every satellite file still awaiting download.

    Each database row is expected to be ``(modify_date, path)``; rows are
    mapped onto File instances with those two fields.
    """
    with connect(DBN) as conn:
        register(connection=conn)
        with conn.cursor() as curs:
            curs.execute(query_select_path_of_non_downloaded_files_satellite)
            return [File(path=path, modify_date=date)
                    for (date, path) in curs.fetchall()]
# NOTE(review): this function's source was corrupted by credential
# redaction ("******") — the psycopg2.connect() DSN and the statements
# between it and the first SQL string (including the cursor_psql
# creation) were spliced out, so the text below is not runnable as
# written. Left byte-identical; reconstruct from version control before
# editing.
def generate_offsets(user): #define the step in seconds of the animation step = 10 debug = True print("Generating offsets") conn = psycopg2.connect("dbname=postgres user="******"""select distinct taxi from tracks order by 1""" cursor_psql.execute(sql) results = cursor_psql.fetchall() taxis_x ={} taxis_y ={} ts_i = 1570665600 ts_f = ts_i + 10*8630 array_size = int(24*60*60/step) for row in results: taxis_x[int(row[0])] = np.zeros(array_size) taxis_y[int(row[0])] = np.zeros(array_size) if debug: print("query") for i in range(ts_i,ts_f,10): if(debug): print((ts_i-i)/10) sql = "select taxi,st_pointn(proj_track," + str(i) + "-ts) from tracks where ts<" + str(i) + " and ts+st_numpoints(proj_track)>" + str(i) cursor_psql.execute(sql) results = cursor_psql.fetchall() for row in results: x,y = row[1].coords taxis_x[int(row[0])][int((i-ts_i)/10)] = x taxis_y[int(row[0])][int((i-ts_i)/10)] = y offsets = [] for i in range(array_size): l = [] for j in taxis_x: l.append([taxis_x[j][i],taxis_y[j][i]]) offsets.append(l) print("Writting offsets") with open("files/offsets3.csv", "w", newline="") as f: writer = csv.writer(f) writer.writerows(offsets) conn.close()
def db_connect(self):
    """Connect to the PostGIS database selected by configuration.

    Reads BOUNDARY_SERVICE_CONFIG_MODE (default 'Debug'), resolves it in
    config_dict, opens a psycopg2 connection to that mode's
    POSTGIS_DATABASE_URI, registers PostGIS adapters, and stores the
    connection (``self.db``) and a cursor (``self.cursor``).

    Exits the process with an error message on an unknown mode name.
    """
    # FIX: dropped the dead local `config_mode = None` — only the
    # instance attribute self.config_mode is ever used.
    get_config_mode = environ.get('BOUNDARY_SERVICE_CONFIG_MODE', 'Debug')
    try:
        self.config_mode = config_dict[get_config_mode.capitalize()]
    except KeyError:
        exit('Error: Invalid BOUNDARY_SERVICE_CONFIG_MODE environment variable entry.')
    pguri = self.config_mode.POSTGIS_DATABASE_URI
    self.db = psycopg2.connect(pguri)
    register(self.db)
    self.cursor = self.db.cursor()
    if self.db:
        print("Connected to DB")
def __init__(self, db_name):
    """Open a connection to the RUIAN PostGIS database.

    Falls back to DATABASE_NAME_RUIAN when *db_name* is None. On a driver
    error a message is printed and ``self.connection`` is left unset.
    """
    try:
        if db_name is None:
            db_name = DATABASE_NAME_RUIAN
        self.connection = psycopg2.connect(
            host=DATABASE_HOST,
            database=db_name,
            port=DATABASE_PORT,
            user=DATABASE_USER,
            password=DATABASE_PASS)
        register(self.connection)
    except psycopg2.Error as e:
        result = "Error: Could not connect to database %s at %s:%s as %s" % (
            DATABASE_NAME_RUIAN, DATABASE_HOST, DATABASE_PORT, DATABASE_USER)
        print(str(result) + "\n" + str(e.pgerror))
def create_tables():
    """(Re)create the Restaurants table and load it from restaurantes.csv.

    Drops any existing table, creates it with a PostGIS POINT geom
    column, bulk-loads the CSV via COPY, then back-fills geom from
    lng/lat.
    """
    conn_string = "host='localhost' dbname='postgres' user='******' password='******'"
    conn = psycopg2.connect(conn_string)
    # BUG FIX: set_isolation_level() requires psycopg2's integer
    # constant; the original passed the string
    # 'ISOLATION_LEVEL_AUTOCOMMIT', which raises ValueError.
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    cursor = conn.cursor()
    register(conn)
    #creando la tabla
    drop_table = """DROP TABLE IF EXISTS Restaurants"""
    create_table = """ CREATE TABLE Restaurants ( id TEXT PRIMARY KEY, rating INTEGER, name TEXT, site TEXT, email TEXT, phone TEXT, street TEXT, city TEXT, state TEXT, lat FLOAT, lng FLOAT, geom geometry(POINT,4326) ); """
    update_table = """UPDATE restaurants SET geom = ST_SetSRID(ST_Point(lng,lat),4326)::geometry;"""
    cursor.execute(drop_table)
    cursor.execute(create_table)
    #cargando los datos
    with codecs.open('restaurantes.csv', 'r', encoding='utf-8', errors='ignore') as f:
        next(f)
        copy = "COPY Restaurants(id,rating,name,site,email,phone,street,city,state,lat,lng) FROM STDIN with csv"
        cursor.copy_expert(sql=copy, file=f)
    cursor.execute(update_table)
    cursor.close()
    conn.commit()
    # BUG FIX: the connection was previously leaked.
    conn.close()
# NOTE(review): this function's source was corrupted by credential
# redaction ("******") — the psycopg2.connect() DSN and the statements
# between it and the Porto SQL string (cursor creation, the loop header
# over timestamps, and the initialisation of taxis_ids / ts_i /
# infected) were spliced out, so the text below is not runnable as
# written. Left byte-identical; reconstruct from version control before
# editing.
def generate_infec_conc(user): conn = psycopg2.connect("dbname=postgres user="******"select count(distinct(t.taxi)) from tracks as t, cont_aad_caop2018 as f where t.taxi in " + taxis_ids + " and t.ts <= " + str( i ) + " and f.distrito = 'PORTO' and st_contains(f.proj_boundary,ST_StartPoint(t.proj_track))" sql_lisboa = "select count(distinct(t.taxi)) from tracks as t, cont_aad_caop2018 as f where t.taxi in " + taxis_ids + " and t.ts <= " + str( i ) + " and f.distrito = 'LISBOA' and st_contains(f.proj_boundary,ST_StartPoint(t.proj_track))" # print(sql_porto) cursor_psql.execute(sql_porto) results = cursor_psql.fetchall() cursor_psql.execute(sql_lisboa) results_lisboa = cursor_psql.fetchall() print(int((i - ts_i) / 10)) # print(results) infected.append([results[0][0], results_lisboa[0][0]]) print("Writting infected PORTO,LISBOA") with open("files/distrito_inf.csv", "w", newline="") as f: writer = csv.writer(f) writer.writerows(infected) conn.close()
def statistics():
    """Flask view: rating statistics for restaurants inside a circle.

    Query-string parameters: ``lat`` and ``lng`` (circle centre) and
    ``rad`` (radius). Returns a JSON-encoded dict with the row count,
    average rating, and population standard deviation.
    """
    lat1 = request.args['lat']
    lng1 = request.args['lng']
    rad1 = request.args['rad']
    #conn_string = "host={h} dbname={db} user={us} password={ps}".format(h=host,db=db,us=user,ps=pw)
    conn = psycopg2.connect(DB_URL)
    # BUG FIX: set_isolation_level() requires psycopg2's integer
    # constant, not the string name (which raises ValueError).
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    cursor = conn.cursor()
    register(conn)
    # SECURITY FIX: lat/lng/rad come straight from the request; bind them
    # as query parameters instead of str.format() to prevent SQL
    # injection.
    query = """SELECT COUNT(*) as Count_Inside_Of_Circle, AVG(rating) as Rating_Average,stddev_pop(rating) as Standard_Deviation from restaurants as A where ST_Point_Inside_Circle(a.geom,%s,%s,%s);"""
    cursor.execute(query, (lng1, lat1, rad1))
    columns = [column[0] for column in cursor.description]
    results = []
    for row in cursor.fetchall():
        results.append(dict(zip(columns, row)))
    # FIX: release the connection instead of leaking it per request.
    conn.close()
    return jsonify(DecimalEncoder().encode(results[0]))
# NOTE(review): this function's source was corrupted by credential
# redaction ("******") — the psycopg2.connect() DSN and the cursor_psql
# creation were spliced out, so the text below is not runnable as
# written. Left byte-identical; reconstruct from version control before
# editing.
def generate_taxis_infected_dict(user): conn = psycopg2.connect("dbname=postgres user="******"""select distinct taxi from tracks order by 1""" cursor_psql.execute(sql) results = cursor_psql.fetchall() taxis_dict = {} i = 0 for taxi in results: taxis_dict[i] = taxi[0] i += 1 print(taxis_dict[16]) infected_ids = [] virusStateOffset = pd.read_csv('files/virusState.csv', header=None, low_memory=True) # print(virusStateOffset.loc[0].to_list()) for i in range(0, 8640): inf = virusStateOffset.loc[i].to_list() index = 0 temp = [] for x in inf: if x == 1: # print(index) temp.append(taxis_dict[index]) index += 1 infected_ids.append(temp) print("Writting infected ids") with open("files/taxis_inf.csv", "w", newline="") as f: writer = csv.writer(f) writer.writerows(infected_ids) conn.close()
# NOTE(review): this function (spanning two physical source lines) was
# corrupted by credential redaction ("******") — the psycopg2.connect()
# DSN and the statements between it and the district-map query (cursor
# creation, figure/axes setup for the `fig`/`ax` names used below) were
# spliced out, so the text is not runnable as written. Left
# byte-identical; reconstruct from version control before editing.
def show_plot(user, mode): def animate(i): # adicionar timestamp fig.suptitle(str(datetime.datetime.utcfromtimestamp(ts_i + i * 10))) line.set_data(x1[:i], y1[:i]) line.axes.axis([0, x1[i] * 1.2, 0, y1[i] * 1.5]) my = max((porto[i] * 1.5), (lisboa[i] * 1.5)) lineP.set_data(x2[:i], porto[:i]) lineP.axes.axis([0, x2[i] * 1.2, 0, my]) lineL.set_data(x2[:i], lisboa[:i]) lineL.axes.axis([0, x2[i] * 1.2, 0, my]) if (mode != 2): scat.set_offsets(offsets[i]) s = contigioStateOffset.loc[i].to_list() scat.set_facecolors(c[i]) scat.set_sizes(s) if (mode != 1): for (ax1, ay) in zip(xxx[i], yyy[i]): ax[0].plot(ax1, ay, linewidth=0.2, color='black') debug = True csv.field_size_limit(sys.maxsize) frames = 8640 if debug: print("Setup") scale = 1 / 3000000 conn = psycopg2.connect("dbname=postgres user="******"Query para mapa") sql = "select distrito,st_union(proj_boundary) from cont_aad_caop2018 group by distrito" cursor_psql.execute(sql) results = cursor_psql.fetchall() xs, ys = [], [] for row in results: geom = row[1] if type(geom) is MultiPolygon: for pol in geom: xys = pol[0].coords xs, ys = [], [] for (x, y) in xys: xs.append(x) ys.append(y) ax[0].plot(xs, ys, color='black', lw='0.2') if type(geom) is Polygon: xys = geom[0].coords xs, ys = [], [] for (x, y) in xys: xs.append(x) ys.append(y) ax[0].plot(xs, ys, color='black', lw='0.2') if debug: print("Offsets") if (mode != 2): offsets = [] offsetspd = pd.read_csv('files/offsets3.csv', header=None, low_memory=True) for i in range(0, 8640): l = [] for j in offsetspd.loc[i]: x, y = j.split() x = float(x) y = float(y) if (x == 0.0 and y == 0.0): x = -120000 y = -310000 l.append([x, y]) offsets.append(l) x, y = [], [] for i in offsets[0]: x.append(i[0]) y.append(i[1]) if debug: print("Contágio") virusStateOffset = pd.read_csv('files/virusState.csv', header=None, low_memory=True) contigioStateOffset = pd.read_csv('files/sizeState.csv', header=None, low_memory=True) infetadosOffset = pd.read_csv('files/lenState.csv', header=None, 
low_memory=True) infetadosOffset = pd.read_csv('files/lenState.csv', header=None, low_memory=True) dsit = pd.read_csv('files/distrito_inf.csv', header=None, low_memory=True) #gráfico de infetados x1 = np.linspace(0, 86400, 8641) y1 = infetadosOffset.loc[0].to_list() porto = dsit[0].to_list() lisboa = dsit[1].to_list() #grafico da evo ax[1].title.set_text("Total de Infetados") line, = ax[1].plot(x1, y1, color='k') ax[1].set_xlabel('tempo (s)') ax[1].set_ylabel('Infetados') #grafico por dist x2 = np.linspace(0, 86400, 8640) ax[2].title.set_text("Infetados no Porto e Lisboa") lineP, = ax[2].plot(x2, porto, color='blue') lineP.set_label('Porto') lineL, = ax[2].plot(x2, lisboa, color='orange') lineL.set_label('Lisboa') ax[2].set_xlabel('tempo (s)') ax[2].set_ylabel('Infetados') ax[2].legend(loc='best') if debug: print("tracks") #tracks if (mode != 1): tracks = [] with open('files/tracks_inf.csv', 'r') as csvFile: reader = csv.reader(csvFile) for row in reader: temp = [] for l in row: temp.append(l) tracks.append(temp) xxx = [] yyy = [] patt = '(\-?\d+\.\d+),\ (\-?\d+\.\d+)' repatt = re.compile(patt) for track in tracks: temp_xxx = [] temp_yyy = [] for t in track: temp_x = [] temp_y = [] for match in repatt.findall(t): # print(match[0]) # print(match[1]) temp_x.append(float(match[0])) temp_y.append(float(match[1])) temp_xxx.append(temp_x) temp_yyy.append(temp_y) xxx.append(temp_xxx) yyy.append(temp_yyy) #cores if debug: print("Cores") c = [] for i in range(0, 8640): c.append([ "green" if t == 0 else "red" for t in virusStateOffset.loc[i].to_list() ]) if (mode != 2): scat = ax[0].scatter(x, y, facecolor=c[0], s=contigioStateOffset.loc[i].to_list()) # scat = ax[0].scatter(x,y, facecolor=c[0], s = 3) if (mode != 1): ax[0].plot(xxx[0], yyy[0], linewidth=0.2, color='black') anim = FuncAnimation(fig, animate, interval=10, frames=frames, repeat=False) plt.draw() plt.show()
# NOTE(review): orphaned mid-function fragment — it begins with
# statements referencing names (offsets, i, taxis_colors) defined
# outside this span and ends with a dangling `for row in results:`
# header whose body is elsewhere. Not independently runnable; left
# byte-identical. It appears to set up a matplotlib figure sized to a
# projected bounding box and query district boundaries — TODO confirm
# against the complete original before editing.
scat.set_offsets(offsets[i]) scat.set_facecolors(taxis_colors[i]) scale = 1 / 3000000 conn = None conn = psycopg2.connect(database="gabriellima", user="******", password='', host="127.0.0.1", port="9876") ts_i = 1570665600 ts_f = 1570667000 register(conn) # taxis_infection = creatTaxis(conn) xs_min, xs_max, ys_min, ys_max = -120000, 165000, -310000, 285000 width_in_inches = (xs_max - xs_min) / 0.0254 * 1.1 height_in_inches = (ys_max - ys_min) / 0.0254 * 1.1 fig, ax = plt.subplots(figsize=(width_in_inches * scale, height_in_inches * scale)) ax.axis('off') ax.set(xlim=(xs_min, xs_max), ylim=(ys_min, ys_max)) cursor_psql = conn.cursor() sql = "select distrito,st_union(proj_boundary) from cont_aad_caop2018 group by distrito" cursor_psql.execute(sql) results = cursor_psql.fetchall() xs, ys = [], [] for row in results:
# NOTE(review): top-of-module chunk for a GTFS import script — sets up a
# SQLAlchemy automap Base/engine and a PostGIS-registered psycopg2
# connection, then defines CSV-loading helpers. The chunk is truncated:
# `import_to_db` is cut off after its first statement, and the
# `@contextmanager`, `os`, `csv` and `namedtuple` names used below have
# no visible imports here (presumably imported earlier — TODO confirm).
# Credentials are redacted ("******" / "*****:*****"). Left
# byte-identical.
import psycopg2 from postgis.psycopg import register from sqlalchemy.ext.automap import automap_base from sqlalchemy.orm import Session from sqlalchemy import create_engine Base = automap_base() engine = create_engine("postgresql://*****:*****@localhost:5432/gtfs") Base.prepare(engine, reflect=True) db = psycopg2.connect(host="localhost", dbname="gtfs", user="******", password="******") register(db) @contextmanager def load_csv(fname): """Utility function to handle csv loading""" name = os.path.basename(fname).replace(".txt", "") with open(fname) as f: reader = csv.reader(f) headers = next(reader) NamedTuple = namedtuple(name, headers) yield (NamedTuple, reader) def import_to_db(names, reader, types={}): name = names.__name__
# NOTE(review): top-of-module chunk for a small Flask spatial-sample
# service — opens a module-level PostGIS-registered psycopg2 connection
# and defines /heartbeat, / and /location routes. The chunk appears
# truncated: the `location` view ends right after building the lat/lng
# dict, with the rest of its body outside this view. Left
# byte-identical.
from flask import Flask, request, jsonify, render_template from flask_cors import CORS import psycopg2 from postgis.psycopg import register from postgis import LineString, Point, Polygon, MultiLineString, MultiPolygon connection = psycopg2.connect("host=localhost port=5433 dbname=spatialdb user=postgres password=pub") register(connection) app = Flask(__name__) CORS(app) app.config["DEBUG"] = True @app.route("/heartbeat") def heartbeat(): return jsonify({"status": "healthy"}) @app.route("/") def hello(): message = "Spatial Sample" return render_template('index.html', message=message) @app.route("/location", methods=['POST']) def location(): loc = { 'lat': request.json['lat'], 'lng': request.json['lng'], }
def insert_files(files: [File]):
    """Bulk-insert metadata rows for *files* into the database.

    Each File is serialised via its to_tuple() and the batch is written
    with execute_values in pages of 100 rows.
    """
    with connect(DBN) as conn:
        register(connection=conn)
        with conn.cursor() as curs:
            payload = [record.to_tuple() for record in files]
            extras.execute_values(curs, query_insert_files, payload,
                                  template=None, page_size=100)
def update_file_download_flag(path):
    """Mark the file stored at *path* as downloaded."""
    with connect(DBN) as conn:
        register(connection=conn)
        with conn.cursor() as curs:
            # Bind (flag, path) to the parameterized update statement.
            curs.execute(query_update_file_download_flag, (True, path))
def update_file_parsed_flag(self, path):
    """Mark the file stored at *path* as parsed, via this instance's query."""
    with connect(self.dbc) as conn:
        register(connection=conn)
        with conn.cursor() as curs:
            # Bind (flag, path) to the instance's parameterized update.
            curs.execute(self.update_query, (True, path))
# NOTE(review): script chunk that authenticates a BigQuery client from a
# service-account JSON, opens a PostGIS-registered psycopg2 connection
# and a SQLAlchemy engine, then starts building a SQL string. The chunk
# is truncated inside the triple-quoted `sql` literal, so it cannot be
# edited safely in isolation. Also flagged for follow-up: the bare
# `except:` swallows every error (including KeyboardInterrupt) around
# the connect call, and `cur` is then created from a possibly-undefined
# `pg_conn`. Left byte-identical.
#FILL IN YOUR PATH TO THE 'Benioff Ocean Initiative-454f666d1896.json' #credentials_json = '/Users/seangoral/bq_api_test/venv/Benioff Ocean Initiative-454f666d1896.json' credentials_json = '/home/admin/Benioff Ocean Initiative-454f666d1896.json' credentials = service_account.Credentials.from_service_account_file(credentials_json) project_id = 'benioff-ocean-initiative' client = bigquery.Client(credentials= credentials,project=project_id) import psycopg2 from postgis.psycopg import register try: pg_conn = psycopg2.connect("dbname='gis' user='******' port=5432 host=s4w-postgis password='******'") register(pg_conn) except: print("I am unable to connect to the database") cur=pg_conn.cursor() engine = create_engine('postgresql+psycopg2://admin:whalestrike@s4w-postgis:5432/gis') sql = """SELECT mmsi, operator, DATE(timestamp) as day, timestamp, segment_time_minutes, distance_km, implied_speed_knots,