def posts():
    connection = pool.getconn()
    cursor = connection.cursor()
    props = {'currentPage': 'posts'}
    base = request.args.to_dict()
    base.pop('o', None)
    query = "SELECT * FROM booru_posts ORDER BY added desc "
    params = ()
    # query string values arrive as strings, so cast before paginating
    offset = int(request.args.get('o')) if request.args.get('o') else 0
    query += "OFFSET %s "
    params += (offset,)
    raw_limit = request.args.get('limit')
    limit = int(raw_limit) if raw_limit and int(raw_limit) <= 50 else 25
    query += "LIMIT %s"
    params += (limit,)
    cursor.execute(query, params)
    results = cursor.fetchall()
    cursor.close()
    if connection:
        pool.putconn(connection)
    response = make_response(
        render_template('posts.html', props=props, results=results, base=base), 200)
    response.headers['Cache-Control'] = 'max-age=60, public, stale-while-revalidate=2592000'
    return response
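# Setup sketch (an assumption, not part of the original snippets): the module-level
# `pool` referenced throughout these snippets is presumably a psycopg2 connection pool
# created once at startup, roughly like this. The connection parameters are placeholders.
import psycopg2.pool

pool = psycopg2.pool.ThreadedConnectionPool(
    minconn=1,
    maxconn=20,
    host='localhost',
    dbname='example_db',
    user='example_user',
    password='example_password',
)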
def getResultSetFromDB(funcName, params):
    conn = pool.getconn()
    with conn, conn.cursor(cursor_factory=RealDictCursor) as cursor:
        cursor.callproc(funcName, params)
        result = json.dumps(cursor.fetchall(), default=str)
    pool.putconn(conn)
    return result
def check_pool(self, pool=None):
    """Checks whether its own pool, or the provided one, is valid.

    Keyword Arguments:
        pool {postgre_pool.pool} -- pool to be checked (default: {self.pool})

    Returns:
        int -- 0 if the pool exists and a connection can be checked out, 1 if it is missing or invalid
    """
    if not pool:
        if not self.pool:
            return 1
        else:
            try:
                test_connection = self.pool.getconn()
                self.pool.putconn(test_connection)
                return 0
            except Exception:
                return 1
    else:
        try:
            test_connection = pool.getconn()
            pool.putconn(test_connection)
            return 0
        except Exception:
            return 1
def _connect(self):
    """Connects to the database

    Returns
    -------
    bool
    """
    if self.conn is None:
        try:
            pool = psycopg2.pool.SimpleConnectionPool(1, 20,
                                                      host=self.dbhost,
                                                      database=self.dbname,
                                                      user=self.dbuser,
                                                      password=self.dbpass)
            if not pool:
                QMessageBox.information(
                    None, self.tr('Error'),
                    self.tr('Could not make a stable connection to the GeoDataFarm server'))
            self.conn = pool.getconn()
            self.conn.set_isolation_level(0)
            return True
        except psycopg2.OperationalError as e:
            QMessageBox.information(
                None, self.tr('Error'),
                self.tr("Error connecting to database on {host}. {e}".format(
                    host=self.dbhost, e=str(e))))
            return False
def get_cursor():
    pool = get_connection_pool()
    con = pool.getconn()
    try:
        yield con.cursor()
    finally:
        pool.putconn(con)
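# Usage sketch (an assumption, not part of the original snippets): generator helpers such
# as get_cursor() above are typically wrapped with contextlib.contextmanager so that the
# try/yield/finally pair runs as a context manager. get_connection_pool() is assumed to
# return an already-initialized psycopg2 pool.
from contextlib import contextmanager

@contextmanager
def pooled_cursor():
    pool = get_connection_pool()
    con = pool.getconn()
    try:
        yield con.cursor()   # hand the cursor to the with-block
    finally:
        pool.putconn(con)    # always return the connection to the pool

def fetch_one_example():
    with pooled_cursor() as cur:
        cur.execute("SELECT 1")
        return cur.fetchone()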
def write_entry(email, query, accepted, exec_time):
    pool = setup_engine()
    if not pool:
        return False
    cnx = pool.getconn()
    success = False
    with cnx.cursor() as cursor:
        try:
            cursor.execute(
                """
                INSERT INTO users (email, query, accepted, execution_time)
                VALUES (%s, %s, %s, %s);
                """, (email, query, accepted, exec_time))
        except Exception as e:
            print(f"Unable to run the query {e.args}")
        else:
            success = cursor.rowcount == 1
    try:
        cnx.commit()
        pool.putconn(cnx)
    except Exception:
        print("connection already closed")
        success = False
    return success
def query():
    req = request.get_json()
    if not req:
        return make_response('', 400)
    conn = None
    try:
        conn = pool.getconn()
        with conn.cursor() as cur:
            cur.execute(req['query'])
            results = cur.fetchall()
            return jsonify({'status': 200, 'results': results})
    except (psycopg2.DataError, psycopg2.ProgrammingError,
            psycopg2.NotSupportedError) as e:
        # User error
        return jsonify({'status': 400, 'error': e.pgerror}), 400
    except:
        return jsonify({'status': 500, 'error': str(sys.exc_info()[1])}), 500
    finally:
        if conn is not None:
            pool.putconn(conn)
def init_db() -> int:
    config.init_config()
    init_pool()
    with pool.getconn() as conn:
        with conn.cursor() as cu:
            with SCHEMA_SCRIPT.open() as f:
                cu.execute(f.read())
            with open(SQLCOPY1, 'r', encoding='utf-8') as f:
                cu.copy_expert("COPY recipe FROM stdin DELIMITER ',' CSV HEADER", f)
            with open(SQLCOPY2, 'r', encoding='utf-8') as f:
                cu.copy_expert("COPY ingredient FROM stdin DELIMITER ',' CSV HEADER", f)
            with open(SQLCOPY3, 'r', encoding='utf-8') as f:
                cu.copy_expert("COPY recipeToIngredient FROM stdin DELIMITER ',' CSV HEADER", f)
            with open(SQLCOPY4, 'r', encoding='utf-8') as f:
                cu.copy_expert("COPY equipment FROM stdin DELIMITER ',' CSV HEADER", f)
            with open(SQLCOPY5, 'r', encoding='utf-8') as f:
                cu.copy_expert("COPY recipeToEquipment FROM stdin DELIMITER ',' CSV HEADER", f)
            with open(SQLCOPY6, 'r', encoding='utf-8') as f:
                cu.copy_expert("COPY account FROM stdin DELIMITER ',' CSV HEADER", f)
            with open(SQLCOPY7, 'r', encoding='utf-8') as f:
                cu.copy_expert("COPY recipe_votes FROM stdin DELIMITER ',' CSV HEADER", f)
            # with DATA_SCRIPT.open() as f:
            #     cu.execute(f.read())
    print("Database initialized!")
    return 0
def get_connection():
    pool = get_connection_pool()
    try:
        connection = pool.getconn()
        yield connection
    finally:
        pool.putconn(connection)
def getResultSetFromDBNoJS(funcName, params):
    conn = pool.getconn()
    with conn, conn.cursor(cursor_factory=RealDictCursor) as cursor:
        cursor.callproc(funcName, params)
        # Convert from RealDict => json => Python list
        result = json.loads(json.dumps(cursor.fetchall(), default=str))
    pool.putconn(conn)
    return result
def modifyDB(funcName, params):
    conn = pool.getconn()
    with conn, conn.cursor(cursor_factory=RealDictCursor) as cursor:
        cursor.callproc(funcName, params)
        result = json.dumps(cursor.fetchall())
    pool.putconn(conn)
    # Return status and error message
    return result
def cursor():
    global pool
    con = pool.getconn()
    try:
        yield con.cursor()
    finally:
        con.commit()
        pool.putconn(con)
def get_conn(pool):
    conn = None
    try:
        conn = pool.getconn()
        conn.autocommit = True
        yield conn
    finally:
        if conn:
            pool.putconn(conn)
def getConnection(self, name: str):
    connection = self[name]
    # psycopg2.pool is a submodule and must be imported explicitly for the isinstance check
    import psycopg2.pool
    if isinstance(connection, psycopg2.pool.ThreadedConnectionPool):
        pool = connection
        return pool.getconn()
    else:
        return connection
def cursor():
    global pool
    con = pool.getconn()
    con.set_isolation_level(extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    try:
        yield con.cursor()
    finally:
        con.commit()
        pool.putconn(con)
def initialize_database() -> None:
    connection = pool.getconn()
    cursor = connection.cursor()
    cursor.execute("""CREATE TABLE IF NOT EXISTS Packets(
        mac TEXT,
        url TEXT,
        time INTEGER)""")
    connection.commit()
    cursor.close()
    pool.putconn(connection)  # return the connection to the pool
def db_test():
    pool = get_pool()
    conn = pool.getconn()
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM _test")
    cursor.fetchall()
    cursor.close()
    pool.putconn(conn)
    return "OK"
def getConnectionWithCursor(dictCursor=False):
    dbConn = pool.getconn()
    if dictCursor:
        dbCursor = dbConn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    else:
        dbCursor = dbConn.cursor()
    return dbConn, dbCursor
def create(name, macs):
    """Create a group"""
    if name and macs:
        conn = pool.getconn()
        cursor = conn.cursor()
        macs = macs.split(',')
        cursor.execute("""INSERT INTO Groups(addresses, name) VALUES (%s,%s)""", (macs, name))
        conn.commit()
        cursor.close()
        pool.putconn(conn)  # return the connection to the pool
        click.echo(f'Created the group {name}')
def push_packets_to_database(handshakes: list) -> None:
    if handshakes:  # Is the list not empty?
        connection = pool.getconn()
        cursor = connection.cursor()
        for handshake in handshakes:
            cursor.execute("INSERT INTO Packets(mac, url, time) VALUES (%s, %s, %s)",
                           (handshake.mac, handshake.url, handshake.time))
        connection.commit()
        cursor.close()
        pool.putconn(connection)
def random_post():
    connection = pool.getconn()
    cursor = connection.cursor()
    query = "SELECT service, \"user\", id FROM booru_posts WHERE random() < 0.01 LIMIT 1"
    cursor.execute(query)
    random = cursor.fetchall()
    cursor.close()
    if connection:
        pool.putconn(connection)
    return redirect(f'/{random[0][0]}/user/{random[0][1]}/post/{random[0][2]}')
def _get_conn(conn=None, **kwargs):
    if conn is not None:
        return conn, None
    if pool is not None:
        return pool.getconn(), _put_conn
    if not kwargs:
        raise RuntimeError(
            "Exporting R-tree to PostGIS requires either passing a connection object, initializing a "
            "connection pool, or providing keyword arguments that can be used to initialize a "
            "connection. Please check the documentation for details.")
    return psycopg2.connect(**kwargs), _close_conn
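# Usage sketch (an assumption, not part of the original snippets): _get_conn() above pairs
# the connection with a cleanup callback (None when the caller owns the connection,
# _put_conn for pooled connections, _close_conn for ad-hoc ones), so a call site could
# presumably look roughly like this. The query and function name are placeholders.
def run_with_conn(conn=None, **kwargs):
    conn, cleanup = _get_conn(conn, **kwargs)
    try:
        with conn.cursor() as cur:
            cur.execute("SELECT 1")
            return cur.fetchone()
    finally:
        if cleanup is not None:
            cleanup(conn)   # return to the pool or close, depending on where it came from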
def use_pure_sql():
    conn = pool.getconn()
    cur = conn.cursor()
    cur.execute("select * from person where person_id in ('mosky', 'andy') order by person_id")
    result = cur.fetchall()
    cur.close()
    pool.putconn(conn)
    return result
def hello_world():
    result = []
    connection = pool.getconn()
    cur = connection.cursor()
    cur.execute('select first_name, last_name from people limit 100;')
    for first_name, last_name in cur.fetchall():
        result.append('{} {}<br>'.format(first_name, last_name))
    pool.putconn(connection)
    return '<html><head></head><body>{}</body></html>'.format('\n'.join(result))
def random_artist():
    connection = pool.getconn()
    cursor = connection.cursor()
    query = "SELECT id, service FROM lookup WHERE service != 'discord-channel' ORDER BY random() LIMIT 1"
    cursor.execute(query)
    random = cursor.fetchall()
    cursor.close()
    if connection:
        pool.putconn(connection)
    if len(random) == 0:
        return redirect('back')
    return redirect(f'/{random[0][1]}/user/{random[0][0]}')
def execute_sql(pool, sql, params):
    connection = pool.getconn()
    if not connection:
        raise Exception('[psql] Cannot acquire connection from pool')
    cursor = connection.cursor()
    cursor.execute(sql, params)
    rows = cursor.fetchall()
    cursor.close()
    pool.putconn(connection)
    return rows
def fun(*args, **kwargs):
    conn = pool.getconn()
    cur = conn.cursor()
    try:
        ret = f(cur, *args, **kwargs)
        conn.commit()
        return ret
    except Exception as err:
        conn.rollback()
        raise err
    finally:
        pool.putconn(conn)
def list(): """List groups created by the user""" conn = pool.getconn() cursor = conn.cursor() cursor.execute("""SELECT * FROM groups""") data = cursor.fetchall() conn.commit() cursor.close() table = PrettyTable() table.field_names = ['MACs', 'Name'] for i in data: table.add_row(i) click.echo(table)
def wrapped(*p, **kw):
    conn = pool.getconn()
    kw[keyword] = conn.cursor()
    try:
        result = func(*p, **kw)
        if autocommit:
            conn.commit()
        return result
    except:
        conn.rollback()
        raise
    finally:
        pool.putconn(conn)
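# Context sketch (an assumption, not part of the original snippets): wrapped() above reads
# like the inner function of a decorator factory that injects a pooled cursor into the
# decorated callable. The factory below, including its name and parameters, is hypothetical.
import functools

def with_pooled_cursor(keyword='cursor', autocommit=True):
    def decorator(func):
        @functools.wraps(func)
        def wrapped(*p, **kw):
            conn = pool.getconn()
            kw[keyword] = conn.cursor()
            try:
                result = func(*p, **kw)
                if autocommit:
                    conn.commit()
                return result
            except Exception:
                conn.rollback()
                raise
            finally:
                pool.putconn(conn)
        return wrapped
    return decorator

# Example use of the hypothetical decorator:
# @with_pooled_cursor()
# def list_users(cursor=None):
#     cursor.execute("SELECT id, name FROM users")
#     return cursor.fetchall()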
def get_db_connection_from_pool(request_id):
    url = urlparse(app.config['DATABASE_URL'])
    connection = None
    try:
        log_utils.log_database_connection_from_pool(request_id, url, "SUCCESS")
        connection = pool.getconn()
        yield connection
    except exception.DatabaseConnectionError:
        log_utils.log_database_connection_from_pool(request_id, url, "FAILURE")
        raise exception.ServiceError
    finally:
        if connection is not None:
            pool.putconn(connection)
        log_utils.log_database_connection_back_to_pool(request_id, url, "SUCCESS")
def getcursor(**kwargs):
    pool = UnpackHelpers.get_sql_pool()
    conn = pool.getconn()
    try:
        yield conn.cursor(**kwargs)
        conn.commit()
    except Exception as e:
        conn.rollback()
        raise
    finally:
        pool.putconn(conn)
def list(): """List all packets sniffed by the daemon""" conn = pool.getconn() cursor = conn.cursor() cursor.execute("""SELECT * FROM packets""") data = cursor.fetchall() conn.commit() cursor.close() table = PrettyTable() table.field_names = ['MAC', 'URL', 'Timestamp'] for i in data: table.add_row(i) click.echo(table)
def search(mac):
    """Display all packets sent by a certain MAC address"""
    conn = pool.getconn()
    cursor = conn.cursor()
    cursor.execute("""SELECT * FROM packets where mac=%s""", (mac,))
    data = cursor.fetchall()
    conn.commit()
    cursor.close()
    pool.putconn(conn)  # return the connection to the pool
    table = PrettyTable()
    table.field_names = ['MAC', 'URL', 'Timestamp']
    for i in data:
        table.add_row(i)
    click.echo(table)
def get_db_connection():
    try:
        global pool
        pool = psycopg2.pool.ThreadedConnectionPool(1, 20,
                                                    user=app.config['DB_USER'],
                                                    password=app.config['DB_PASSWORD'],
                                                    host=app.config['DB_HOST'],
                                                    port=app.config['DB_PORT'],
                                                    database=app.config['DB_NAME'])
        connection = pool.getconn()
        yield connection
    finally:
        pool.putconn(connection)
def postgres_session(pool):
    engine = pool.getconn()
    session = engine.cursor()
    try:
        yield session
        session.close()
    except Exception as e:
        session.close()
        engine.rollback()
        raise e
    finally:
        session.close()
        engine.commit()
        pool.putconn(engine)
def tr_start(self, context):
    conn = pool.getconn()
    context.db = conn.cursor()
def tr_start(self):
    conn = pool.getconn()
    env.context.pgcursor = conn.cursor()
def __init__(self, pool):
    self._pool = pool
    self._conn = pool.getconn()
    self._conn.set_session(autocommit=True)
def getConnection():
    global pool
    return pool.getconn()
        # clear old data
        cur.execute("DELETE FROM word_definition WHERE wid = %s", (wid,))
        cur.execute("DELETE FROM word_variants WHERE wid = %s", (wid,))
        # request bing server for word detail
        word = bing.search(word)
        if word is None:
            return False
        # pronounce
        prus = word["pronounces"].get("us")
        preng = word["pronounces"].get("eng")
        cur.execute("UPDATE word SET pronounce_eng = %s, pronounce_us = %s WHERE wid = %s",
                    (preng, prus, wid))
        # definitions
        params = ((wid, defi["pos"], defi["def"]) for defi in word["definitions"])
        cur.executemany("INSERT INTO word_definition (wid, type, definition) VALUES (%s, %s, %s)", params)
        # variants
        params = ((wid, v["kind"], v["word"]) for v in word["variants"])
        cur.executemany("INSERT INTO word_variants (wid, kind, word) VALUES (%s, %s, %s)", params)
        return True
    else:
        return None

conn = pool.getconn()
try:
    checkandinit(conn)
except Exception:
    pass
finally:
    pool.putconn(conn)
    del conn